Mirror of https://github.com/oven-sh/bun (synced 2026-02-03 07:28:53 +00:00)

Compare commits: dylan/byte...patch-1 (185 commits)
@@ -371,7 +371,7 @@ function getZigAgent(platform, options) {
 * @returns {Agent}
 */
function getTestAgent(platform, options) {
-  const { os, arch } = platform;
+  const { os, arch, profile } = platform;

  if (os === "darwin") {
    return {
@@ -391,6 +391,13 @@ function getTestAgent(platform, options) {
  }

  if (arch === "aarch64") {
+    if (profile === "asan") {
+      return getEc2Agent(platform, options, {
+        instanceType: "c8g.2xlarge",
+        cpuCount: 2,
+        threadsPerCore: 1,
+      });
+    }
    return getEc2Agent(platform, options, {
      instanceType: "c8g.xlarge",
      cpuCount: 2,
@@ -398,6 +405,13 @@ function getTestAgent(platform, options) {
    });
  }

+  if (profile === "asan") {
+    return getEc2Agent(platform, options, {
+      instanceType: "c7i.2xlarge",
+      cpuCount: 2,
+      threadsPerCore: 1,
+    });
+  }
  return getEc2Agent(platform, options, {
    instanceType: "c7i.xlarge",
    cpuCount: 2,
@@ -538,6 +552,7 @@ function getLinkBunStep(platform, options) {
    cancel_on_build_failing: isMergeQueue(),
    env: {
      BUN_LINK_ONLY: "ON",
+      ASAN_OPTIONS: "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=0",
      ...getBuildEnv(platform, options),
    },
    command: `${getBuildCommand(platform, options, "build-bun")} --target bun`,
@@ -601,6 +616,9 @@ function getTestBunStep(platform, options, testOptions = {}) {
    cancel_on_build_failing: isMergeQueue(),
    parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
    timeout_in_minutes: profile === "asan" || os === "windows" ? 45 : 30,
+    env: {
+      ASAN_OPTIONS: "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=0",
+    },
    command:
      os === "windows"
        ? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`
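The two ASAN hunks above inject the same colon-separated `ASAN_OPTIONS` string into both the link step and the test step. As an aside, the sketch below (not how the CI script does it; it simply inlines the string) shows one way such a string can be derived from a single record of flags; the flag names and values are copied from the hunks, the helper itself is hypothetical.

```ts
// Minimal sketch, assuming nothing beyond the flag values shown above.
const asanFlags: Record<string, string> = {
  allow_user_segv_handler: "1",
  disable_coredump: "0",
  detect_leaks: "0",
};

// Produces "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=0"
const ASAN_OPTIONS = Object.entries(asanFlags)
  .map(([key, value]) => `${key}=${value}`)
  .join(":");

console.log(ASAN_OPTIONS);
```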
4 .github/CODEOWNERS (vendored)
@@ -3,3 +3,7 @@
|
||||
|
||||
# Tests
|
||||
/test/expectations.txt @Jarred-Sumner
|
||||
|
||||
# Types
|
||||
*.d.ts @alii
|
||||
/packages/bun-types/ @alii
|
||||
|
||||
2 .github/actions/bump/action.yml (vendored)
@@ -25,7 +25,7 @@ runs:
|
||||
echo "version=$LATEST" >> $GITHUB_OUTPUT
|
||||
echo "message=$MESSAGE" >> $GITHUB_OUTPUT
|
||||
- name: Create Pull Request
|
||||
-uses: peter-evans/create-pull-request@v4
+uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
add-paths: |
|
||||
CMakeLists.txt
|
||||
|
||||
19 .github/workflows/auto-assign-types.yml (vendored, new file)
@@ -0,0 +1,19 @@
|
||||
name: Auto Assign Types Issues
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [labeled]
|
||||
|
||||
jobs:
|
||||
auto-assign:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.label.name == 'types'
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- name: Assign to alii
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GH_REPO: ${{ github.repository }}
|
||||
run: |
|
||||
gh issue edit ${{ github.event.issue.number }} --add-assignee alii
|
||||
63 .github/workflows/labeled.yml (vendored)
@@ -105,11 +105,16 @@ jobs:
|
||||
env:
|
||||
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
GITHUB_ISSUE_NUMBER: ${{ github.event.issue.number }}
|
||||
shell: bash
|
||||
run: |
|
||||
LABELS=$(bun scripts/read-issue.ts)
|
||||
bun scripts/is-outdated.ts
|
||||
|
||||
# Check for patterns that should close the issue
|
||||
CLOSE_ACTION=$(bun scripts/handle-crash-patterns.ts)
|
||||
echo "close-action=$CLOSE_ACTION" >> $GITHUB_OUTPUT
|
||||
|
||||
if [[ -f "is-outdated.txt" ]]; then
|
||||
echo "is-outdated=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
@@ -118,6 +123,10 @@ jobs:
|
||||
echo "outdated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "is-standalone.txt" ]]; then
|
||||
echo "is-standalone=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "is-very-outdated.txt" ]]; then
|
||||
echo "is-very-outdated=true" >> $GITHUB_OUTPUT
|
||||
LABELS="$LABELS,old-version"
|
||||
@@ -127,9 +136,32 @@ jobs:
|
||||
|
||||
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
|
||||
echo "labels=$LABELS" >> $GITHUB_OUTPUT
|
||||
-rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt
+rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt is-standalone.txt
|
||||
- name: Close issue if pattern detected
|
||||
if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close == true
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const closeAction = JSON.parse('${{ steps.add-labels.outputs.close-action }}');
|
||||
|
||||
// Comment with the reason
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body: closeAction.comment
|
||||
});
|
||||
|
||||
// Close the issue
|
||||
await github.rest.issues.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
state: 'closed',
|
||||
state_reason: closeAction.reason
|
||||
});
|
||||
- name: Generate comment text with Sentry Link
|
||||
-if: github.event.label.name == 'crash'
+if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close != true
|
||||
# ignore if fail
|
||||
continue-on-error: true
|
||||
id: generate-comment-text
|
||||
@@ -163,8 +195,17 @@ jobs:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
labels: ${{ steps.add-labels.outputs.labels }}
|
||||
- name: Comment outdated (standalone executable)
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
|
||||
- name: Comment outdated
|
||||
-if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
+if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
@@ -178,8 +219,22 @@ jobs:
|
||||
```sh
|
||||
bun upgrade
|
||||
```
|
||||
- name: Comment with Sentry Link and outdated version (standalone executable)
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
|
||||
|
||||
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
|
||||
|
||||
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
|
||||
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
|
||||
- name: Comment with Sentry Link and outdated version
|
||||
-if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
+if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
|
||||
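The new crash-auto-close path in labeled.yml hinges on `scripts/handle-crash-patterns.ts` printing a JSON object that the workflow later reads with `fromJson(...)`. That script is not part of this diff; the sketch below only illustrates the output contract the workflow appears to expect (`close`, `comment`, `reason`), and the pattern check in it is made up for illustration.

```ts
// Hypothetical sketch of the stdout contract labeled.yml consumes.
// Field names come from how the workflow uses them (closeAction.close,
// closeAction.comment, closeAction.reason); the regex is illustrative only.
interface CloseAction {
  close: boolean;
  comment?: string; // posted on the issue before closing
  reason?: "completed" | "not_planned"; // forwarded as state_reason
}

function decideCloseAction(issueBody: string): CloseAction {
  if (/example known-crash pattern/i.test(issueBody)) {
    return {
      close: true,
      comment: "This crash matches a pattern that is already tracked elsewhere.",
      reason: "not_planned",
    };
  }
  return { close: false };
}

// The workflow captures this single line of JSON into $CLOSE_ACTION.
console.log(JSON.stringify(decideCloseAction(process.env.GITHUB_ISSUE_BODY ?? "")));
```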
89 .github/workflows/on-submodule-update.yml (vendored, deleted file)
@@ -1,89 +0,0 @@
|
||||
name: Comment on updated submodule
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- "src/generated_versions_list.zig"
|
||||
- ".github/workflows/on-submodule-update.yml"
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
name: Comment
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
steps:
|
||||
- name: Checkout current
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash generated versions list
|
||||
id: hash
|
||||
run: |
|
||||
echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.base_ref }}
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash base
|
||||
id: base
|
||||
run: |
|
||||
echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Compare
|
||||
id: compare
|
||||
run: |
|
||||
if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
|
||||
echo "changed=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "changed=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
- name: Find Comment
|
||||
id: comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: github-actions[bot]
|
||||
body-includes: <!-- generated-comment submodule-updated -->
|
||||
- name: Write Warning Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
edit-mode: replace
|
||||
body: |
|
||||
⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.
|
||||
|
||||
If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.
|
||||
|
||||
<!-- generated-comment submodule-updated -->
|
||||
- name: Add labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
actions: "add-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Remove labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false'
|
||||
with:
|
||||
actions: "remove-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Delete outdated comment
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
|
||||
with:
|
||||
actions: "delete-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
2 .github/workflows/update-cares.yml (vendored)
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
-uses: peter-evans/create-pull-request@v4
+uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
4 .github/workflows/update-hdrhistogram.yml (vendored)
@@ -55,7 +55,7 @@ jobs:
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
# Try to get commit SHA from tag object (for annotated tags)
|
||||
# If it fails, assume it's a lightweight tag pointing directly to commit
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" 2>/dev/null | jq -r '.object.sha // empty')
|
||||
@@ -83,7 +83,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
-uses: peter-evans/create-pull-request@v4
+uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
4 .github/workflows/update-highway.yml (vendored)
@@ -58,7 +58,7 @@ jobs:
|
||||
|
||||
TAG_OBJECT_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
|
||||
TAG_OBJECT_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
|
||||
|
||||
|
||||
if [ -z "$TAG_OBJECT_SHA" ] || [ "$TAG_OBJECT_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
@@ -99,7 +99,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
-uses: peter-evans/create-pull-request@v4
+uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
2 .github/workflows/update-libarchive.yml (vendored)
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
-uses: peter-evans/create-pull-request@v4
+uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
2 .github/workflows/update-libdeflate.yml (vendored)
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
-uses: peter-evans/create-pull-request@v4
+uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
6 .github/workflows/update-lolhtml.yml (vendored)
@@ -55,12 +55,12 @@ jobs:
|
||||
TAG_REF_RESPONSE=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG")
|
||||
LATEST_TAG_SHA=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.sha')
|
||||
TAG_OBJECT_TYPE=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.type')
|
||||
|
||||
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
if [ "$TAG_OBJECT_TYPE" = "tag" ]; then
|
||||
# This is an annotated tag, we need to get the commit it points to
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
@@ -92,7 +92,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
-uses: peter-evans/create-pull-request@v4
+uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
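Several of these update-* workflows share the same tag-resolution dance in shell: look up the tag ref, and if it resolves to an annotated tag object rather than a commit, follow it one more hop to the commit SHA. The sketch below restates that logic in TypeScript against the same GitHub REST endpoints; the repository and tag values are placeholders, not taken from the workflows.

```ts
// Sketch of the annotated-vs-lightweight tag resolution used above,
// with placeholder repo/tag values.
async function resolveTagToCommitSha(repo: string, tag: string): Promise<string> {
  const refRes = await fetch(`https://api.github.com/repos/${repo}/git/refs/tags/${tag}`);
  const ref = await refRes.json();

  let sha: string = ref.object.sha;
  if (ref.object.type === "tag") {
    // Annotated tag: the ref points at a tag object, not the commit itself.
    const tagRes = await fetch(`https://api.github.com/repos/${repo}/git/tags/${sha}`);
    sha = (await tagRes.json()).object.sha;
  }
  return sha;
}

console.log(await resolveTagToCommitSha("cloudflare/lol-html", "v2.0.0"));
```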
4 .github/workflows/update-lshpack.yml (vendored)
@@ -59,7 +59,7 @@ jobs:
|
||||
|
||||
LATEST_TAG_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
|
||||
TAG_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')
|
||||
|
||||
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
@@ -97,7 +97,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
-uses: peter-evans/create-pull-request@v4
+uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
2 .github/workflows/update-sqlite3.yml (vendored)
@@ -91,7 +91,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
|
||||
-uses: peter-evans/create-pull-request@v4
+uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
79 .github/workflows/update-vendor.yml (vendored, new file)
@@ -0,0 +1,79 @@
|
||||
name: Update vendor
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
package:
|
||||
- elysia
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: oven-sh/setup-bun@v2
|
||||
|
||||
- name: Check version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
current=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].tag' ${{ matrix.package }})
|
||||
repository=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].repository' ${{ matrix.package }} | cut -d'/' -f4,5)
|
||||
|
||||
if [ -z "$current" ]; then
|
||||
echo "Error: Could not find COMMIT line in test/vendor.json"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$current" >> $GITHUB_OUTPUT
|
||||
echo "repository=$repository" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/${repository}/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
bun -e 'await Bun.write("test/vendor.json", JSON.stringify((await Bun.file("test/vendor.json").json()).map(v=>{if(v.package===process.argv[1])v.tag=process.argv[2];return v;}), null, 2) + "\n")' ${{ matrix.package }} ${{ steps.check-version.outputs.latest }}
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
test/vendor.json
|
||||
commit-message: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-${{ matrix.package }}-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates ${{ matrix.package }} to version ${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Compare: https://github.com/${{ steps.check-version.outputs.repository }}/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-vendor.yml)
|
||||
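The check-and-bump steps above do their JSON work through `bun -p`/`bun -e` one-liners. Spelled out as a small standalone script, the update step looks roughly like the sketch below; the `VendorEntry` shape (`package`, `repository`, `tag`) is inferred from those one-liners and the file name `update-vendor-tag.ts` is hypothetical.

```ts
// Rough expansion of the workflow's `bun -e` update one-liner.
// Run as: bun update-vendor-tag.ts <package> <new-tag>   (hypothetical script name)
interface VendorEntry {
  package: string;
  repository: string;
  tag: string;
}

const [pkg, newTag] = process.argv.slice(2); // e.g. "elysia" "1.2.3"

const entries: VendorEntry[] = await Bun.file("test/vendor.json").json();
const updated = entries.map(entry =>
  entry.package === pkg ? { ...entry, tag: newTag } : entry,
);

await Bun.write("test/vendor.json", JSON.stringify(updated, null, 2) + "\n");
```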
2 .github/workflows/update-zstd.yml (vendored)
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
-uses: peter-evans/create-pull-request@v4
+uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
5 .github/workflows/vscode-release.yml (vendored)
@@ -45,3 +45,8 @@ jobs:
|
||||
env:
|
||||
VSCE_PAT: ${{ secrets.VSCODE_EXTENSION }}
|
||||
working-directory: packages/bun-vscode/extension
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-vscode-${{ github.event.inputs.version }}.vsix
|
||||
path: packages/bun-vscode/extension/bun-vscode-${{ github.event.inputs.version }}.vsix
|
||||
|
||||
11 .vscode/launch.json (generated, vendored)
@@ -25,6 +25,9 @@
|
||||
// "BUN_JSC_validateExceptionChecks": "1",
|
||||
// "BUN_JSC_dumpSimulatedThrows": "1",
|
||||
// "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
|
||||
// "BUN_DESTRUCT_VM_ON_EXIT": "1",
|
||||
// "ASAN_OPTIONS": "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1:abort_on_error=1",
|
||||
// "LSAN_OPTIONS": "malloc_context_size=100:print_suppressions=1:suppressions=${workspaceFolder}/test/leaksan.supp",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
"sourceMap": {
|
||||
@@ -57,11 +60,17 @@
|
||||
"name": "bun run [file]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["${file}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "0",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
// "BUN_JSC_validateExceptionChecks": "1",
|
||||
// "BUN_JSC_dumpSimulatedThrows": "1",
|
||||
// "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
|
||||
// "BUN_DESTRUCT_VM_ON_EXIT": "1",
|
||||
// "ASAN_OPTIONS": "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1:abort_on_error=1",
|
||||
// "LSAN_OPTIONS": "malloc_context_size=100:print_suppressions=1:suppressions=${workspaceFolder}/test/leaksan.supp",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
"sourceMap": {
|
||||
|
||||
10 CLAUDE.md
@@ -4,18 +4,14 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed
|
||||
|
||||
### Build Commands
|
||||
|
||||
-- **Build debug version**: `bun bd`
+- **Build Bun**: `bun bd`
|
||||
- Creates a debug build at `./build/debug/bun-debug`
|
||||
-- **CRITICAL**: DO NOT set a build timeout. Compilation takes ~5 minutes. Be patient.
+- **CRITICAL**: no need for a timeout, the build is really fast!
|
||||
- **Run tests with your debug build**: `bun bd test <test-file>`
|
||||
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
|
||||
- **Run any command with debug build**: `bun bd <command>`
|
||||
|
||||
### Other Build Variants
|
||||
|
||||
- `bun run build:release` - Release build
|
||||
|
||||
Address sanitizer is enabled by default in debug builds of Bun.
|
||||
Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.json script.
|
||||
|
||||
## Testing
|
||||
|
||||
|
||||
@@ -31,6 +31,11 @@ include(SetupCcache)
|
||||
parse_package_json(VERSION_VARIABLE DEFAULT_VERSION)
|
||||
optionx(VERSION STRING "The version of Bun" DEFAULT ${DEFAULT_VERSION})
|
||||
project(Bun VERSION ${VERSION})
|
||||
|
||||
# Bun uses C++23, which is compatible with BoringSSL's C++17 requirement
|
||||
set(CMAKE_CXX_STANDARD 23)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
|
||||
include(Options)
|
||||
include(CompilerFlags)
|
||||
|
||||
@@ -43,6 +48,9 @@ include(SetupEsbuild)
|
||||
include(SetupZig)
|
||||
include(SetupRust)
|
||||
|
||||
# Generate dependency versions header
|
||||
include(GenerateDependencyVersions)
|
||||
|
||||
# --- Targets ---
|
||||
|
||||
include(BuildBun)
|
||||
|
||||
@@ -21,7 +21,7 @@ $ sudo pacman -S base-devel ccache cmake git go libiconv libtool make ninja pkg-
|
||||
```
|
||||
|
||||
```bash#Fedora
|
||||
-$ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
+$ sudo dnf install cargo clang19 llvm19 lld19 ccache cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
|
||||
```
|
||||
|
||||
```bash#openSUSE Tumbleweed
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
const isBun = typeof globalThis?.Bun?.sql !== "undefined";
|
||||
import postgres from "postgres";
|
||||
-const sql = isBun ? Bun.sql : postgres;
+const sql = isBun ? Bun.sql : postgres();
|
||||
|
||||
// Create the table if it doesn't exist
|
||||
await sql`
|
||||
|
||||
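The one-line change above matters because `postgres` is a factory: the non-Bun branch has to call it to get a usable query client, while `Bun.sql` is already a tagged-template client. A minimal usage sketch of the corrected pattern follows; connection settings are assumed to come from the environment (e.g. DATABASE_URL), not from this diff.

```ts
// Minimal sketch of the corrected fallback. Connection details are
// assumed to be provided by the environment.
import postgres from "postgres";

const isBun = typeof globalThis?.Bun?.sql !== "undefined";
const sql = isBun ? Bun.sql : postgres();

const [row] = await sql`SELECT 1 AS one`;
console.log(row.one); // 1
```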
21 build.zig
@@ -48,6 +48,7 @@ const BunBuildOptions = struct {
|
||||
/// enable debug logs in release builds
|
||||
enable_logs: bool = false,
|
||||
enable_asan: bool,
|
||||
enable_valgrind: bool,
|
||||
tracy_callstack_depth: u16,
|
||||
reported_nodejs_version: Version,
|
||||
/// To make iterating on some '@embedFile's faster, we load them at runtime
|
||||
@@ -94,6 +95,7 @@ const BunBuildOptions = struct {
|
||||
opts.addOption(bool, "baseline", this.isBaseline());
|
||||
opts.addOption(bool, "enable_logs", this.enable_logs);
|
||||
opts.addOption(bool, "enable_asan", this.enable_asan);
|
||||
opts.addOption(bool, "enable_valgrind", this.enable_valgrind);
|
||||
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
|
||||
opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
|
||||
opts.addOption(bool, "override_no_export_cpp_apis", this.override_no_export_cpp_apis);
|
||||
@@ -213,26 +215,21 @@ pub fn build(b: *Build) !void {
|
||||
var build_options = BunBuildOptions{
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
|
||||
.os = os,
|
||||
.arch = arch,
|
||||
|
||||
.codegen_path = codegen_path,
|
||||
.codegen_embed = codegen_embed,
|
||||
.no_llvm = no_llvm,
|
||||
.override_no_export_cpp_apis = override_no_export_cpp_apis,
|
||||
|
||||
.version = try Version.parse(bun_version),
|
||||
.canary_revision = canary: {
|
||||
const rev = b.option(u32, "canary", "Treat this as a canary build") orelse 0;
|
||||
break :canary if (rev == 0) null else rev;
|
||||
},
|
||||
|
||||
.reported_nodejs_version = try Version.parse(
|
||||
b.option([]const u8, "reported_nodejs_version", "Reported Node.js version") orelse
|
||||
"0.0.0-unset",
|
||||
),
|
||||
|
||||
.sha = sha: {
|
||||
const sha_buildoption = b.option([]const u8, "sha", "Force the git sha");
|
||||
const sha_github = b.graph.env_map.get("GITHUB_SHA");
|
||||
@@ -268,10 +265,10 @@ pub fn build(b: *Build) !void {
|
||||
|
||||
break :sha sha;
|
||||
},
|
||||
|
||||
.tracy_callstack_depth = b.option(u16, "tracy_callstack_depth", "") orelse 10,
|
||||
.enable_logs = b.option(bool, "enable_logs", "Enable logs in release") orelse false,
|
||||
.enable_asan = b.option(bool, "enable_asan", "Enable asan") orelse false,
|
||||
.enable_valgrind = b.option(bool, "enable_valgrind", "Enable valgrind") orelse false,
|
||||
};
|
||||
|
||||
// zig build obj
|
||||
@@ -500,6 +497,7 @@ fn addMultiCheck(
|
||||
.codegen_path = root_build_options.codegen_path,
|
||||
.no_llvm = root_build_options.no_llvm,
|
||||
.enable_asan = root_build_options.enable_asan,
|
||||
.enable_valgrind = root_build_options.enable_valgrind,
|
||||
.override_no_export_cpp_apis = root_build_options.override_no_export_cpp_apis,
|
||||
};
|
||||
|
||||
@@ -587,9 +585,15 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
|
||||
.root_module = root,
|
||||
});
|
||||
configureObj(b, opts, obj);
|
||||
if (enableFastBuild(b)) obj.root_module.strip = true;
|
||||
return obj;
|
||||
}
|
||||
|
||||
fn enableFastBuild(b: *Build) bool {
|
||||
const val = b.graph.env_map.get("BUN_BUILD_FAST") orelse return false;
|
||||
return std.mem.eql(u8, val, "1");
|
||||
}
|
||||
|
||||
fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
|
||||
// Flags on root module get used for the compilation
|
||||
obj.root_module.omit_frame_pointer = false;
|
||||
@@ -600,7 +604,7 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
|
||||
// Object options
|
||||
obj.use_llvm = !opts.no_llvm;
|
||||
obj.use_lld = if (opts.os == .mac) false else !opts.no_llvm;
|
||||
-if (opts.enable_asan) {
+if (opts.enable_asan and !enableFastBuild(b)) {
|
||||
if (@hasField(Build.Module, "sanitize_address")) {
|
||||
obj.root_module.sanitize_address = true;
|
||||
} else {
|
||||
@@ -630,7 +634,7 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
|
||||
obj.link_function_sections = true;
|
||||
obj.link_data_sections = true;
|
||||
|
||||
-if (opts.optimize == .Debug) {
+if (opts.optimize == .Debug and opts.enable_valgrind) {
|
||||
obj.root_module.valgrind = true;
|
||||
}
|
||||
}
|
||||
@@ -739,6 +743,7 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
|
||||
.{ .file = "node-fallbacks/url.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "node-fallbacks/util.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "node-fallbacks/zlib.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "eval/feedback.ts", .enable = opts.shouldEmbedCode() },
|
||||
}) |entry| {
|
||||
if (!@hasField(@TypeOf(entry), "enable") or entry.enable) {
|
||||
const path = b.pathJoin(&.{ opts.codegen_path, entry.file });
|
||||
|
||||
@@ -60,10 +60,10 @@ endif()
|
||||
# Windows Code Signing Option
|
||||
if(WIN32)
|
||||
optionx(ENABLE_WINDOWS_CODESIGNING BOOL "Enable Windows code signing with DigiCert KeyLocker" DEFAULT OFF)
|
||||
|
||||
|
||||
if(ENABLE_WINDOWS_CODESIGNING)
|
||||
message(STATUS "Windows code signing: ENABLED")
|
||||
|
||||
|
||||
# Check for required environment variables
|
||||
if(NOT DEFINED ENV{SM_API_KEY})
|
||||
message(WARNING "SM_API_KEY not set - code signing may fail")
|
||||
@@ -114,8 +114,10 @@ endif()
|
||||
|
||||
if(DEBUG AND ((APPLE AND ARCH STREQUAL "aarch64") OR LINUX))
|
||||
set(DEFAULT_ASAN ON)
|
||||
set(DEFAULT_VALGRIND OFF)
|
||||
else()
|
||||
set(DEFAULT_ASAN OFF)
|
||||
set(DEFAULT_VALGRIND OFF)
|
||||
endif()
|
||||
|
||||
optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT ${DEFAULT_ASAN})
|
||||
|
||||
@@ -13,7 +13,10 @@
|
||||
},
|
||||
{
|
||||
"output": "JavaScriptSources.txt",
|
||||
"paths": ["src/js/**/*.{js,ts}"]
|
||||
"paths": [
|
||||
"src/js/**/*.{js,ts}",
|
||||
"src/install/PackageManager/scanner-entry.ts"
|
||||
]
|
||||
},
|
||||
{
|
||||
"output": "JavaScriptCodegenSources.txt",
|
||||
|
||||
@@ -1,506 +0,0 @@
|
||||
packages/bun-usockets/src/crypto/root_certs.cpp
|
||||
packages/bun-usockets/src/crypto/sni_tree.cpp
|
||||
src/bake/BakeGlobalObject.cpp
|
||||
src/bake/BakeProduction.cpp
|
||||
src/bake/BakeSourceProvider.cpp
|
||||
src/bun.js/bindings/ActiveDOMCallback.cpp
|
||||
src/bun.js/bindings/AsymmetricKeyValue.cpp
|
||||
src/bun.js/bindings/AsyncContextFrame.cpp
|
||||
src/bun.js/bindings/Base64Helpers.cpp
|
||||
src/bun.js/bindings/bindings.cpp
|
||||
src/bun.js/bindings/blob.cpp
|
||||
src/bun.js/bindings/bun-simdutf.cpp
|
||||
src/bun.js/bindings/bun-spawn.cpp
|
||||
src/bun.js/bindings/BunClientData.cpp
|
||||
src/bun.js/bindings/BunCommonStrings.cpp
|
||||
src/bun.js/bindings/BunDebugger.cpp
|
||||
src/bun.js/bindings/BunGCOutputConstraint.cpp
|
||||
src/bun.js/bindings/BunGlobalScope.cpp
|
||||
src/bun.js/bindings/BunHttp2CommonStrings.cpp
|
||||
src/bun.js/bindings/BunInjectedScriptHost.cpp
|
||||
src/bun.js/bindings/BunInspector.cpp
|
||||
src/bun.js/bindings/BunJSCEventLoop.cpp
|
||||
src/bun.js/bindings/BunObject.cpp
|
||||
src/bun.js/bindings/BunPlugin.cpp
|
||||
src/bun.js/bindings/BunProcess.cpp
|
||||
src/bun.js/bindings/BunString.cpp
|
||||
src/bun.js/bindings/BunWorkerGlobalScope.cpp
|
||||
src/bun.js/bindings/c-bindings.cpp
|
||||
src/bun.js/bindings/CallSite.cpp
|
||||
src/bun.js/bindings/CallSitePrototype.cpp
|
||||
src/bun.js/bindings/CatchScopeBinding.cpp
|
||||
src/bun.js/bindings/CodeCoverage.cpp
|
||||
src/bun.js/bindings/ConsoleObject.cpp
|
||||
src/bun.js/bindings/Cookie.cpp
|
||||
src/bun.js/bindings/CookieMap.cpp
|
||||
src/bun.js/bindings/coroutine.cpp
|
||||
src/bun.js/bindings/CPUFeatures.cpp
|
||||
src/bun.js/bindings/decodeURIComponentSIMD.cpp
|
||||
src/bun.js/bindings/DOMException.cpp
|
||||
src/bun.js/bindings/DOMFormData.cpp
|
||||
src/bun.js/bindings/DOMURL.cpp
|
||||
src/bun.js/bindings/DOMWrapperWorld.cpp
|
||||
src/bun.js/bindings/DoubleFormatter.cpp
|
||||
src/bun.js/bindings/EncodeURIComponent.cpp
|
||||
src/bun.js/bindings/EncodingTables.cpp
|
||||
src/bun.js/bindings/ErrorCode.cpp
|
||||
src/bun.js/bindings/ErrorStackFrame.cpp
|
||||
src/bun.js/bindings/ErrorStackTrace.cpp
|
||||
src/bun.js/bindings/EventLoopTaskNoContext.cpp
|
||||
src/bun.js/bindings/ExposeNodeModuleGlobals.cpp
|
||||
src/bun.js/bindings/ffi.cpp
|
||||
src/bun.js/bindings/helpers.cpp
|
||||
src/bun.js/bindings/highway_strings.cpp
|
||||
src/bun.js/bindings/HTMLEntryPoint.cpp
|
||||
src/bun.js/bindings/ImportMetaObject.cpp
|
||||
src/bun.js/bindings/inlines.cpp
|
||||
src/bun.js/bindings/InspectorBunFrontendDevServerAgent.cpp
|
||||
src/bun.js/bindings/InspectorHTTPServerAgent.cpp
|
||||
src/bun.js/bindings/InspectorLifecycleAgent.cpp
|
||||
src/bun.js/bindings/InspectorTestReporterAgent.cpp
|
||||
src/bun.js/bindings/InternalForTesting.cpp
|
||||
src/bun.js/bindings/InternalModuleRegistry.cpp
|
||||
src/bun.js/bindings/IPC.cpp
|
||||
src/bun.js/bindings/isBuiltinModule.cpp
|
||||
src/bun.js/bindings/JS2Native.cpp
|
||||
src/bun.js/bindings/JSBigIntBinding.cpp
|
||||
src/bun.js/bindings/JSBuffer.cpp
|
||||
src/bun.js/bindings/JSBufferEncodingType.cpp
|
||||
src/bun.js/bindings/JSBufferList.cpp
|
||||
src/bun.js/bindings/JSBundlerPlugin.cpp
|
||||
src/bun.js/bindings/JSBunRequest.cpp
|
||||
src/bun.js/bindings/JSCommonJSExtensions.cpp
|
||||
src/bun.js/bindings/JSCommonJSModule.cpp
|
||||
src/bun.js/bindings/JSCTaskScheduler.cpp
|
||||
src/bun.js/bindings/JSCTestingHelpers.cpp
|
||||
src/bun.js/bindings/JSDOMExceptionHandling.cpp
|
||||
src/bun.js/bindings/JSDOMFile.cpp
|
||||
src/bun.js/bindings/JSDOMGlobalObject.cpp
|
||||
src/bun.js/bindings/JSDOMWrapper.cpp
|
||||
src/bun.js/bindings/JSDOMWrapperCache.cpp
|
||||
src/bun.js/bindings/JSEnvironmentVariableMap.cpp
|
||||
src/bun.js/bindings/JSFFIFunction.cpp
|
||||
src/bun.js/bindings/JSMockFunction.cpp
|
||||
src/bun.js/bindings/JSNextTickQueue.cpp
|
||||
src/bun.js/bindings/JSNodePerformanceHooksHistogram.cpp
|
||||
src/bun.js/bindings/JSNodePerformanceHooksHistogramConstructor.cpp
|
||||
src/bun.js/bindings/JSNodePerformanceHooksHistogramPrototype.cpp
|
||||
src/bun.js/bindings/JSPropertyIterator.cpp
|
||||
src/bun.js/bindings/JSS3File.cpp
|
||||
src/bun.js/bindings/JSSecrets.cpp
|
||||
src/bun.js/bindings/JSSocketAddressDTO.cpp
|
||||
src/bun.js/bindings/JSStringDecoder.cpp
|
||||
src/bun.js/bindings/JSWrappingFunction.cpp
|
||||
src/bun.js/bindings/JSX509Certificate.cpp
|
||||
src/bun.js/bindings/JSX509CertificateConstructor.cpp
|
||||
src/bun.js/bindings/JSX509CertificatePrototype.cpp
|
||||
src/bun.js/bindings/linux_perf_tracing.cpp
|
||||
src/bun.js/bindings/MarkedArgumentBufferBinding.cpp
|
||||
src/bun.js/bindings/MarkingConstraint.cpp
|
||||
src/bun.js/bindings/ModuleLoader.cpp
|
||||
src/bun.js/bindings/napi_external.cpp
|
||||
src/bun.js/bindings/napi_finalizer.cpp
|
||||
src/bun.js/bindings/napi_handle_scope.cpp
|
||||
src/bun.js/bindings/napi_type_tag.cpp
|
||||
src/bun.js/bindings/napi.cpp
|
||||
src/bun.js/bindings/NapiClass.cpp
|
||||
src/bun.js/bindings/NapiRef.cpp
|
||||
src/bun.js/bindings/NapiWeakValue.cpp
|
||||
src/bun.js/bindings/ncrpyto_engine.cpp
|
||||
src/bun.js/bindings/ncrypto.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoDhJob.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenDhKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenDsaKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenEcKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenNidKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenRsaKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoHkdf.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoKeygen.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoKeys.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoPrimes.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoSignJob.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoUtil.cpp
|
||||
src/bun.js/bindings/node/crypto/JSCipher.cpp
|
||||
src/bun.js/bindings/node/crypto/JSCipherConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSCipherPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellman.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanGroup.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanGroupConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanGroupPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSECDH.cpp
|
||||
src/bun.js/bindings/node/crypto/JSECDHConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSECDHPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSHash.cpp
|
||||
src/bun.js/bindings/node/crypto/JSHmac.cpp
|
||||
src/bun.js/bindings/node/crypto/JSKeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/JSKeyObjectConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSKeyObjectPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPrivateKeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPrivateKeyObjectConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPrivateKeyObjectPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPublicKeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPublicKeyObjectConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPublicKeyObjectPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSSecretKeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/JSSecretKeyObjectConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSSecretKeyObjectPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSSign.cpp
|
||||
src/bun.js/bindings/node/crypto/JSVerify.cpp
|
||||
src/bun.js/bindings/node/crypto/KeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/node_crypto_binding.cpp
|
||||
src/bun.js/bindings/node/http/JSConnectionsList.cpp
|
||||
src/bun.js/bindings/node/http/JSConnectionsListConstructor.cpp
|
||||
src/bun.js/bindings/node/http/JSConnectionsListPrototype.cpp
|
||||
src/bun.js/bindings/node/http/JSHTTPParser.cpp
|
||||
src/bun.js/bindings/node/http/JSHTTPParserConstructor.cpp
|
||||
src/bun.js/bindings/node/http/JSHTTPParserPrototype.cpp
|
||||
src/bun.js/bindings/node/http/NodeHTTPParser.cpp
|
||||
src/bun.js/bindings/node/NodeTimers.cpp
|
||||
src/bun.js/bindings/NodeAsyncHooks.cpp
|
||||
src/bun.js/bindings/NodeDirent.cpp
|
||||
src/bun.js/bindings/NodeFetch.cpp
|
||||
src/bun.js/bindings/NodeFSStatBinding.cpp
|
||||
src/bun.js/bindings/NodeFSStatFSBinding.cpp
|
||||
src/bun.js/bindings/NodeHTTP.cpp
|
||||
src/bun.js/bindings/NodeTimerObject.cpp
|
||||
src/bun.js/bindings/NodeTLS.cpp
|
||||
src/bun.js/bindings/NodeURL.cpp
|
||||
src/bun.js/bindings/NodeValidator.cpp
|
||||
src/bun.js/bindings/NodeVM.cpp
|
||||
src/bun.js/bindings/NodeVMModule.cpp
|
||||
src/bun.js/bindings/NodeVMScript.cpp
|
||||
src/bun.js/bindings/NodeVMSourceTextModule.cpp
|
||||
src/bun.js/bindings/NodeVMSyntheticModule.cpp
|
||||
src/bun.js/bindings/NoOpForTesting.cpp
|
||||
src/bun.js/bindings/ObjectBindings.cpp
|
||||
src/bun.js/bindings/objects.cpp
|
||||
src/bun.js/bindings/OsBinding.cpp
|
||||
src/bun.js/bindings/Path.cpp
|
||||
src/bun.js/bindings/ProcessBindingBuffer.cpp
|
||||
src/bun.js/bindings/ProcessBindingConstants.cpp
|
||||
src/bun.js/bindings/ProcessBindingFs.cpp
|
||||
src/bun.js/bindings/ProcessBindingHTTPParser.cpp
|
||||
src/bun.js/bindings/ProcessBindingNatives.cpp
|
||||
src/bun.js/bindings/ProcessBindingTTYWrap.cpp
|
||||
src/bun.js/bindings/ProcessBindingUV.cpp
|
||||
src/bun.js/bindings/ProcessIdentifier.cpp
|
||||
src/bun.js/bindings/RegularExpression.cpp
|
||||
src/bun.js/bindings/S3Error.cpp
|
||||
src/bun.js/bindings/ScriptExecutionContext.cpp
|
||||
src/bun.js/bindings/SecretsDarwin.cpp
|
||||
src/bun.js/bindings/SecretsLinux.cpp
|
||||
src/bun.js/bindings/SecretsWindows.cpp
|
||||
src/bun.js/bindings/Serialization.cpp
|
||||
src/bun.js/bindings/ServerRouteList.cpp
|
||||
src/bun.js/bindings/spawn.cpp
|
||||
src/bun.js/bindings/SQLClient.cpp
|
||||
src/bun.js/bindings/sqlite/JSSQLStatement.cpp
|
||||
src/bun.js/bindings/StringBuilderBinding.cpp
|
||||
src/bun.js/bindings/stripANSI.cpp
|
||||
src/bun.js/bindings/Strong.cpp
|
||||
src/bun.js/bindings/TextCodec.cpp
|
||||
src/bun.js/bindings/TextCodecCJK.cpp
|
||||
src/bun.js/bindings/TextCodecReplacement.cpp
|
||||
src/bun.js/bindings/TextCodecSingleByte.cpp
|
||||
src/bun.js/bindings/TextCodecUserDefined.cpp
|
||||
src/bun.js/bindings/TextCodecWrapper.cpp
|
||||
src/bun.js/bindings/TextEncoding.cpp
|
||||
src/bun.js/bindings/TextEncodingRegistry.cpp
|
||||
src/bun.js/bindings/Uint8Array.cpp
|
||||
src/bun.js/bindings/Undici.cpp
|
||||
src/bun.js/bindings/URLDecomposition.cpp
|
||||
src/bun.js/bindings/URLSearchParams.cpp
|
||||
src/bun.js/bindings/UtilInspect.cpp
|
||||
src/bun.js/bindings/v8/node.cpp
|
||||
src/bun.js/bindings/v8/shim/Function.cpp
|
||||
src/bun.js/bindings/v8/shim/FunctionTemplate.cpp
|
||||
src/bun.js/bindings/v8/shim/GlobalInternals.cpp
|
||||
src/bun.js/bindings/v8/shim/Handle.cpp
|
||||
src/bun.js/bindings/v8/shim/HandleScopeBuffer.cpp
|
||||
src/bun.js/bindings/v8/shim/InternalFieldObject.cpp
|
||||
src/bun.js/bindings/v8/shim/Map.cpp
|
||||
src/bun.js/bindings/v8/shim/ObjectTemplate.cpp
|
||||
src/bun.js/bindings/v8/shim/Oddball.cpp
|
||||
src/bun.js/bindings/v8/shim/TaggedPointer.cpp
|
||||
src/bun.js/bindings/v8/v8_api_internal.cpp
|
||||
src/bun.js/bindings/v8/v8_internal.cpp
|
||||
src/bun.js/bindings/v8/V8Array.cpp
|
||||
src/bun.js/bindings/v8/V8Boolean.cpp
|
||||
src/bun.js/bindings/v8/V8Context.cpp
|
||||
src/bun.js/bindings/v8/V8EscapableHandleScope.cpp
|
||||
src/bun.js/bindings/v8/V8EscapableHandleScopeBase.cpp
|
||||
src/bun.js/bindings/v8/V8External.cpp
|
||||
src/bun.js/bindings/v8/V8Function.cpp
|
||||
src/bun.js/bindings/v8/V8FunctionCallbackInfo.cpp
|
||||
src/bun.js/bindings/v8/V8FunctionTemplate.cpp
|
||||
src/bun.js/bindings/v8/V8HandleScope.cpp
|
||||
src/bun.js/bindings/v8/V8Isolate.cpp
|
||||
src/bun.js/bindings/v8/V8Local.cpp
|
||||
src/bun.js/bindings/v8/V8Maybe.cpp
|
||||
src/bun.js/bindings/v8/V8Number.cpp
|
||||
src/bun.js/bindings/v8/V8Object.cpp
|
||||
src/bun.js/bindings/v8/V8ObjectTemplate.cpp
|
||||
src/bun.js/bindings/v8/V8String.cpp
|
||||
src/bun.js/bindings/v8/V8Template.cpp
|
||||
src/bun.js/bindings/v8/V8Value.cpp
|
||||
src/bun.js/bindings/Weak.cpp
|
||||
src/bun.js/bindings/webcore/AbortController.cpp
|
||||
src/bun.js/bindings/webcore/AbortSignal.cpp
|
||||
src/bun.js/bindings/webcore/ActiveDOMObject.cpp
|
||||
src/bun.js/bindings/webcore/BroadcastChannel.cpp
|
||||
src/bun.js/bindings/webcore/BunBroadcastChannelRegistry.cpp
|
||||
src/bun.js/bindings/webcore/CloseEvent.cpp
|
||||
src/bun.js/bindings/webcore/CommonAtomStrings.cpp
|
||||
src/bun.js/bindings/webcore/ContextDestructionObserver.cpp
|
||||
src/bun.js/bindings/webcore/CustomEvent.cpp
|
||||
src/bun.js/bindings/webcore/CustomEventCustom.cpp
|
||||
src/bun.js/bindings/webcore/DOMJITHelpers.cpp
|
||||
src/bun.js/bindings/webcore/ErrorCallback.cpp
|
||||
src/bun.js/bindings/webcore/ErrorEvent.cpp
|
||||
src/bun.js/bindings/webcore/Event.cpp
|
||||
src/bun.js/bindings/webcore/EventContext.cpp
|
||||
src/bun.js/bindings/webcore/EventDispatcher.cpp
|
||||
src/bun.js/bindings/webcore/EventEmitter.cpp
|
||||
src/bun.js/bindings/webcore/EventFactory.cpp
|
||||
src/bun.js/bindings/webcore/EventListenerMap.cpp
|
||||
src/bun.js/bindings/webcore/EventNames.cpp
|
||||
src/bun.js/bindings/webcore/EventPath.cpp
|
||||
src/bun.js/bindings/webcore/EventTarget.cpp
|
||||
src/bun.js/bindings/webcore/EventTargetConcrete.cpp
|
||||
src/bun.js/bindings/webcore/EventTargetFactory.cpp
|
||||
src/bun.js/bindings/webcore/FetchHeaders.cpp
|
||||
src/bun.js/bindings/webcore/HeaderFieldTokenizer.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderField.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderIdentifiers.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderMap.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderNames.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderStrings.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderValues.cpp
|
||||
src/bun.js/bindings/webcore/HTTPParsers.cpp
|
||||
src/bun.js/bindings/webcore/IdentifierEventListenerMap.cpp
|
||||
src/bun.js/bindings/webcore/InternalWritableStream.cpp
|
||||
src/bun.js/bindings/webcore/JSAbortAlgorithm.cpp
|
||||
src/bun.js/bindings/webcore/JSAbortController.cpp
|
||||
src/bun.js/bindings/webcore/JSAbortSignal.cpp
|
||||
src/bun.js/bindings/webcore/JSAbortSignalCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSAddEventListenerOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSBroadcastChannel.cpp
|
||||
src/bun.js/bindings/webcore/JSByteLengthQueuingStrategy.cpp
|
||||
src/bun.js/bindings/webcore/JSCallbackData.cpp
|
||||
src/bun.js/bindings/webcore/JSCloseEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSCookie.cpp
|
||||
src/bun.js/bindings/webcore/JSCookieMap.cpp
|
||||
src/bun.js/bindings/webcore/JSCountQueuingStrategy.cpp
|
||||
src/bun.js/bindings/webcore/JSCustomEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMBindingInternalsBuiltins.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMBuiltinConstructorBase.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConstructorBase.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConvertDate.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConvertNumbers.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConvertStrings.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConvertWebGL.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMException.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMFormData.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMGuardedObject.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMIterator.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMOperation.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMPromise.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMPromiseDeferred.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMURL.cpp
|
||||
src/bun.js/bindings/webcore/JSErrorCallback.cpp
|
||||
src/bun.js/bindings/webcore/JSErrorEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSErrorEventCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSErrorHandler.cpp
|
||||
src/bun.js/bindings/webcore/JSEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSEventCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSEventDOMJIT.cpp
|
||||
src/bun.js/bindings/webcore/JSEventEmitter.cpp
|
||||
src/bun.js/bindings/webcore/JSEventEmitterCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSEventInit.cpp
|
||||
src/bun.js/bindings/webcore/JSEventListener.cpp
|
||||
src/bun.js/bindings/webcore/JSEventListenerOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSEventModifierInit.cpp
|
||||
src/bun.js/bindings/webcore/JSEventTarget.cpp
|
||||
src/bun.js/bindings/webcore/JSEventTargetCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSEventTargetNode.cpp
|
||||
src/bun.js/bindings/webcore/JSFetchHeaders.cpp
|
||||
src/bun.js/bindings/webcore/JSMessageChannel.cpp
|
||||
src/bun.js/bindings/webcore/JSMessageChannelCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSMessageEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSMessageEventCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSMessagePort.cpp
|
||||
src/bun.js/bindings/webcore/JSMessagePortCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSMIMEBindings.cpp
|
||||
src/bun.js/bindings/webcore/JSMIMEParams.cpp
|
||||
src/bun.js/bindings/webcore/JSMIMEType.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformance.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceEntry.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceEntryCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceMark.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceMarkOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceMeasure.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceMeasureOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceObserver.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceObserverCallback.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceObserverCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceObserverEntryList.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceResourceTiming.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceServerTiming.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceTiming.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableByteStreamController.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStream.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamBYOBReader.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamBYOBRequest.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamDefaultReader.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamSink.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamSource.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamSourceCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSStructuredSerializeOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSTextDecoderStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTextEncoder.cpp
|
||||
src/bun.js/bindings/webcore/JSTextEncoderStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTransformStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTransformStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/JSURLSearchParams.cpp
|
||||
src/bun.js/bindings/webcore/JSWasmStreamingCompiler.cpp
|
||||
src/bun.js/bindings/webcore/JSWebSocket.cpp
|
||||
src/bun.js/bindings/webcore/JSWorker.cpp
|
||||
src/bun.js/bindings/webcore/JSWorkerOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSWritableStream.cpp
|
||||
src/bun.js/bindings/webcore/JSWritableStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/JSWritableStreamDefaultWriter.cpp
|
||||
src/bun.js/bindings/webcore/JSWritableStreamSink.cpp
|
||||
src/bun.js/bindings/webcore/MessageChannel.cpp
|
||||
src/bun.js/bindings/webcore/MessageEvent.cpp
|
||||
src/bun.js/bindings/webcore/MessagePort.cpp
|
||||
src/bun.js/bindings/webcore/MessagePortChannel.cpp
|
||||
src/bun.js/bindings/webcore/MessagePortChannelProvider.cpp
|
||||
src/bun.js/bindings/webcore/MessagePortChannelProviderImpl.cpp
|
||||
src/bun.js/bindings/webcore/MessagePortChannelRegistry.cpp
|
||||
src/bun.js/bindings/webcore/NetworkLoadMetrics.cpp
|
||||
src/bun.js/bindings/webcore/Performance.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceEntry.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceMark.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceMeasure.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceObserver.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceObserverEntryList.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceResourceTiming.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceServerTiming.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceTiming.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceUserTiming.cpp
|
||||
src/bun.js/bindings/webcore/ReadableStream.cpp
|
||||
src/bun.js/bindings/webcore/ReadableStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/ReadableStreamSink.cpp
|
||||
src/bun.js/bindings/webcore/ReadableStreamSource.cpp
|
||||
src/bun.js/bindings/webcore/ResourceTiming.cpp
|
||||
src/bun.js/bindings/webcore/RFC7230.cpp
|
||||
src/bun.js/bindings/webcore/SerializedScriptValue.cpp
|
||||
src/bun.js/bindings/webcore/ServerTiming.cpp
|
||||
src/bun.js/bindings/webcore/ServerTimingParser.cpp
|
||||
src/bun.js/bindings/webcore/StructuredClone.cpp
|
||||
src/bun.js/bindings/webcore/TextEncoder.cpp
|
||||
src/bun.js/bindings/webcore/WebCoreTypedArrayController.cpp
|
||||
src/bun.js/bindings/webcore/WebSocket.cpp
|
||||
src/bun.js/bindings/webcore/Worker.cpp
|
||||
src/bun.js/bindings/webcore/WritableStream.cpp
|
||||
src/bun.js/bindings/webcrypto/CommonCryptoDERUtilities.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CBC.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CBCOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CFB.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CFBOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CTR.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CTROpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_GCM.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_GCMOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_KW.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_KWOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDH.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDHOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSA.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSAOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmEd25519.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmHKDF.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmHKDFOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmHMAC.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmPBKDF2.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmPBKDF2OpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistry.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistryOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_OAEP.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_OAEPOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSS.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSSOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSAES_PKCS1_v1_5.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSAES_PKCS1_v1_5OpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5OpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA1.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA224.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA256.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA384.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA512.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmX25519.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoDigest.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKey.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyAES.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyEC.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyECOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyHMAC.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyOKP.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyOKPOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyRaw.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyRSA.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyRSAComponents.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyRSAOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/JSAesCbcCfbParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSAesCtrParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSAesGcmParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSAesKeyParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoAesKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoAlgorithmParameters.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoEcKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoHmacKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoKey.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoKeyPair.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoKeyUsage.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoRsaHashedKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoRsaKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSEcdhKeyDeriveParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSEcdsaParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSEcKeyParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSHkdfParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSHmacKeyParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSJsonWebKey.cpp
|
||||
src/bun.js/bindings/webcrypto/JSPbkdf2Params.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaHashedImportParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaHashedKeyGenParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaKeyGenParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaOaepParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaOtherPrimesInfo.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaPssParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSSubtleCrypto.cpp
|
||||
src/bun.js/bindings/webcrypto/JSX25519Params.cpp
|
||||
src/bun.js/bindings/webcrypto/OpenSSLUtilities.cpp
|
||||
src/bun.js/bindings/webcrypto/PhonyWorkQueue.cpp
|
||||
src/bun.js/bindings/webcrypto/SerializedCryptoKeyWrapOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/SubtleCrypto.cpp
|
||||
src/bun.js/bindings/workaround-missing-symbols.cpp
|
||||
src/bun.js/bindings/wtf-bindings.cpp
|
||||
src/bun.js/bindings/ZigGeneratedCode.cpp
|
||||
src/bun.js/bindings/ZigGlobalObject.cpp
|
||||
src/bun.js/bindings/ZigSourceProvider.cpp
|
||||
src/bun.js/modules/NodeModuleModule.cpp
|
||||
src/bun.js/modules/NodeTTYModule.cpp
|
||||
src/bun.js/modules/NodeUtilTypesModule.cpp
|
||||
src/bun.js/modules/ObjectModule.cpp
|
||||
src/deps/libuwsockets.cpp
|
||||
src/io/io_darwin.cpp
|
||||
src/vm/Semaphore.cpp
|
||||
src/vm/SigintWatcher.cpp
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
oven-sh/boringssl
|
||||
COMMIT
|
||||
7a5d984c69b0c34c4cbb56c6812eaa5b9bef485c
|
||||
f1ffd9e83d4f5c28a9c70d73f9a4e6fcf310062f
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
|
||||
@@ -2,6 +2,8 @@ include(PathUtils)
|
||||
|
||||
if(DEBUG)
|
||||
set(bun bun-debug)
|
||||
elseif(ENABLE_ASAN AND ENABLE_VALGRIND)
|
||||
set(bun bun-asan-valgrind)
|
||||
elseif(ENABLE_ASAN)
|
||||
set(bun bun-asan)
|
||||
elseif(ENABLE_VALGRIND)
|
||||
@@ -619,6 +621,7 @@ register_command(
|
||||
-Dcpu=${ZIG_CPU}
|
||||
-Denable_logs=$<IF:$<BOOL:${ENABLE_LOGS}>,true,false>
|
||||
-Denable_asan=$<IF:$<BOOL:${ENABLE_ZIG_ASAN}>,true,false>
|
||||
-Denable_valgrind=$<IF:$<BOOL:${ENABLE_VALGRIND}>,true,false>
|
||||
-Dversion=${VERSION}
|
||||
-Dreported_nodejs_version=${NODEJS_VERSION}
|
||||
-Dcanary=${CANARY_REVISION}
|
||||
@@ -636,6 +639,7 @@ register_command(
|
||||
SOURCES
|
||||
${BUN_ZIG_SOURCES}
|
||||
${BUN_ZIG_GENERATED_SOURCES}
|
||||
${CWD}/src/install/PackageManager/scanner-entry.ts # Is there a better way to do this?
|
||||
)
|
||||
|
||||
set_property(TARGET bun-zig PROPERTY JOB_POOL compile_pool)
|
||||
@@ -885,12 +889,8 @@ if(NOT WIN32)
|
||||
endif()
|
||||
|
||||
if(ENABLE_ASAN)
|
||||
target_compile_options(${bun} PUBLIC
|
||||
-fsanitize=address
|
||||
)
|
||||
target_link_libraries(${bun} PUBLIC
|
||||
-fsanitize=address
|
||||
)
|
||||
target_compile_options(${bun} PUBLIC -fsanitize=address)
|
||||
target_link_libraries(${bun} PUBLIC -fsanitize=address)
|
||||
endif()
|
||||
|
||||
target_compile_options(${bun} PUBLIC
|
||||
@@ -929,12 +929,8 @@ if(NOT WIN32)
|
||||
)
|
||||
|
||||
if(ENABLE_ASAN)
|
||||
target_compile_options(${bun} PUBLIC
|
||||
-fsanitize=address
|
||||
)
|
||||
target_link_libraries(${bun} PUBLIC
|
||||
-fsanitize=address
|
||||
)
|
||||
target_compile_options(${bun} PUBLIC -fsanitize=address)
|
||||
target_link_libraries(${bun} PUBLIC -fsanitize=address)
|
||||
endif()
|
||||
endif()
|
||||
else()
|
||||
@@ -968,6 +964,7 @@ if(WIN32)
|
||||
/delayload:WSOCK32.dll
|
||||
/delayload:ADVAPI32.dll
|
||||
/delayload:IPHLPAPI.dll
|
||||
/delayload:CRYPT32.dll
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
@@ -1009,6 +1006,7 @@ if(LINUX)
|
||||
-Wl,--wrap=exp2
|
||||
-Wl,--wrap=expf
|
||||
-Wl,--wrap=fcntl64
|
||||
-Wl,--wrap=gettid
|
||||
-Wl,--wrap=log
|
||||
-Wl,--wrap=log2
|
||||
-Wl,--wrap=log2f
|
||||
@@ -1060,7 +1058,7 @@ if(LINUX)
|
||||
)
|
||||
endif()
|
||||
|
||||
if (NOT DEBUG AND NOT ENABLE_ASAN)
|
||||
if (NOT DEBUG AND NOT ENABLE_ASAN AND NOT ENABLE_VALGRIND)
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,-icf=safe
|
||||
)
|
||||
@@ -1125,6 +1123,9 @@ endif()
|
||||
|
||||
include_directories(${WEBKIT_INCLUDE_PATH})
|
||||
|
||||
# Include the generated dependency versions header
|
||||
include_directories(${CMAKE_BINARY_DIR})
|
||||
|
||||
if(NOT WEBKIT_LOCAL AND NOT APPLE)
|
||||
include_directories(${WEBKIT_INCLUDE_PATH}/wtf/unicode)
|
||||
endif()
|
||||
@@ -1184,6 +1185,7 @@ if(WIN32)
|
||||
ntdll
|
||||
userenv
|
||||
dbghelp
|
||||
crypt32
|
||||
wsock32 # ws2_32 required by TransmitFile aka sendfile on windows
|
||||
delayimp.lib
|
||||
)
|
||||
@@ -1359,12 +1361,20 @@ if(NOT BUN_CPP_ONLY)
|
||||
if(ENABLE_BASELINE)
|
||||
set(bunTriplet ${bunTriplet}-baseline)
|
||||
endif()
|
||||
if(ENABLE_ASAN)
|
||||
|
||||
if (ENABLE_ASAN AND ENABLE_VALGRIND)
|
||||
set(bunTriplet ${bunTriplet}-asan-valgrind)
|
||||
set(bunPath ${bunTriplet})
|
||||
elseif (ENABLE_VALGRIND)
|
||||
set(bunTriplet ${bunTriplet}-valgrind)
|
||||
set(bunPath ${bunTriplet})
|
||||
elseif(ENABLE_ASAN)
|
||||
set(bunTriplet ${bunTriplet}-asan)
|
||||
set(bunPath ${bunTriplet})
|
||||
else()
|
||||
string(REPLACE bun ${bunTriplet} bunPath ${bun})
|
||||
endif()
|
||||
|
||||
set(bunFiles ${bunExe} features.json)
|
||||
if(WIN32)
|
||||
list(APPEND bunFiles ${bun}.pdb)
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
HdrHistogram/HdrHistogram_c
|
||||
COMMIT
|
||||
8dcce8f68512fca460b171bccc3a5afce0048779
|
||||
be60a9987ee48d0abf0d7b6a175bad8d6c1585d1
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
|
||||
@@ -4,7 +4,8 @@ register_repository(
|
||||
REPOSITORY
|
||||
libuv/libuv
|
||||
COMMIT
|
||||
da527d8d2a908b824def74382761566371439003
|
||||
# Corresponds to v1.51.0
|
||||
5152db2cbfeb5582e9c27c5ea1dba2cd9e10759b
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
|
||||
220
cmake/tools/GenerateDependencyVersions.cmake
Normal file
@@ -0,0 +1,220 @@
|
||||
# GenerateDependencyVersions.cmake
|
||||
# Generates a header file with all dependency versions
|
||||
|
||||
# Function to extract version from git tree object
|
||||
function(get_git_tree_hash dep_name output_var)
|
||||
execute_process(
|
||||
COMMAND git rev-parse HEAD:./src/deps/${dep_name}
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
OUTPUT_VARIABLE commit_hash
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
ERROR_QUIET
|
||||
RESULT_VARIABLE result
|
||||
)
|
||||
if(result EQUAL 0 AND commit_hash)
|
||||
set(${output_var} "${commit_hash}" PARENT_SCOPE)
|
||||
else()
|
||||
set(${output_var} "unknown" PARENT_SCOPE)
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
# Function to extract version from header file using regex
|
||||
function(extract_version_from_header header_file regex_pattern output_var)
|
||||
if(EXISTS "${header_file}")
|
||||
file(STRINGS "${header_file}" version_line REGEX "${regex_pattern}")
|
||||
if(version_line)
|
||||
string(REGEX MATCH "${regex_pattern}" _match "${version_line}")
|
||||
if(CMAKE_MATCH_1)
|
||||
set(${output_var} "${CMAKE_MATCH_1}" PARENT_SCOPE)
|
||||
else()
|
||||
set(${output_var} "unknown" PARENT_SCOPE)
|
||||
endif()
|
||||
else()
|
||||
set(${output_var} "unknown" PARENT_SCOPE)
|
||||
endif()
|
||||
else()
|
||||
set(${output_var} "unknown" PARENT_SCOPE)
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
# Main function to generate the header file
|
||||
function(generate_dependency_versions_header)
|
||||
set(DEPS_PATH "${CMAKE_SOURCE_DIR}/src/deps")
|
||||
set(VENDOR_PATH "${CMAKE_SOURCE_DIR}/vendor")
|
||||
|
||||
# Initialize version variables
|
||||
set(DEPENDENCY_VERSIONS "")
|
||||
|
||||
# WebKit version (from SetupWebKit.cmake or command line)
|
||||
if(WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION_STR "${WEBKIT_VERSION}")
|
||||
else()
|
||||
set(WEBKIT_VERSION_STR "0ddf6f47af0a9782a354f61e06d7f83d097d9f84")
|
||||
endif()
|
||||
list(APPEND DEPENDENCY_VERSIONS "WEBKIT" "${WEBKIT_VERSION_STR}")
|
||||
|
||||
# Track input files so CMake reconfigures when they change
|
||||
set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS
|
||||
"${CMAKE_SOURCE_DIR}/package.json"
|
||||
"${VENDOR_PATH}/libdeflate/libdeflate.h"
|
||||
"${VENDOR_PATH}/zlib/zlib.h"
|
||||
"${DEPS_PATH}/zstd/lib/zstd.h"
|
||||
)
|
||||
|
||||
# Hardcoded dependency versions (previously from generated_versions_list.zig)
|
||||
# These are the commit hashes/tree objects for each dependency
|
||||
list(APPEND DEPENDENCY_VERSIONS "BORINGSSL" "29a2cd359458c9384694b75456026e4b57e3e567")
|
||||
list(APPEND DEPENDENCY_VERSIONS "C_ARES" "d1722e6e8acaf10eb73fa995798a9cd421d9f85e")
|
||||
list(APPEND DEPENDENCY_VERSIONS "LIBARCHIVE" "898dc8319355b7e985f68a9819f182aaed61b53a")
|
||||
list(APPEND DEPENDENCY_VERSIONS "LIBDEFLATE_HASH" "dc76454a39e7e83b68c3704b6e3784654f8d5ac5")
|
||||
list(APPEND DEPENDENCY_VERSIONS "LOLHTML" "8d4c273ded322193d017042d1f48df2766b0f88b")
|
||||
list(APPEND DEPENDENCY_VERSIONS "LSHPACK" "3d0f1fc1d6e66a642e7a98c55deb38aa986eb4b0")
|
||||
list(APPEND DEPENDENCY_VERSIONS "MIMALLOC" "4c283af60cdae205df5a872530c77e2a6a307d43")
|
||||
list(APPEND DEPENDENCY_VERSIONS "PICOHTTPPARSER" "066d2b1e9ab820703db0837a7255d92d30f0c9f5")
|
||||
list(APPEND DEPENDENCY_VERSIONS "TINYCC" "ab631362d839333660a265d3084d8ff060b96753")
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZLIB_HASH" "886098f3f339617b4243b286f5ed364b9989e245")
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZSTD_HASH" "794ea1b0afca0f020f4e57b6732332231fb23c70")
|
||||
|
||||
# Extract semantic versions from header files where available
|
||||
extract_version_from_header(
|
||||
"${VENDOR_PATH}/libdeflate/libdeflate.h"
|
||||
"#define LIBDEFLATE_VERSION_STRING[ \t]+\"([0-9\\.]+)\""
|
||||
LIBDEFLATE_VERSION_STRING
|
||||
)
|
||||
list(APPEND DEPENDENCY_VERSIONS "LIBDEFLATE_VERSION" "${LIBDEFLATE_VERSION_STRING}")
|
||||
|
||||
extract_version_from_header(
|
||||
"${VENDOR_PATH}/zlib/zlib.h"
|
||||
"#define[ \t]+ZLIB_VERSION[ \t]+\"([^\"]+)\""
|
||||
ZLIB_VERSION_STRING
|
||||
)
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZLIB_VERSION" "${ZLIB_VERSION_STRING}")
|
||||
|
||||
extract_version_from_header(
|
||||
"${DEPS_PATH}/zstd/lib/zstd.h"
|
||||
"#define[ \t]+ZSTD_VERSION_STRING[ \t]+\"([^\"]+)\""
|
||||
ZSTD_VERSION_STRING
|
||||
)
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZSTD_VERSION" "${ZSTD_VERSION_STRING}")
|
||||
|
||||
# Bun version from package.json
|
||||
if(EXISTS "${CMAKE_SOURCE_DIR}/package.json")
|
||||
file(READ "${CMAKE_SOURCE_DIR}/package.json" PACKAGE_JSON)
|
||||
string(REGEX MATCH "\"version\"[ \t]*:[ \t]*\"([^\"]+)\"" _ ${PACKAGE_JSON})
|
||||
if(CMAKE_MATCH_1)
|
||||
set(BUN_VERSION_STRING "${CMAKE_MATCH_1}")
|
||||
else()
|
||||
set(BUN_VERSION_STRING "unknown")
|
||||
endif()
|
||||
else()
|
||||
set(BUN_VERSION_STRING "${VERSION}")
|
||||
endif()
|
||||
list(APPEND DEPENDENCY_VERSIONS "BUN_VERSION" "${BUN_VERSION_STRING}")
|
||||
|
||||
# Node.js compatibility version (hardcoded as in the current implementation)
|
||||
set(NODEJS_COMPAT_VERSION "22.12.0")
|
||||
list(APPEND DEPENDENCY_VERSIONS "NODEJS_COMPAT_VERSION" "${NODEJS_COMPAT_VERSION}")
|
||||
|
||||
# Get Bun's git SHA for uws/usockets versions (they use Bun's own SHA)
|
||||
execute_process(
|
||||
COMMAND git rev-parse HEAD
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
OUTPUT_VARIABLE BUN_GIT_SHA
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
ERROR_QUIET
|
||||
)
|
||||
if(NOT BUN_GIT_SHA)
|
||||
set(BUN_GIT_SHA "unknown")
|
||||
endif()
|
||||
list(APPEND DEPENDENCY_VERSIONS "UWS" "${BUN_GIT_SHA}")
|
||||
list(APPEND DEPENDENCY_VERSIONS "USOCKETS" "${BUN_GIT_SHA}")
|
||||
|
||||
# Zig version - hardcoded for now, can be updated as needed
|
||||
# This should match the version of Zig used to build Bun
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZIG" "0.14.1")
|
||||
|
||||
# Generate the header file content
|
||||
set(HEADER_CONTENT "// This file is auto-generated by CMake. Do not edit manually.\n")
|
||||
string(APPEND HEADER_CONTENT "#ifndef BUN_DEPENDENCY_VERSIONS_H\n")
|
||||
string(APPEND HEADER_CONTENT "#define BUN_DEPENDENCY_VERSIONS_H\n\n")
|
||||
string(APPEND HEADER_CONTENT "#ifdef __cplusplus\n")
|
||||
string(APPEND HEADER_CONTENT "extern \"C\" {\n")
|
||||
string(APPEND HEADER_CONTENT "#endif\n\n")
|
||||
string(APPEND HEADER_CONTENT "// Dependency versions\n")
|
||||
|
||||
# Process the version list
|
||||
list(LENGTH DEPENDENCY_VERSIONS num_versions)
|
||||
math(EXPR last_idx "${num_versions} - 1")
|
||||
set(i 0)
|
||||
while(i LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${i} name)
|
||||
math(EXPR value_idx "${i} + 1")
|
||||
if(value_idx LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${value_idx} value)
|
||||
# Only emit #define if value is not "unknown"
|
||||
if(NOT "${value}" STREQUAL "unknown")
|
||||
string(APPEND HEADER_CONTENT "#define BUN_DEP_${name} \"${value}\"\n")
|
||||
endif()
|
||||
endif()
|
||||
math(EXPR i "${i} + 2")
|
||||
endwhile()
|
||||
|
||||
string(APPEND HEADER_CONTENT "\n")
|
||||
string(APPEND HEADER_CONTENT "// C string constants for easy access\n")
|
||||
|
||||
# Create C string constants
|
||||
set(i 0)
|
||||
while(i LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${i} name)
|
||||
math(EXPR value_idx "${i} + 1")
|
||||
if(value_idx LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${value_idx} value)
|
||||
# Only emit constant if value is not "unknown"
|
||||
if(NOT "${value}" STREQUAL "unknown")
|
||||
string(APPEND HEADER_CONTENT "static const char* const BUN_VERSION_${name} = \"${value}\";\n")
|
||||
endif()
|
||||
endif()
|
||||
math(EXPR i "${i} + 2")
|
||||
endwhile()
|
||||
|
||||
string(APPEND HEADER_CONTENT "\n#ifdef __cplusplus\n")
|
||||
string(APPEND HEADER_CONTENT "}\n")
|
||||
string(APPEND HEADER_CONTENT "#endif\n\n")
|
||||
string(APPEND HEADER_CONTENT "#endif // BUN_DEPENDENCY_VERSIONS_H\n")
|
||||
|
||||
# Write the header file only if content has changed
|
||||
set(OUTPUT_FILE "${CMAKE_BINARY_DIR}/bun_dependency_versions.h")
|
||||
|
||||
# Read existing content if file exists
|
||||
set(EXISTING_CONTENT "")
|
||||
if(EXISTS "${OUTPUT_FILE}")
|
||||
file(READ "${OUTPUT_FILE}" EXISTING_CONTENT)
|
||||
endif()
|
||||
|
||||
# Only write if content is different
|
||||
if(NOT "${EXISTING_CONTENT}" STREQUAL "${HEADER_CONTENT}")
|
||||
file(WRITE "${OUTPUT_FILE}" "${HEADER_CONTENT}")
|
||||
message(STATUS "Updated dependency versions header: ${OUTPUT_FILE}")
|
||||
else()
|
||||
message(STATUS "Dependency versions header unchanged: ${OUTPUT_FILE}")
|
||||
endif()
|
||||
|
||||
# Also create a more detailed version for debugging
|
||||
set(DEBUG_OUTPUT_FILE "${CMAKE_BINARY_DIR}/bun_dependency_versions_debug.txt")
|
||||
set(DEBUG_CONTENT "Bun Dependency Versions\n")
|
||||
string(APPEND DEBUG_CONTENT "=======================\n\n")
|
||||
set(i 0)
|
||||
while(i LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${i} name)
|
||||
math(EXPR value_idx "${i} + 1")
|
||||
if(value_idx LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${value_idx} value)
|
||||
string(APPEND DEBUG_CONTENT "${name}: ${value}\n")
|
||||
endif()
|
||||
math(EXPR i "${i} + 2")
|
||||
endwhile()
|
||||
file(WRITE "${DEBUG_OUTPUT_FILE}" "${DEBUG_CONTENT}")
|
||||
endfunction()
|
||||
|
||||
# Call the function to generate the header
|
||||
generate_dependency_versions_header()
|
||||
@@ -131,6 +131,9 @@ else()
|
||||
find_llvm_command(CMAKE_RANLIB llvm-ranlib)
|
||||
if(LINUX)
|
||||
find_llvm_command(LLD_PROGRAM ld.lld)
|
||||
# Ensure vendor dependencies use lld instead of ld
|
||||
list(APPEND CMAKE_ARGS -DCMAKE_EXE_LINKER_FLAGS=--ld-path=${LLD_PROGRAM})
|
||||
list(APPEND CMAKE_ARGS -DCMAKE_SHARED_LINKER_FLAGS=--ld-path=${LLD_PROGRAM})
|
||||
endif()
|
||||
if(APPLE)
|
||||
find_llvm_command(CMAKE_DSYMUTIL dsymutil)
|
||||
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION f474428677de1fafaf13bb3b9a050fe3504dda25)
|
||||
set(WEBKIT_VERSION 69fa2714ab5f917c2d15501ff8cfdccfaea78882)
|
||||
endif()
|
||||
|
||||
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
|
||||
|
||||
@@ -90,6 +90,7 @@ register_command(
|
||||
-DZIG_PATH=${ZIG_PATH}
|
||||
-DZIG_COMMIT=${ZIG_COMMIT}
|
||||
-DENABLE_ASAN=${ENABLE_ASAN}
|
||||
-DENABLE_VALGRIND=${ENABLE_VALGRIND}
|
||||
-DZIG_COMPILER_SAFE=${ZIG_COMPILER_SAFE}
|
||||
-P ${CWD}/cmake/scripts/DownloadZig.cmake
|
||||
SOURCES
|
||||
|
||||
@@ -122,7 +122,7 @@
|
||||
},
|
||||
{
|
||||
"name": "reporter",
|
||||
"description": "Specify the test reporter. Currently --reporter=junit is the only supported format.",
|
||||
"description": "Test output reporter format. Available: 'junit' (requires --reporter-outfile). Default: console output.",
|
||||
"hasValue": true,
|
||||
"valueType": "val",
|
||||
"required": false,
|
||||
@@ -130,7 +130,7 @@
|
||||
},
|
||||
{
|
||||
"name": "reporter-outfile",
|
||||
"description": "The output file used for the format from --reporter.",
|
||||
"description": "Output file path for the reporter format (required with --reporter).",
|
||||
"hasValue": true,
|
||||
"valueType": "val",
|
||||
"required": false,
|
||||
|
||||
@@ -665,7 +665,6 @@ _bun_test_completion() {
|
||||
'--timeout[Set the per-test timeout in milliseconds, default is 5000.]:timeout' \
|
||||
'--update-snapshots[Update snapshot files]' \
|
||||
'--rerun-each[Re-run each test file <NUMBER> times, helps catch certain bugs]:rerun' \
|
||||
'--only[Only run tests that are marked with "test.only()"]' \
|
||||
'--todo[Include tests that are marked with "test.todo()"]' \
|
||||
'--coverage[Generate a coverage profile]' \
|
||||
'--bail[Exit the test suite after <NUMBER> failures. If you do not specify a number, it defaults to 1.]:bail' \
|
||||
|
||||
@@ -184,6 +184,7 @@ Bun.hash.rapidhash("data", 1234);
|
||||
|
||||
- `"blake2b256"`
|
||||
- `"blake2b512"`
|
||||
- `"blake2s256"`
|
||||
- `"md4"`
|
||||
- `"md5"`
|
||||
- `"ripemd160"`
|
||||
|
||||
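If this list enumerates the algorithm names accepted by `Bun.CryptoHasher` (as the surrounding hashing docs suggest), a minimal sketch using the newly listed `"blake2s256"` entry might look like the following; treat the algorithm choice as an assumption based on this documentation change rather than a guaranteed API surface:

```ts
// Hash a string with BLAKE2s-256 via Bun.CryptoHasher.
// Assumes "blake2s256" is accepted, which is what this docs change adds to the list.
const hasher = new Bun.CryptoHasher("blake2s256");
hasher.update("hello world");
console.log(hasher.digest("hex")); // 256-bit digest, hex-encoded
```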
@@ -161,6 +161,102 @@ const randomTag = await redis.srandmember("tags");
|
||||
const poppedTag = await redis.spop("tags");
|
||||
```
|
||||
|
||||
## Pub/Sub
|
||||
|
||||
Bun provides native bindings for the [Redis
|
||||
Pub/Sub](https://redis.io/docs/latest/develop/pubsub/) protocol. **New in Bun
|
||||
1.2.23**
|
||||
|
||||
{% callout %}
|
||||
**🚧** — The Redis Pub/Sub feature is experimental. Although we expect it to be
|
||||
stable, we're currently actively looking for feedback and areas for improvement.
|
||||
{% /callout %}
|
||||
|
||||
### Basic Usage
|
||||
|
||||
To get started publishing messages, you can set up a publisher in
|
||||
`publisher.ts`:
|
||||
|
||||
```typescript#publisher.ts
|
||||
import { RedisClient } from "bun";
|
||||
|
||||
const writer = new RedisClient("redis://localhost:6379");
|
||||
await writer.connect();
|
||||
|
||||
writer.publish("general", "Hello everyone!");
|
||||
|
||||
writer.close();
|
||||
```
|
||||
|
||||
In another file, create the subscriber in `subscriber.ts`:
|
||||
|
||||
```typescript#subscriber.ts
|
||||
import { RedisClient } from "bun";
|
||||
|
||||
const listener = new RedisClient("redis://localhost:6379");
|
||||
await listener.connect();
|
||||
|
||||
await listener.subscribe("general", (message, channel) => {
|
||||
console.log(`Received: ${message}`);
|
||||
});
|
||||
```
|
||||
|
||||
In one shell, run your subscriber:
|
||||
|
||||
```bash
|
||||
bun run subscriber.ts
|
||||
```
|
||||
|
||||
and, in another, run your publisher:
|
||||
|
||||
```bash
|
||||
bun run publisher.ts
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
**Note:** The subscription mode takes over the `RedisClient` connection. A
|
||||
client with active subscriptions can only call `RedisClient.prototype.subscribe()`. Applications
|
||||
that still need to send regular commands to Redis therefore require a separate
|
||||
connection, which you can acquire through `.duplicate()`:
|
||||
|
||||
```typescript
|
||||
import { RedisClient } from "bun";
|
||||
|
||||
const redis = new RedisClient("redis://localhost:6379");
|
||||
await redis.connect();
|
||||
const subscriber = await redis.duplicate();
|
||||
|
||||
await subscriber.subscribe("foo", () => {});
|
||||
await redis.set("bar", "baz");
|
||||
```
|
||||
|
||||
{% /callout %}
|
||||
|
||||
### Publishing
|
||||
|
||||
Publishing messages is done through the `publish()` method:
|
||||
|
||||
```typescript
|
||||
await client.publish(channelName, message);
|
||||
```
|
||||
|
||||
### Subscriptions
|
||||
|
||||
The Bun `RedisClient` allows you to subscribe to channels through the
|
||||
`.subscribe()` method:
|
||||
|
||||
```typescript
|
||||
await client.subscribe(channel, (message, channel) => {});
|
||||
```
|
||||
|
||||
You can unsubscribe through the `.unsubscribe()` method:
|
||||
|
||||
```typescript
|
||||
await client.unsubscribe(); // Unsubscribe from all channels.
|
||||
await client.unsubscribe(channel); // Unsubscribe a particular channel.
|
||||
await client.unsubscribe(channel, listener); // Unsubscribe a particular listener.
|
||||
```
|
||||
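Putting these pieces together, here is a minimal end-to-end sketch (assuming a local Redis on the default port, and using `.duplicate()` as described in the note above so the publishing client stays out of subscription mode):

```ts
import { RedisClient } from "bun";

const redis = new RedisClient("redis://localhost:6379");
await redis.connect();

// Dedicated connection for the subscription, per the note above.
const subscriber = await redis.duplicate();
await subscriber.subscribe("news", (message, channel) => {
  console.log(`[${channel}] ${message}`);
});

// Publish from the original, non-subscribed client.
await redis.publish("news", "deploy finished");

// Later: remove the subscription and clean up both connections.
await subscriber.unsubscribe("news");
subscriber.close();
redis.close();
```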
|
||||
## Advanced Usage
|
||||
|
||||
### Command Execution and Pipelining
|
||||
@@ -482,9 +578,10 @@ When connecting to Redis servers using older versions that don't support RESP3,
|
||||
|
||||
Current limitations of the Redis client we are planning to address in future versions:
|
||||
|
||||
- [ ] No dedicated API for pub/sub functionality (though you can use the raw command API)
|
||||
- [ ] Transactions (MULTI/EXEC) must be done through raw commands for now
|
||||
- [ ] Streams are supported but without dedicated methods
|
||||
- [ ] Pub/Sub does not currently support binary data, nor pattern-based
|
||||
subscriptions.
|
||||
|
||||
Unsupported features:
|
||||
|
||||
|
||||
@@ -604,13 +604,12 @@ const db = new SQL({
|
||||
connectionTimeout: 30, // Timeout when establishing new connections
|
||||
|
||||
// SSL/TLS options
|
||||
ssl: "prefer", // or "disable", "require", "verify-ca", "verify-full"
|
||||
// tls: {
|
||||
// rejectUnauthorized: true,
|
||||
// ca: "path/to/ca.pem",
|
||||
// key: "path/to/key.pem",
|
||||
// cert: "path/to/cert.pem",
|
||||
// },
|
||||
tls: {
|
||||
rejectUnauthorized: true,
|
||||
ca: "path/to/ca.pem",
|
||||
key: "path/to/key.pem",
|
||||
cert: "path/to/cert.pem",
|
||||
},
|
||||
|
||||
// Callbacks
|
||||
onconnect: client => {
|
||||
|
||||
211
docs/api/yaml.md
@@ -3,6 +3,7 @@ In Bun, YAML is a first-class citizen alongside JSON and TOML.
|
||||
Bun provides built-in support for YAML files through both runtime APIs and bundler integration. You can
|
||||
|
||||
- Parse YAML strings with `Bun.YAML.parse`
|
||||
- Stringify JavaScript objects to YAML with `Bun.YAML.stringify`
|
||||
- import & require YAML files as modules at runtime (including hot reloading & watch mode support)
|
||||
- import & require YAML files in frontend apps via bun's bundler
|
||||
|
||||
@@ -104,7 +105,7 @@ const data = Bun.YAML.parse(yaml);
|
||||
|
||||
#### Error Handling
|
||||
|
||||
`Bun.YAML.parse()` throws a `SyntaxError` if the YAML is invalid:
|
||||
`Bun.YAML.parse()` throws an error if the YAML is invalid:
|
||||
|
||||
```ts
|
||||
try {
|
||||
@@ -114,6 +115,175 @@ try {
|
||||
}
|
||||
```
|
||||
|
||||
### `Bun.YAML.stringify()`
|
||||
|
||||
Convert a JavaScript value into a YAML string. The API signature matches `JSON.stringify`:
|
||||
|
||||
```ts
|
||||
YAML.stringify(value, replacer?, space?)
|
||||
```
|
||||
|
||||
- `value`: The value to convert to YAML
|
||||
- `replacer`: Currently only `null` or `undefined` (function replacers not yet supported)
|
||||
- `space`: Number of spaces for indentation (e.g., `2`) or a string to use for indentation. **Without this parameter, outputs flow-style (single-line) YAML**
|
||||
|
||||
#### Basic Usage
|
||||
|
||||
```ts
|
||||
import { YAML } from "bun";
|
||||
|
||||
const data = {
|
||||
name: "John Doe",
|
||||
age: 30,
|
||||
hobbies: ["reading", "coding"],
|
||||
};
|
||||
|
||||
// Without space - outputs flow-style (single-line) YAML
|
||||
console.log(YAML.stringify(data));
|
||||
// {name: John Doe,age: 30,hobbies: [reading,coding]}
|
||||
|
||||
// With space=2 - outputs block-style (multi-line) YAML
|
||||
console.log(YAML.stringify(data, null, 2));
|
||||
// name: John Doe
|
||||
// age: 30
|
||||
// hobbies:
|
||||
// - reading
|
||||
// - coding
|
||||
```
|
||||
|
||||
#### Output Styles
|
||||
|
||||
```ts
|
||||
const arr = [1, 2, 3];
|
||||
|
||||
// Flow style (single-line) - default
|
||||
console.log(YAML.stringify(arr));
|
||||
// [1,2,3]
|
||||
|
||||
// Block style (multi-line) - with indentation
|
||||
console.log(YAML.stringify(arr, null, 2));
|
||||
// - 1
|
||||
// - 2
|
||||
// - 3
|
||||
```
|
||||
|
||||
#### String Quoting
|
||||
|
||||
`YAML.stringify()` automatically quotes strings when necessary:
|
||||
|
||||
- Strings that would be parsed as YAML keywords (`true`, `false`, `null`, `yes`, `no`, etc.)
|
||||
- Strings that would be parsed as numbers
|
||||
- Strings containing special characters or escape sequences
|
||||
|
||||
```ts
|
||||
const examples = {
|
||||
keyword: "true", // Will be quoted: "true"
|
||||
number: "123", // Will be quoted: "123"
|
||||
text: "hello world", // Won't be quoted: hello world
|
||||
empty: "", // Will be quoted: ""
|
||||
};
|
||||
|
||||
console.log(YAML.stringify(examples, null, 2));
|
||||
// keyword: "true"
|
||||
// number: "123"
|
||||
// text: hello world
|
||||
// empty: ""
|
||||
```
|
||||
|
||||
#### Cycles and References
|
||||
|
||||
`YAML.stringify()` automatically detects and handles circular references using YAML anchors and aliases:
|
||||
|
||||
```ts
|
||||
const obj = { name: "root" };
|
||||
obj.self = obj; // Circular reference
|
||||
|
||||
const yamlString = YAML.stringify(obj, null, 2);
|
||||
console.log(yamlString);
|
||||
// &root
|
||||
// name: root
|
||||
// self:
|
||||
// *root
|
||||
|
||||
// Objects with shared references
|
||||
const shared = { id: 1 };
|
||||
const data = {
|
||||
first: shared,
|
||||
second: shared,
|
||||
};
|
||||
|
||||
console.log(YAML.stringify(data, null, 2));
|
||||
// first:
|
||||
// &first
|
||||
// id: 1
|
||||
// second:
|
||||
// *first
|
||||
```
|
||||
|
||||
#### Special Values
|
||||
|
||||
```ts
|
||||
// Special numeric values
|
||||
console.log(YAML.stringify(Infinity)); // .inf
|
||||
console.log(YAML.stringify(-Infinity)); // -.inf
|
||||
console.log(YAML.stringify(NaN)); // .nan
|
||||
console.log(YAML.stringify(0)); // 0
|
||||
console.log(YAML.stringify(-0)); // -0
|
||||
|
||||
// null and undefined
|
||||
console.log(YAML.stringify(null)); // null
|
||||
console.log(YAML.stringify(undefined)); // undefined (returns undefined, not a string)
|
||||
|
||||
// Booleans
|
||||
console.log(YAML.stringify(true)); // true
|
||||
console.log(YAML.stringify(false)); // false
|
||||
```
|
||||
|
||||
#### Complex Objects
|
||||
|
||||
```ts
|
||||
const config = {
|
||||
server: {
|
||||
port: 3000,
|
||||
host: "localhost",
|
||||
ssl: {
|
||||
enabled: true,
|
||||
cert: "/path/to/cert.pem",
|
||||
key: "/path/to/key.pem",
|
||||
},
|
||||
},
|
||||
database: {
|
||||
connections: [
|
||||
{ name: "primary", host: "db1.example.com" },
|
||||
{ name: "replica", host: "db2.example.com" },
|
||||
],
|
||||
},
|
||||
features: {
|
||||
auth: true,
|
||||
"rate-limit": 100, // Keys with special characters are preserved
|
||||
},
|
||||
};
|
||||
|
||||
const yamlString = YAML.stringify(config, null, 2);
|
||||
console.log(yamlString);
|
||||
// server:
|
||||
// port: 3000
|
||||
// host: localhost
|
||||
// ssl:
|
||||
// enabled: true
|
||||
// cert: /path/to/cert.pem
|
||||
// key: /path/to/key.pem
|
||||
// database:
|
||||
// connections:
|
||||
// - name: primary
|
||||
// host: db1.example.com
|
||||
// - name: replica
|
||||
// host: db2.example.com
|
||||
// features:
|
||||
// auth: true
|
||||
// rate-limit: 100
|
||||
```
|
||||
|
||||
## Module Import
|
||||
|
||||
### ES Modules
|
||||
@@ -184,6 +354,45 @@ const { database, redis } = require("./config.yaml");
|
||||
console.log(database.port); // 5432
|
||||
```
|
||||
|
||||
### TypeScript Support
|
||||
|
||||
While Bun can import YAML files directly, TypeScript doesn't know the types of your YAML files by default. To add TypeScript support for your YAML imports, create a declaration file with `.d.ts` appended to the YAML filename (e.g., `config.yaml` → `config.yaml.d.ts`):
|
||||
|
||||
```yaml#config.yaml
|
||||
features: "advanced"
|
||||
server:
|
||||
host: localhost
|
||||
port: 3000
|
||||
```
|
||||
|
||||
```ts#config.yaml.d.ts
|
||||
const contents: {
|
||||
features: string;
|
||||
server: {
|
||||
host: string;
|
||||
port: number;
|
||||
};
|
||||
};
|
||||
|
||||
export = contents;
|
||||
```
|
||||
|
||||
Now TypeScript will provide proper type checking and auto-completion:
|
||||
|
||||
```ts#app.ts
|
||||
import config from "./config.yaml";
|
||||
|
||||
// TypeScript knows the types!
|
||||
config.server.port; // number
|
||||
config.server.host; // string
|
||||
config.features; // string
|
||||
|
||||
// TypeScript will catch errors
|
||||
config.server.unknown; // Error: Property 'unknown' does not exist
|
||||
```
|
||||
|
||||
This approach works for both ES modules and CommonJS, giving you full type safety while Bun continues to handle the actual YAML parsing at runtime.
|
||||
|
||||
## Hot Reloading with YAML
|
||||
|
||||
One of the most powerful features of Bun's YAML support is hot reloading. When you run your application with `bun --hot`, changes to YAML files are automatically detected and reloaded without closing connections
|
||||
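As a minimal sketch of this in practice (assuming a `config.yaml` next to the script), a server started with `bun --hot` picks up edits to the imported YAML file:

```ts
// server.ts — run with: bun --hot server.ts
import config from "./config.yaml";

Bun.serve({
  port: 3000,
  fetch() {
    // After editing config.yaml, subsequent requests see the updated values
    // without a manual restart.
    return Response.json(config);
  },
});
```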
|
||||
@@ -733,6 +733,10 @@ Whether to enable minification. Default `false`.
|
||||
When targeting `bun`, identifiers will be minified by default.
|
||||
{% /callout %}
|
||||
|
||||
{% callout %}
|
||||
When `minify.syntax` is enabled, unused function and class expression names are removed unless `minify.keepNames` is set to `true` or `--keep-names` flag is used.
|
||||
{% /callout %}
|
||||
|
||||
To enable all minification options:
|
||||
|
||||
{% codetabs group="a" %}
|
||||
@@ -763,12 +767,16 @@ await Bun.build({
|
||||
whitespace: true,
|
||||
identifiers: true,
|
||||
syntax: true,
|
||||
keepNames: false, // default
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
$ bun build ./index.tsx --outdir ./out --minify-whitespace --minify-identifiers --minify-syntax
|
||||
|
||||
# To preserve function and class names during minification:
|
||||
$ bun build ./index.tsx --outdir ./out --minify --keep-names
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
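As a sketch of opting back in to name preservation described in the callout above (the entrypoint and output directory below are placeholders):

```ts
// Keep function and class expression names even with syntax minification enabled.
await Bun.build({
  entrypoints: ["./index.tsx"], // placeholder entrypoint
  outdir: "./out",
  minify: {
    whitespace: true,
    identifiers: true,
    syntax: true,
    keepNames: true, // preserves fn.name / class.name in the minified output
  },
});
```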
@@ -1553,6 +1561,7 @@ interface BuildConfig {
|
||||
whitespace?: boolean;
|
||||
syntax?: boolean;
|
||||
identifiers?: boolean;
|
||||
keepNames?: boolean;
|
||||
};
|
||||
/**
|
||||
* Ignore dead code elimination/tree-shaking annotations such as @__PURE__ and package.json
|
||||
|
||||
109
docs/cli/test.md
@@ -109,6 +109,85 @@ Use the `--timeout` flag to specify a _per-test_ timeout in milliseconds. If a t
|
||||
$ bun test --timeout 20
|
||||
```
|
||||
|
||||
## Concurrent test execution
|
||||
|
||||
By default, Bun runs all tests sequentially within each test file. You can enable concurrent execution to run async tests in parallel, significantly speeding up test suites with independent tests.
|
||||
|
||||
### `--concurrent` flag
|
||||
|
||||
Use the `--concurrent` flag to run all tests concurrently within their respective files:
|
||||
|
||||
```sh
|
||||
$ bun test --concurrent
|
||||
```
|
||||
|
||||
When this flag is enabled, all tests will run in parallel unless explicitly marked with `test.serial`.
|
||||
|
||||
### `--max-concurrency` flag
|
||||
|
||||
Control the maximum number of tests running simultaneously with the `--max-concurrency` flag:
|
||||
|
||||
```sh
|
||||
# Limit to 4 concurrent tests
|
||||
$ bun test --concurrent --max-concurrency 4
|
||||
|
||||
# Default: 20
|
||||
$ bun test --concurrent
|
||||
```
|
||||
|
||||
This helps prevent resource exhaustion when running many concurrent tests. The default value is 20.
|
||||
|
||||
### `test.concurrent`
|
||||
|
||||
Mark individual tests to run concurrently, even when the `--concurrent` flag is not used:
|
||||
|
||||
```ts
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
// These tests run in parallel with each other
|
||||
test.concurrent("concurrent test 1", async () => {
|
||||
await fetch("/api/endpoint1");
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
|
||||
test.concurrent("concurrent test 2", async () => {
|
||||
await fetch("/api/endpoint2");
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
|
||||
// This test runs sequentially
|
||||
test("sequential test", () => {
|
||||
expect(1 + 1).toBe(2);
|
||||
});
|
||||
```
|
||||
|
||||
### `test.serial`
|
||||
|
||||
Force tests to run sequentially, even when the `--concurrent` flag is enabled:
|
||||
|
||||
```ts
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
let sharedState = 0;
|
||||
|
||||
// These tests must run in order
|
||||
test.serial("first serial test", () => {
|
||||
sharedState = 1;
|
||||
expect(sharedState).toBe(1);
|
||||
});
|
||||
|
||||
test.serial("second serial test", () => {
|
||||
// Depends on the previous test
|
||||
expect(sharedState).toBe(1);
|
||||
sharedState = 2;
|
||||
});
|
||||
|
||||
// This test can run concurrently if --concurrent is enabled
|
||||
test("independent test", () => {
|
||||
expect(true).toBe(true);
|
||||
});
|
||||
```
|
||||
|
||||
## Rerun tests
|
||||
|
||||
Use the `--rerun-each` flag to run each test multiple times. This is useful for detecting flaky or non-deterministic test failures.
|
||||
@@ -117,6 +196,36 @@ Use the `--rerun-each` flag to run each test multiple times. This is useful for
|
||||
$ bun test --rerun-each 100
|
||||
```
|
||||
|
||||
## Randomize test execution order
|
||||
|
||||
Use the `--randomize` flag to run tests in a random order. This helps detect tests that depend on shared state or execution order.
|
||||
|
||||
```sh
|
||||
$ bun test --randomize
|
||||
```
|
||||
|
||||
When using `--randomize`, the seed used for randomization will be displayed in the test summary:
|
||||
|
||||
```sh
|
||||
$ bun test --randomize
|
||||
# ... test output ...
|
||||
--seed=12345
|
||||
2 pass
|
||||
8 fail
|
||||
Ran 10 tests across 2 files. [50.00ms]
|
||||
```
|
||||
|
||||
### Reproducible random order with `--seed`
|
||||
|
||||
Use the `--seed` flag to specify a seed for the randomization. This allows you to reproduce the same test order when debugging order-dependent failures.
|
||||
|
||||
```sh
|
||||
# Reproduce a previous randomized run
|
||||
$ bun test --seed 123456
|
||||
```
|
||||
|
||||
The `--seed` flag implies `--randomize`, so you don't need to specify both. Using the same seed value will always produce the same test execution order, making it easier to debug intermittent failures caused by test interdependencies.
|
||||
|
||||
## Bail out with `--bail`
|
||||
|
||||
Use the `--bail` flag to abort the test run early after a pre-determined number of test failures. By default Bun will run all tests and report all failures, but sometimes in CI environments it's preferable to terminate earlier to reduce CPU usage.
|
||||
|
||||
@@ -9,8 +9,9 @@ $ bun create next-app
|
||||
✔ What is your project named? … my-app
|
||||
✔ Would you like to use TypeScript with this project? … No / Yes
|
||||
✔ Would you like to use ESLint with this project? … No / Yes
|
||||
✔ Would you like to use Tailwind CSS? ... No / Yes
|
||||
✔ Would you like to use `src/` directory with this project? … No / Yes
|
||||
✔ Would you like to use experimental `app/` directory with this project? … No / Yes
|
||||
✔ Would you like to use App Router? (recommended) ... No / Yes
|
||||
✔ What import alias would you like configured? … @/*
|
||||
Creating a new Next.js app in /path/to/my-app.
|
||||
```
|
||||
|
||||
@@ -73,4 +73,30 @@ console.log(data.hobbies); // => ["reading", "coding"]
|
||||
|
||||
---
|
||||
|
||||
## TypeScript Support
|
||||
|
||||
To add TypeScript support for your YAML imports, create a declaration file with `.d.ts` appended to the YAML filename (e.g., `config.yaml` → `config.yaml.d.ts`):
|
||||
|
||||
```ts#config.yaml.d.ts
|
||||
const contents: {
|
||||
database: {
|
||||
host: string;
|
||||
port: number;
|
||||
name: string;
|
||||
};
|
||||
server: {
|
||||
port: number;
|
||||
timeout: number;
|
||||
};
|
||||
features: {
|
||||
auth: boolean;
|
||||
rateLimit: boolean;
|
||||
};
|
||||
};
|
||||
|
||||
export = contents;
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > YAML](https://bun.com/docs/api/yaml) for complete documentation on YAML support in Bun.
|
||||
|
||||
@@ -359,7 +359,7 @@ export default {
|
||||
page("api/file-io", "File I/O", {
|
||||
description: `Read and write files fast with Bun's heavily optimized file system API.`,
|
||||
}), // "`Bun.write`"),
|
||||
page("api/redis", "Redis client", {
|
||||
page("api/redis", "Redis Client", {
|
||||
description: `Bun provides a fast, native Redis client with automatic command pipelining for better performance.`,
|
||||
}),
|
||||
page("api/import-meta", "import.meta", {
|
||||
@@ -407,6 +407,9 @@ export default {
|
||||
page("api/cc", "C Compiler", {
|
||||
description: `Build & run native C from JavaScript with Bun's native C compiler API`,
|
||||
}), // "`bun:ffi`"),
|
||||
page("api/secrets", "Secrets", {
|
||||
description: `Store and retrieve sensitive credentials securely using the operating system's native credential storage APIs.`,
|
||||
}), // "`Bun.secrets`"),
|
||||
page("cli/test", "Testing", {
|
||||
description: `Bun's built-in test runner is fast and uses Jest-compatible syntax.`,
|
||||
}), // "`bun:test`"),
|
||||
|
||||
@@ -232,6 +232,23 @@ Set path where coverage reports will be saved. Please notice, that it works only
|
||||
coverageDir = "path/to/somewhere" # default "coverage"
|
||||
```
|
||||
|
||||
### `test.concurrentTestGlob`
|
||||
|
||||
Specify a glob pattern to automatically run matching test files with concurrent test execution enabled. Test files matching this pattern will behave as if the `--concurrent` flag was passed, running all tests within those files concurrently.
|
||||
|
||||
```toml
|
||||
[test]
|
||||
concurrentTestGlob = "**/concurrent-*.test.ts"
|
||||
```
|
||||
|
||||
This is useful for:
|
||||
|
||||
- Gradually migrating test suites to concurrent execution
|
||||
- Running integration tests concurrently while keeping unit tests sequential
|
||||
- Separating fast concurrent tests from tests that require sequential execution
|
||||
|
||||
The `--concurrent` CLI flag will override this setting when specified.
|
||||
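As a sketch, a test file whose name matches the glob above runs its plain `test()` calls concurrently with no per-test annotation (the file name and test bodies here are illustrative):

```ts
// tests/concurrent-io.test.ts — matches "**/concurrent-*.test.ts", so these run in parallel
import { test, expect } from "bun:test";

test("first independent test", async () => {
  await Bun.sleep(10); // simulate independent async work
  expect(1 + 1).toBe(2);
});

test("second independent test", async () => {
  await Bun.sleep(10);
  expect("bun".length).toBe(3);
});
```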
|
||||
## Package manager
|
||||
|
||||
Package management is a complex issue; to support a range of use cases, the behavior of `bun install` can be configured under the `[install]` section.
|
||||
@@ -521,7 +538,7 @@ When a security scanner is configured:
|
||||
- Installation is cancelled if fatal issues are found
|
||||
- Security warnings are displayed during installation
|
||||
|
||||
Learn more about [using and writing security scanners](/docs/install/security).
|
||||
Learn more about [using and writing security scanners](/docs/install/security-scanner-api).
|
||||
|
||||
### `install.linker`
|
||||
|
||||
|
||||
@@ -46,6 +46,25 @@ smol = true # Reduce memory usage during test runs
|
||||
|
||||
This is equivalent to using the `--smol` flag on the command line.
|
||||
|
||||
### Test execution
|
||||
|
||||
#### concurrentTestGlob
|
||||
|
||||
Automatically run test files matching a glob pattern with concurrent test execution enabled. This is useful for gradually migrating test suites to concurrent execution or for running specific test types concurrently.
|
||||
|
||||
```toml
|
||||
[test]
|
||||
concurrentTestGlob = "**/concurrent-*.test.ts" # Run files matching this pattern concurrently
|
||||
```
|
||||
|
||||
Test files matching this pattern will behave as if the `--concurrent` flag was passed, running all tests within those files concurrently. This allows you to:
|
||||
|
||||
- Gradually migrate your test suite to concurrent execution
|
||||
- Run integration tests concurrently while keeping unit tests sequential
|
||||
- Separate fast concurrent tests from tests that require sequential execution
|
||||
|
||||
The `--concurrent` CLI flag will override this setting when specified, forcing all tests to run concurrently regardless of the glob pattern.
|
||||
|
||||
### Coverage options
|
||||
|
||||
In addition to the options documented in the [coverage documentation](./coverage.md), the following options are available:
|
||||
|
||||
132
docs/test/examples/concurrent-test-glob.md
Normal file
@@ -0,0 +1,132 @@
|
||||
# Concurrent Test Glob Example
|
||||
|
||||
This example demonstrates how to use the `concurrentTestGlob` option to selectively run tests concurrently based on file naming patterns.
|
||||
|
||||
## Project Structure
|
||||
|
||||
```text
|
||||
my-project/
|
||||
├── bunfig.toml
|
||||
├── tests/
|
||||
│ ├── unit/
|
||||
│ │ ├── math.test.ts # Sequential
|
||||
│ │ └── utils.test.ts # Sequential
|
||||
│ └── integration/
|
||||
│ ├── concurrent-api.test.ts # Concurrent
|
||||
│ └── concurrent-database.test.ts # Concurrent
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### bunfig.toml
|
||||
|
||||
```toml
|
||||
[test]
|
||||
# Run all test files with "concurrent-" prefix concurrently
|
||||
concurrentTestGlob = "**/concurrent-*.test.ts"
|
||||
```
|
||||
|
||||
## Test Files
|
||||
|
||||
### Unit Test (Sequential)
|
||||
|
||||
`tests/unit/math.test.ts`
|
||||
|
||||
```typescript
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
// These tests run sequentially by default
|
||||
// Good for tests that share state or have specific ordering requirements
|
||||
let sharedState = 0;
|
||||
|
||||
test("addition", () => {
|
||||
sharedState = 5 + 3;
|
||||
expect(sharedState).toBe(8);
|
||||
});
|
||||
|
||||
test("uses previous state", () => {
|
||||
// This test depends on the previous test's state
|
||||
expect(sharedState).toBe(8);
|
||||
});
|
||||
```
|
||||
|
||||
### Integration Test (Concurrent)
|
||||
|
||||
`tests/integration/concurrent-api.test.ts`
|
||||
|
||||
```typescript
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
// These tests automatically run concurrently due to filename matching the glob pattern.
|
||||
// Using test() is equivalent to test.concurrent() when the file matches concurrentTestGlob.
|
||||
// Each test is independent and can run in parallel.
|
||||
|
||||
test("fetch user data", async () => {
|
||||
const response = await fetch("/api/user/1");
|
||||
expect(response.ok).toBe(true);
|
||||
});
|
||||
|
||||
test("fetch posts", async () => {
|
||||
const response = await fetch("/api/posts");
|
||||
expect(response.ok).toBe(true);
|
||||
});
|
||||
|
||||
test("fetch comments", async () => {
|
||||
const response = await fetch("/api/comments");
|
||||
expect(response.ok).toBe(true);
|
||||
});
|
||||
```
|
||||
|
||||
## Running Tests
|
||||
|
||||
```bash
|
||||
# Run all tests - concurrent-*.test.ts files will run concurrently
|
||||
bun test
|
||||
|
||||
# Override: Force ALL tests to run concurrently
|
||||
# Note: This overrides bunfig.toml and runs all tests concurrently, regardless of glob
|
||||
bun test --concurrent
|
||||
|
||||
# Run only unit tests (sequential)
|
||||
bun test tests/unit
|
||||
|
||||
# Run only integration tests (concurrent due to glob pattern)
|
||||
bun test tests/integration
|
||||
```
|
||||
|
||||
## Benefits
|
||||
|
||||
1. **Gradual Migration**: Migrate to concurrent tests file by file by renaming them
|
||||
2. **Clear Organization**: File naming convention indicates execution mode
|
||||
3. **Performance**: Integration tests run faster in parallel
|
||||
4. **Safety**: Unit tests remain sequential where needed
|
||||
5. **Flexibility**: Easy to change execution mode by renaming files
|
||||
|
||||
## Migration Strategy
|
||||
|
||||
To migrate existing tests to concurrent execution:
|
||||
|
||||
1. Start with independent integration tests
|
||||
2. Rename files to match the glob pattern: `mv api.test.ts concurrent-api.test.ts`
|
||||
3. Verify tests still pass
|
||||
4. Monitor for race conditions or shared state issues
|
||||
5. Continue migrating stable tests incrementally
|
||||
|
||||
## Tips
|
||||
|
||||
- Use descriptive prefixes: `concurrent-`, `parallel-`, `async-`
|
||||
- Keep related sequential tests together
|
||||
- Document why certain tests must remain sequential
|
||||
- Use `test.concurrent()` for fine-grained control in sequential files
|
||||
(In files matched by `concurrentTestGlob`, plain `test()` already runs concurrently)
|
||||
- Consider separate globs for different test types:
|
||||
|
||||
```toml
|
||||
[test]
|
||||
# Multiple patterns for different test categories
|
||||
concurrentTestGlob = [
|
||||
"**/integration/*.test.ts",
|
||||
"**/e2e/*.test.ts",
|
||||
"**/concurrent-*.test.ts"
|
||||
]
|
||||
```
|
||||
@@ -149,12 +149,6 @@ describe.only("only", () => {
|
||||
|
||||
The following command will only execute tests #2 and #3.
|
||||
|
||||
```sh
|
||||
$ bun test --only
|
||||
```
|
||||
|
||||
The following command will only execute tests #1, #2 and #3.
|
||||
|
||||
```sh
|
||||
$ bun test
|
||||
```
|
||||
@@ -756,3 +750,76 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
|
||||
- [`.toThrowErrorMatchingInlineSnapshot()`](https://jestjs.io/docs/expect#tothrowerrormatchinginlinesnapshotinlinesnapshot)
|
||||
|
||||
{% /table %}
|
||||
|
||||
## TypeScript Type Safety
|
||||
|
||||
Bun's test runner provides enhanced TypeScript support with intelligent type checking for your test assertions. The type system helps catch potential bugs at compile time while still allowing flexibility when needed.
|
||||
|
||||
### Strict Type Checking by Default
|
||||
|
||||
By default, Bun's test matchers enforce strict type checking between the actual value and expected value:
|
||||
|
||||
```ts
|
||||
import { expect, test } from "bun:test";
|
||||
|
||||
test("strict typing", () => {
|
||||
const str = "hello";
|
||||
const num = 42;
|
||||
|
||||
expect(str).toBe("hello"); // ✅ OK: string to string
|
||||
expect(num).toBe(42); // ✅ OK: number to number
|
||||
expect(str).toBe(42); // ❌ TypeScript error: string vs number
|
||||
});
|
||||
```
|
||||
|
||||
This helps catch common mistakes where you might accidentally compare values of different types.
|
||||
|
||||
### Relaxed Type Checking with Type Parameters
|
||||
|
||||
Sometimes you need more flexibility in your tests, especially when working with:
|
||||
|
||||
- Dynamic data from APIs
|
||||
- Polymorphic functions that can return multiple types
|
||||
- Generic utility functions
|
||||
- Migration of existing test suites
|
||||
|
||||
For these cases, you can "opt out" of strict type checking by providing an explicit type parameter to matcher methods:
|
||||
|
||||
```ts
|
||||
import { expect, test } from "bun:test";
|
||||
|
||||
test("relaxed typing with type parameters", () => {
|
||||
const value: unknown = getSomeValue();
|
||||
|
||||
// These would normally cause TypeScript errors, but type parameters allow them:
|
||||
expect(value).toBe<number>(42); // No TS error, runtime check still works
|
||||
expect(value).toEqual<string>("hello"); // No TS error, runtime check still works
|
||||
expect(value).toStrictEqual<boolean>(true); // No TS error, runtime check still works
|
||||
});
|
||||
|
||||
test("useful for dynamic data", () => {
|
||||
const apiResponse: any = { status: "success" };
|
||||
|
||||
// Without type parameter: TypeScript error (any vs string)
|
||||
// expect(apiResponse.status).toBe("success");
|
||||
|
||||
// With type parameter: No TypeScript error, runtime assertion still enforced
|
||||
expect(apiResponse.status).toBe<string>("success"); // ✅ OK
|
||||
});
|
||||
```
|
||||
|
||||
### Migration from Looser Type Systems
|
||||
|
||||
If migrating from a test framework with looser TypeScript integration, you can use type parameters as a stepping stone:
|
||||
|
||||
```ts
|
||||
// Old Jest test that worked but wasn't type-safe
|
||||
expect(response.data).toBe(200); // No type error in some setups
|
||||
|
||||
// Bun equivalent with explicit typing during migration
|
||||
expect(response.data).toBe<number>(200); // Explicit about expected type
|
||||
|
||||
// Ideal Bun test after refactoring
|
||||
const statusCode: number = response.data;
|
||||
expect(statusCode).toBe(200); // Type-safe without explicit parameter
|
||||
```
|
||||
|
||||
@@ -19,3 +19,6 @@ command script import -c bun_pretty_printer.py
|
||||
|
||||
command script delete btjs
|
||||
command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}
|
||||
|
||||
# do not pass SIGHUP on to child process. it is often not the real error and the stop point will be nonsensical.
|
||||
process handle -p false -s false -n true SIGHUP
|
||||
|
||||
@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.2.22",
"version": "1.2.24",
"workspaces": [
"./packages/bun-types",
"./packages/@types/bun"

@@ -33,7 +33,7 @@
"bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
"bd": "BUN_DEBUG_QUIET_LOGS=1 bun --silent bd:v",
"build:debug": "export COMSPEC=\"C:\\Windows\\System32\\cmd.exe\" && bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug --log-level=NOTICE",
"build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan --log-level=NOTICE",
"build:debug:noasan": "export COMSPEC=\"C:\\Windows\\System32\\cmd.exe\" && bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=OFF -B build/debug --log-level=NOTICE",
"build:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -B build/release",
"build:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh",
"build:assert": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=RelWithDebInfo -DENABLE_ASSERTIONS=ON -DENABLE_LOGS=ON -B build/release-assert",

@@ -84,6 +84,11 @@
"node:test": "node ./scripts/runner.node.mjs --quiet --exec-path=$npm_execpath --node-tests ",
"node:test:cp": "bun ./scripts/fetch-node-test.ts ",
"clean:zig": "rm -rf build/debug/cache/zig build/debug/CMakeCache.txt 'build/debug/*.o' .zig-cache zig-out || true",
"machine:linux:ubuntu": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=ubuntu --release=25.04",
"machine:linux:debian": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=debian --release=12",
"machine:linux:alpine": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=alpine --release=3.21",
"machine:linux:amazonlinux": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=linux --distro=amazonlinux --release=2023",
"machine:windows:2019": "./scripts/machine.mjs ssh --cloud=aws --arch=x64 --instance-type c7i.2xlarge --os=windows --release=2019",
"sync-webkit-source": "bun ./scripts/sync-webkit-source.ts"
}
}

14  packages/bun-error/bun.lock  Normal file

@@ -0,0 +1,14 @@
{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "bun-error",
"dependencies": {
"preact": "^10.27.2",
},
},
},
"packages": {
"preact": ["preact@10.27.2", "", {}, "sha512-5SYSgFKSyhCbk6SrXyMpqjb5+MQBgfvEKE/OC+PujcY34sOpqtr+0AZQtPYx5IA6VxynQ7rUPCtKzyovpj9Bpg=="],
}
}

@@ -1,5 +1,6 @@
import React, { createContext, useContext } from "react";
import { render, unmountComponentAtNode } from "react-dom";
import type { JSX } from "preact";
import { createContext, render } from "preact";
import { useCallback, useContext, useEffect, useRef, useState } from "preact/hooks";
import type {
FallbackMessageContainer,
JSException,

@@ -164,17 +165,17 @@ const maybeBlobFileURL = (filename: string, line?: number, column?: number): str
return srcFileURL(filename, line, column);
};

const openWithoutFlashOfNewTab: React.MouseEventHandler<HTMLAnchorElement> = event => {
const target = event.currentTarget;
const openWithoutFlashOfNewTab: JSX.MouseEventHandler<HTMLAnchorElement> = event => {
const target = event.currentTarget as HTMLAnchorElement;
const href = target.getAttribute("href");
if (!href || event.button !== 0) {
return true;
}

event.preventDefault();
event.nativeEvent.preventDefault();
event.nativeEvent.stopPropagation();
event.nativeEvent.stopImmediatePropagation();
event.preventDefault();
event.stopPropagation();
event.stopImmediatePropagation();

const headers = new Headers();
headers.set("Accept", "text/plain");

@@ -317,17 +318,17 @@ const AsyncSourceLines = ({
highlight: number;
highlightColumnStart: number;
highlightColumnEnd: number;
children?: React.ReactNode;
children?: any;
buildURL: (line?: number, column?: number) => string;
sourceLines: SourceLine[];
setSourceLines: (lines: SourceLine[]) => void;
}) => {
const [loadState, setLoadState] = React.useState(LoadState.pending);
const [loadState, setLoadState] = useState(LoadState.pending);

const controller = React.useRef<AbortController | null>(null);
const url = React.useRef<string>(buildURL(0, 0));
const controller = useRef<AbortController | null>(null);
const url = useRef<string>(buildURL(0, 0));

React.useEffect(() => {
useEffect(() => {
controller.current = new AbortController();
var cancelled = false;
fetch(url.current, {

@@ -432,7 +433,7 @@ const SourceLines = ({
highlight: number;
highlightColumnStart: number;
highlightColumnEnd: number;
children?: React.ReactNode;
children?: any;
buildURL: (line?: number, column?: number) => string;
}) => {
let start = sourceLines.length;

@@ -461,7 +462,7 @@ const SourceLines = ({
const leftPad = maxLineNumber.toString(10).length - minLineNumber.toString(10).length;

const _sourceLines = sourceLines.slice(start, end);
const lines = new Array(_sourceLines.length + React.Children.count(children));
const lines = new Array(_sourceLines.length + (Array.isArray(children) ? children.length : children ? 1 : 0));

let highlightI = 0;
for (let i = 0; i < _sourceLines.length; i++) {

@@ -513,7 +514,7 @@ const SourceLines = ({
const BuildErrorSourceLines = ({ location, filename }: { location: Location; filename: string }) => {
const { line, line_text, column } = location;
const sourceLines: SourceLine[] = [{ line, text: line_text }];
const buildURL = React.useCallback((line, column) => srcFileURL(filename, line, column), [srcFileURL, filename]);
const buildURL = useCallback((line, column) => srcFileURL(filename, line, column), [filename]);
return (
<SourceLines
sourceLines={sourceLines}

@@ -669,15 +670,15 @@ const NativeStackTrace = ({
frames: StackFrame[];
sourceLines: SourceLine[];
setSourceLines: (sourceLines: SourceLine[]) => void;
children?: React.ReactNode;
children?: any;
isClient: boolean;
}) => {
const { file = "", position } = frames[0];
const { cwd } = useContext(ErrorGroupContext);
const filename = normalizedFilename(file, cwd);
const urlBuilder = isClient ? clientURL : maybeBlobFileURL;
const ref = React.useRef<HTMLDivElement>(null);
const buildURL = React.useCallback((line, column) => urlBuilder(file, line, column), [file, urlBuilder]);
const ref = useRef<HTMLDivElement>(null);
const buildURL = useCallback((line, column) => urlBuilder(file, line, column), [file, urlBuilder]);

return (
<div ref={ref} className={`BunError-NativeStackTrace`}>

@@ -732,7 +733,7 @@ const Indent = ({ by, children }) => {

const JSException = ({ value, isClient = false }: { value: JSExceptionType; isClient: boolean }) => {
const tag = isClient ? ErrorTagType.client : ErrorTagType.server;
const [sourceLines, _setSourceLines] = React.useState(value?.stack?.source_lines ?? []);
const [sourceLines, _setSourceLines] = useState(value?.stack?.source_lines ?? []);
var message = value.message || "";
var name = value.name || "";
if (!name && !message) {

@@ -1242,7 +1243,7 @@ export function renderRuntimeError(error: Error) {

export function dismissError() {
if (reactRoot) {
unmountComponentAtNode(reactRoot);
render(null, reactRoot);
const root = document.getElementById("__bun__error-root");
if (root) root.remove();
reactRoot = null;

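The hunks above follow a mechanical React-to-Preact migration. A minimal sketch of that mapping, assuming preact@10; the component and handler names here are illustrative, not taken from the file:

```tsx
// Value imports move from "react"/"react-dom" to "preact", hooks to "preact/hooks",
// and JSX helper types come from Preact's JSX namespace.
import { render } from "preact";
import { useEffect, useRef, useState } from "preact/hooks";
import type { JSX } from "preact";

// React.MouseEventHandler<T> becomes JSX.MouseEventHandler<T>; Preact dispatches
// native DOM events, so preventDefault() is called directly (no event.nativeEvent).
const onClick: JSX.MouseEventHandler<HTMLAnchorElement> = event => {
  event.preventDefault();
};

function Example() {
  const [count, setCount] = useState(0); // was React.useState
  const ref = useRef<HTMLDivElement>(null); // was React.useRef
  useEffect(() => {
    setCount(c => c + 1); // was React.useEffect
  }, []);
  return (
    <div ref={ref}>
      <a href="#" onClick={onClick}>{count}</a>
    </div>
  );
}

render(<Example />, document.body);
// react-dom's unmountComponentAtNode(container) becomes render(null, container).
render(null, document.body);
```
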
@@ -5,14 +5,9 @@
"license": "MIT",
"private": true,
"scripts": {
"build": "esbuild --define:process.env.NODE_ENV=\"'production'\" --minify index.tsx bun-error.css --bundle --outdir=dist --platform=browser --format=esm"
"build": "bun build --production --define:process.env.NODE_ENV=\"'production'\" --minify index.tsx bun-error.css --outdir=dist --target=browser --format=esm"
},
"dependencies": {
"esbuild": "latest",
"react": "^17.0.2",
"react-dom": "^17.0.2"
},
"devDependencies": {
"@types/react": "^17.0.39"
"preact": "^10.27.2"
}
}

@@ -1,10 +1,11 @@
{
"compilerOptions": {
"jsx": "react",
"lib": ["ESNext", "DOM"],
"module": "esnext",
"target": "esnext",
"moduleResolution": "node",
"allowSyntheticDefaultImports": true
"allowSyntheticDefaultImports": true,
"jsx": "react-jsx",
"jsxImportSource": "preact"
}
}

@@ -62,7 +62,7 @@ export const platforms: Platform[] = [
},
{
os: "linux",
arch: "aarch64",
arch: "arm64",
abi: "musl",
bin: "bun-linux-aarch64-musl",
exe: "bin/bun",

40  packages/bun-types/bun.d.ts  vendored
@@ -636,7 +636,7 @@ declare module "bun" {
* import { YAML } from "bun";
*
* console.log(YAML.parse("123")) // 123
* console.log(YAML.parse("123")) // null
* console.log(YAML.parse("null")) // null
* console.log(YAML.parse("false")) // false
* console.log(YAML.parse("abc")) // "abc"
* console.log(YAML.parse("- abc")) // [ "abc" ]

@@ -653,7 +653,10 @@ declare module "bun" {
*
* @param input The JavaScript value to stringify.
* @param replacer Currently not supported.
* @param space A number for how many spaces each level of indentation gets, or a string used as indentation. The number is clamped between 0 and 10, and the first 10 characters of the string are used.
* @param space A number for how many spaces each level of indentation gets, or a string used as indentation.
* Without this parameter, outputs flow-style (single-line) YAML.
* With this parameter, outputs block-style (multi-line) YAML.
* The number is clamped between 0 and 10, and the first 10 characters of the string are used.
* @returns A string containing the YAML document.
*
* @example

@@ -661,19 +664,24 @@ declare module "bun" {
* import { YAML } from "bun";
*
* const input = {
* abc: "def"
* abc: "def",
* num: 123
* };
*
* // Without space - flow style (single-line)
* console.log(YAML.stringify(input));
* // # output
* // {abc: def,num: 123}
*
* // With space - block style (multi-line)
* console.log(YAML.stringify(input, null, 2));
* // abc: def
* // num: 123
*
* const cycle = {};
* cycle.obj = cycle;
* console.log(YAML.stringify(cycle));
* // # output
* // &root
* // obj:
* // *root
* console.log(YAML.stringify(cycle, null, 2));
* // &1
* // obj: *1
*/
export function stringify(input: unknown, replacer?: undefined | null, space?: string | number): string;
}

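For quick reference, a short sketch exercising the stringify behavior documented above; the output comments restate the JSDoc example rather than independently captured results:

```ts
import { YAML } from "bun";

const input = { abc: "def", num: 123 };

// Without the space parameter: flow-style (single-line) YAML.
console.log(YAML.stringify(input)); // {abc: def,num: 123}

// With a space parameter: block-style (multi-line) YAML.
// The number is clamped between 0 and 10; for a string, the first 10 characters are used.
console.log(YAML.stringify(input, null, 2));
// abc: def
// num: 123
```
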
@@ -1819,6 +1827,7 @@ declare module "bun" {
whitespace?: boolean;
syntax?: boolean;
identifiers?: boolean;
keepNames?: boolean;
};

/**

@@ -1898,6 +1907,18 @@ declare module "bun" {
*/
tsconfig?: string;

/**
* JSX configuration options
*/
jsx?: {
runtime?: "automatic" | "classic";
importSource?: string;
factory?: string;
fragment?: string;
sideEffects?: boolean;
development?: boolean;
};

outdir?: string;
}

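The jsx block above appears alongside tsconfig and outdir, which suggests it belongs to the options object accepted by Bun.build; the hunk does not show the enclosing interface, so treat the following as a sketch under that assumption:

```ts
// Sketch only: assumes the jsx options added above are part of Bun.build's config.
await Bun.build({
  entrypoints: ["./index.tsx"],
  outdir: "./dist",
  jsx: {
    runtime: "automatic", // or "classic", which uses factory/fragment instead
    importSource: "preact", // JSX import source for the automatic runtime
    development: false,
  },
});
```
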
@@ -5026,6 +5047,7 @@ declare module "bun" {
type SupportedCryptoAlgorithms =
| "blake2b256"
| "blake2b512"
| "blake2s256"
| "md4"
| "md5"
| "ripemd160"

9  packages/bun-types/globals.d.ts  vendored
@@ -1556,6 +1556,15 @@ declare var URL: Bun.__internal.UseLibDomIfAvailable<
}
>;

/**
* The **`AbortController`** interface represents a controller object that allows you to abort one or more Web requests as and when desired.
*
* [MDN Reference](https://developer.mozilla.org/docs/Web/API/AbortController)
*/
interface AbortController {
readonly signal: AbortSignal;
abort(reason?: any): void;
}
declare var AbortController: Bun.__internal.UseLibDomIfAvailable<
"AbortController",
{

4  packages/bun-types/index.d.ts  vendored
@@ -26,6 +26,6 @@

/// <reference path="./bun.ns.d.ts" />

// @ts-ignore Must disable this so it doesn't conflict with the DOM onmessage type, but still
// Must disable this so it doesn't conflict with the DOM onmessage type, but still
// allows us to declare our own globals that Node's types can "see" and not conflict with
declare var onmessage: never;
declare var onmessage: Bun.__internal.UseLibDomIfAvailable<"onmessage", never>;

270  packages/bun-types/redis.d.ts  vendored
@@ -52,21 +52,25 @@ declare module "bun" {
|
||||
|
||||
export namespace RedisClient {
|
||||
type KeyLike = string | ArrayBufferView | Blob;
|
||||
type StringPubSubListener = (message: string, channel: string) => void;
|
||||
|
||||
// Buffer subscriptions are not yet implemented
|
||||
// type BufferPubSubListener = (message: Uint8Array<ArrayBuffer>, channel: string) => void;
|
||||
}
|
||||
|
||||
export class RedisClient {
|
||||
/**
|
||||
* Creates a new Redis client
|
||||
* @param url URL to connect to, defaults to process.env.VALKEY_URL, process.env.REDIS_URL, or "valkey://localhost:6379"
|
||||
*
|
||||
* @param url URL to connect to, defaults to `process.env.VALKEY_URL`,
|
||||
* `process.env.REDIS_URL`, or `"valkey://localhost:6379"`
|
||||
* @param options Additional options
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const valkey = new RedisClient();
|
||||
*
|
||||
* await valkey.set("hello", "world");
|
||||
*
|
||||
* console.log(await valkey.get("hello"));
|
||||
* const redis = new RedisClient();
|
||||
* await redis.set("hello", "world");
|
||||
* console.log(await redis.get("hello"));
|
||||
* ```
|
||||
*/
|
||||
constructor(url?: string, options?: RedisOptions);
|
||||
@@ -88,12 +92,14 @@ declare module "bun" {
|
||||
|
||||
/**
|
||||
* Callback fired when the client disconnects from the Redis server
|
||||
*
|
||||
* @param error The error that caused the disconnection
|
||||
*/
|
||||
onclose: ((this: RedisClient, error: Error) => void) | null;
|
||||
|
||||
/**
|
||||
* Connect to the Redis server
|
||||
*
|
||||
* @returns A promise that resolves when connected
|
||||
*/
|
||||
connect(): Promise<void>;
|
||||
@@ -152,10 +158,12 @@ declare module "bun" {
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, px: "PX", milliseconds: number): Promise<"OK">;
|
||||
|
||||
/**
|
||||
* Set key to hold the string value with expiration at a specific Unix timestamp
|
||||
* Set key to hold the string value with expiration at a specific Unix
|
||||
* timestamp
|
||||
* @param key The key to set
|
||||
* @param value The value to set
|
||||
* @param exat Set the specified Unix time at which the key will expire, in seconds
|
||||
* @param exat Set the specified Unix time at which the key will expire, in
|
||||
* seconds
|
||||
* @returns Promise that resolves with "OK" on success
|
||||
*/
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, exat: "EXAT", timestampSeconds: number): Promise<"OK">;
|
||||
@@ -179,7 +187,8 @@ declare module "bun" {
|
||||
* @param key The key to set
|
||||
* @param value The value to set
|
||||
* @param nx Only set the key if it does not already exist
|
||||
* @returns Promise that resolves with "OK" on success, or null if the key already exists
|
||||
* @returns Promise that resolves with "OK" on success, or null if the key
|
||||
* already exists
|
||||
*/
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, nx: "NX"): Promise<"OK" | null>;
|
||||
|
||||
@@ -188,7 +197,8 @@ declare module "bun" {
|
||||
* @param key The key to set
|
||||
* @param value The value to set
|
||||
* @param xx Only set the key if it already exists
|
||||
* @returns Promise that resolves with "OK" on success, or null if the key does not exist
|
||||
* @returns Promise that resolves with "OK" on success, or null if the key
|
||||
* does not exist
|
||||
*/
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, xx: "XX"): Promise<"OK" | null>;
|
||||
|
||||
@@ -196,8 +206,10 @@ declare module "bun" {
|
||||
* Set key to hold the string value and return the old value
|
||||
* @param key The key to set
|
||||
* @param value The value to set
|
||||
* @param get Return the old string stored at key, or null if key did not exist
|
||||
* @returns Promise that resolves with the old value, or null if key did not exist
|
||||
* @param get Return the old string stored at key, or null if key did not
|
||||
* exist
|
||||
* @returns Promise that resolves with the old value, or null if key did not
|
||||
* exist
|
||||
*/
|
||||
set(key: RedisClient.KeyLike, value: RedisClient.KeyLike, get: "GET"): Promise<string | null>;
|
||||
|
||||
@@ -243,7 +255,8 @@ declare module "bun" {
|
||||
/**
|
||||
* Determine if a key exists
|
||||
* @param key The key to check
|
||||
* @returns Promise that resolves with true if the key exists, false otherwise
|
||||
* @returns Promise that resolves with true if the key exists, false
|
||||
* otherwise
|
||||
*/
|
||||
exists(key: RedisClient.KeyLike): Promise<boolean>;
|
||||
|
||||
@@ -258,7 +271,8 @@ declare module "bun" {
|
||||
/**
|
||||
* Get the time to live for a key in seconds
|
||||
* @param key The key to get the TTL for
|
||||
* @returns Promise that resolves with the TTL, -1 if no expiry, or -2 if key doesn't exist
|
||||
* @returns Promise that resolves with the TTL, -1 if no expiry, or -2 if
|
||||
* key doesn't exist
|
||||
*/
|
||||
ttl(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
@@ -270,6 +284,14 @@ declare module "bun" {
|
||||
*/
|
||||
hmset(key: RedisClient.KeyLike, fieldValues: string[]): Promise<string>;
|
||||
|
||||
/**
|
||||
* Get the value of a hash field
|
||||
* @param key The hash key
|
||||
* @param field The field to get
|
||||
* @returns Promise that resolves with the field value or null if the field doesn't exist
|
||||
*/
|
||||
hget(key: RedisClient.KeyLike, field: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the values of all the given hash fields
|
||||
* @param key The hash key
|
||||
@@ -282,7 +304,8 @@ declare module "bun" {
|
||||
* Check if a value is a member of a set
|
||||
* @param key The set key
|
||||
* @param member The member to check
|
||||
* @returns Promise that resolves with true if the member exists, false otherwise
|
||||
* @returns Promise that resolves with true if the member exists, false
|
||||
* otherwise
|
||||
*/
|
||||
sismember(key: RedisClient.KeyLike, member: string): Promise<boolean>;
|
||||
|
||||
@@ -290,7 +313,8 @@ declare module "bun" {
|
||||
* Add a member to a set
|
||||
* @param key The set key
|
||||
* @param member The member to add
|
||||
* @returns Promise that resolves with 1 if the member was added, 0 if it already existed
|
||||
* @returns Promise that resolves with 1 if the member was added, 0 if it
|
||||
* already existed
|
||||
*/
|
||||
sadd(key: RedisClient.KeyLike, member: string): Promise<number>;
|
||||
|
||||
@@ -298,7 +322,8 @@ declare module "bun" {
|
||||
* Remove a member from a set
|
||||
* @param key The set key
|
||||
* @param member The member to remove
|
||||
* @returns Promise that resolves with 1 if the member was removed, 0 if it didn't exist
|
||||
* @returns Promise that resolves with 1 if the member was removed, 0 if it
|
||||
* didn't exist
|
||||
*/
|
||||
srem(key: RedisClient.KeyLike, member: string): Promise<number>;
|
||||
|
||||
@@ -312,14 +337,16 @@ declare module "bun" {
|
||||
/**
|
||||
* Get a random member from a set
|
||||
* @param key The set key
|
||||
* @returns Promise that resolves with a random member, or null if the set is empty
|
||||
* @returns Promise that resolves with a random member, or null if the set
|
||||
* is empty
|
||||
*/
|
||||
srandmember(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Remove and return a random member from a set
|
||||
* @param key The set key
|
||||
* @returns Promise that resolves with the removed member, or null if the set is empty
|
||||
* @returns Promise that resolves with the removed member, or null if the
|
||||
* set is empty
|
||||
*/
|
||||
spop(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
@@ -386,28 +413,32 @@ declare module "bun" {
|
||||
/**
|
||||
* Remove and get the first element in a list
|
||||
* @param key The list key
|
||||
* @returns Promise that resolves with the first element, or null if the list is empty
|
||||
* @returns Promise that resolves with the first element, or null if the
|
||||
* list is empty
|
||||
*/
|
||||
lpop(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Remove the expiration from a key
|
||||
* @param key The key to persist
|
||||
* @returns Promise that resolves with 1 if the timeout was removed, 0 if the key doesn't exist or has no timeout
|
||||
* @returns Promise that resolves with 1 if the timeout was removed, 0 if
|
||||
* the key doesn't exist or has no timeout
|
||||
*/
|
||||
persist(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get the expiration time of a key as a UNIX timestamp in milliseconds
|
||||
* @param key The key to check
|
||||
* @returns Promise that resolves with the timestamp, or -1 if the key has no expiration, or -2 if the key doesn't exist
|
||||
* @returns Promise that resolves with the timestamp, or -1 if the key has
|
||||
* no expiration, or -2 if the key doesn't exist
|
||||
*/
|
||||
pexpiretime(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get the time to live for a key in milliseconds
|
||||
* @param key The key to check
|
||||
* @returns Promise that resolves with the TTL in milliseconds, or -1 if the key has no expiration, or -2 if the key doesn't exist
|
||||
* @returns Promise that resolves with the TTL in milliseconds, or -1 if the
|
||||
* key has no expiration, or -2 if the key doesn't exist
|
||||
*/
|
||||
pttl(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
@@ -421,42 +452,48 @@ declare module "bun" {
|
||||
/**
|
||||
* Get the number of members in a set
|
||||
* @param key The set key
|
||||
* @returns Promise that resolves with the cardinality (number of elements) of the set
|
||||
* @returns Promise that resolves with the cardinality (number of elements)
|
||||
* of the set
|
||||
*/
|
||||
scard(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get the length of the value stored in a key
|
||||
* @param key The key to check
|
||||
* @returns Promise that resolves with the length of the string value, or 0 if the key doesn't exist
|
||||
* @returns Promise that resolves with the length of the string value, or 0
|
||||
* if the key doesn't exist
|
||||
*/
|
||||
strlen(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get the number of members in a sorted set
|
||||
* @param key The sorted set key
|
||||
* @returns Promise that resolves with the cardinality (number of elements) of the sorted set
|
||||
* @returns Promise that resolves with the cardinality (number of elements)
|
||||
* of the sorted set
|
||||
*/
|
||||
zcard(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Remove and return members with the highest scores in a sorted set
|
||||
* @param key The sorted set key
|
||||
* @returns Promise that resolves with the removed member and its score, or null if the set is empty
|
||||
* @returns Promise that resolves with the removed member and its score, or
|
||||
* null if the set is empty
|
||||
*/
|
||||
zpopmax(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Remove and return members with the lowest scores in a sorted set
|
||||
* @param key The sorted set key
|
||||
* @returns Promise that resolves with the removed member and its score, or null if the set is empty
|
||||
* @returns Promise that resolves with the removed member and its score, or
|
||||
* null if the set is empty
|
||||
*/
|
||||
zpopmin(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get one or multiple random members from a sorted set
|
||||
* @param key The sorted set key
|
||||
* @returns Promise that resolves with a random member, or null if the set is empty
|
||||
* @returns Promise that resolves with a random member, or null if the set
|
||||
* is empty
|
||||
*/
|
||||
zrandmember(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
@@ -464,7 +501,8 @@ declare module "bun" {
|
||||
* Append a value to a key
|
||||
* @param key The key to append to
|
||||
* @param value The value to append
|
||||
* @returns Promise that resolves with the length of the string after the append operation
|
||||
* @returns Promise that resolves with the length of the string after the
|
||||
* append operation
|
||||
*/
|
||||
append(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
@@ -472,7 +510,8 @@ declare module "bun" {
|
||||
* Set the value of a key and return its old value
|
||||
* @param key The key to set
|
||||
* @param value The value to set
|
||||
* @returns Promise that resolves with the old value, or null if the key didn't exist
|
||||
* @returns Promise that resolves with the old value, or null if the key
|
||||
* didn't exist
|
||||
*/
|
||||
getset(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
@@ -480,7 +519,8 @@ declare module "bun" {
|
||||
* Prepend one or multiple values to a list
|
||||
* @param key The list key
|
||||
* @param value The value to prepend
|
||||
* @returns Promise that resolves with the length of the list after the push operation
|
||||
* @returns Promise that resolves with the length of the list after the push
|
||||
* operation
|
||||
*/
|
||||
lpush(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
@@ -488,7 +528,8 @@ declare module "bun" {
|
||||
* Prepend a value to a list, only if the list exists
|
||||
* @param key The list key
|
||||
* @param value The value to prepend
|
||||
* @returns Promise that resolves with the length of the list after the push operation, or 0 if the list doesn't exist
|
||||
* @returns Promise that resolves with the length of the list after the push
|
||||
* operation, or 0 if the list doesn't exist
|
||||
*/
|
||||
lpushx(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
@@ -496,7 +537,8 @@ declare module "bun" {
|
||||
* Add one or more members to a HyperLogLog
|
||||
* @param key The HyperLogLog key
|
||||
* @param element The element to add
|
||||
* @returns Promise that resolves with 1 if the HyperLogLog was altered, 0 otherwise
|
||||
* @returns Promise that resolves with 1 if the HyperLogLog was altered, 0
|
||||
* otherwise
|
||||
*/
|
||||
pfadd(key: RedisClient.KeyLike, element: string): Promise<number>;
|
||||
|
||||
@@ -504,7 +546,8 @@ declare module "bun" {
|
||||
* Append one or multiple values to a list
|
||||
* @param key The list key
|
||||
* @param value The value to append
|
||||
* @returns Promise that resolves with the length of the list after the push operation
|
||||
* @returns Promise that resolves with the length of the list after the push
|
||||
* operation
|
||||
*/
|
||||
rpush(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
@@ -512,7 +555,8 @@ declare module "bun" {
|
||||
* Append a value to a list, only if the list exists
|
||||
* @param key The list key
|
||||
* @param value The value to append
|
||||
* @returns Promise that resolves with the length of the list after the push operation, or 0 if the list doesn't exist
|
||||
* @returns Promise that resolves with the length of the list after the push
|
||||
* operation, or 0 if the list doesn't exist
|
||||
*/
|
||||
rpushx(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
@@ -520,7 +564,8 @@ declare module "bun" {
|
||||
* Set the value of a key, only if the key does not exist
|
||||
* @param key The key to set
|
||||
* @param value The value to set
|
||||
* @returns Promise that resolves with 1 if the key was set, 0 if the key was not set
|
||||
* @returns Promise that resolves with 1 if the key was set, 0 if the key
|
||||
* was not set
|
||||
*/
|
||||
setnx(key: RedisClient.KeyLike, value: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
@@ -528,14 +573,16 @@ declare module "bun" {
|
||||
* Get the score associated with the given member in a sorted set
|
||||
* @param key The sorted set key
|
||||
* @param member The member to get the score for
|
||||
* @returns Promise that resolves with the score of the member as a string, or null if the member or key doesn't exist
|
||||
* @returns Promise that resolves with the score of the member as a string,
|
||||
* or null if the member or key doesn't exist
|
||||
*/
|
||||
zscore(key: RedisClient.KeyLike, member: string): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the values of all specified keys
|
||||
* @param keys The keys to get
|
||||
* @returns Promise that resolves with an array of values, with null for keys that don't exist
|
||||
* @returns Promise that resolves with an array of values, with null for
|
||||
* keys that don't exist
|
||||
*/
|
||||
mget(...keys: RedisClient.KeyLike[]): Promise<(string | null)[]>;
|
||||
|
||||
@@ -549,37 +596,46 @@ declare module "bun" {
|
||||
/**
|
||||
* Return a serialized version of the value stored at the specified key
|
||||
* @param key The key to dump
|
||||
* @returns Promise that resolves with the serialized value, or null if the key doesn't exist
|
||||
* @returns Promise that resolves with the serialized value, or null if the
|
||||
* key doesn't exist
|
||||
*/
|
||||
dump(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the expiration time of a key as a UNIX timestamp in seconds
|
||||
*
|
||||
* @param key The key to check
|
||||
* @returns Promise that resolves with the timestamp, or -1 if the key has no expiration, or -2 if the key doesn't exist
|
||||
* @returns Promise that resolves with the timestamp, or -1 if the key has
|
||||
* no expiration, or -2 if the key doesn't exist
|
||||
*/
|
||||
expiretime(key: RedisClient.KeyLike): Promise<number>;
|
||||
|
||||
/**
|
||||
* Get the value of a key and delete the key
|
||||
*
|
||||
* @param key The key to get and delete
|
||||
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
|
||||
* @returns Promise that resolves with the value of the key, or null if the
|
||||
* key doesn't exist
|
||||
*/
|
||||
getdel(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the value of a key and optionally set its expiration
|
||||
*
|
||||
* @param key The key to get
|
||||
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
|
||||
* @returns Promise that resolves with the value of the key, or null if the
|
||||
* key doesn't exist
|
||||
*/
|
||||
getex(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the value of a key and set its expiration in seconds
|
||||
*
|
||||
* @param key The key to get
|
||||
* @param ex Set the specified expire time, in seconds
|
||||
* @param seconds The number of seconds until expiration
|
||||
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
|
||||
* @returns Promise that resolves with the value of the key, or null if the
|
||||
* key doesn't exist
|
||||
*/
|
||||
getex(key: RedisClient.KeyLike, ex: "EX", seconds: number): Promise<string | null>;
|
||||
|
||||
@@ -594,6 +650,7 @@ declare module "bun" {
|
||||
|
||||
/**
|
||||
* Get the value of a key and set its expiration at a specific Unix timestamp in seconds
|
||||
*
|
||||
* @param key The key to get
|
||||
* @param exat Set the specified Unix time at which the key will expire, in seconds
|
||||
* @param timestampSeconds The Unix timestamp in seconds
|
||||
@@ -603,6 +660,7 @@ declare module "bun" {
|
||||
|
||||
/**
|
||||
* Get the value of a key and set its expiration at a specific Unix timestamp in milliseconds
|
||||
*
|
||||
* @param key The key to get
|
||||
* @param pxat Set the specified Unix time at which the key will expire, in milliseconds
|
||||
* @param timestampMilliseconds The Unix timestamp in milliseconds
|
||||
@@ -612,6 +670,7 @@ declare module "bun" {
|
||||
|
||||
/**
|
||||
* Get the value of a key and remove its expiration
|
||||
*
|
||||
* @param key The key to get
|
||||
* @param persist Remove the expiration from the key
|
||||
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
|
||||
@@ -626,10 +685,133 @@ declare module "bun" {
|
||||
|
||||
/**
|
||||
* Ping the server with a message
|
||||
*
|
||||
* @param message The message to send to the server
|
||||
* @returns Promise that resolves with the message if the server is reachable, or throws an error if the server is not reachable
|
||||
*/
|
||||
ping(message: RedisClient.KeyLike): Promise<string>;
|
||||
|
||||
/**
|
||||
* Publish a message to a Redis channel.
|
||||
*
|
||||
* @param channel The channel to publish to.
|
||||
* @param message The message to publish.
|
||||
*
|
||||
* @returns The number of clients that received the message. Note that in a
|
||||
* cluster this returns the total number of clients in the same node.
|
||||
*/
|
||||
publish(channel: string, message: string): Promise<number>;
|
||||
|
||||
/**
|
||||
* Subscribe to a Redis channel.
|
||||
*
|
||||
* Subscribing disables automatic pipelining, so all commands will be
|
||||
* received immediately.
|
||||
*
|
||||
* Subscribing moves the channel to a dedicated subscription state which
|
||||
* prevents most other commands from being executed until unsubscribed. Only
|
||||
* {@link ping `.ping()`}, {@link subscribe `.subscribe()`}, and
|
||||
* {@link unsubscribe `.unsubscribe()`} are legal to invoke in a subscribed
|
||||
* upon channel.
|
||||
*
|
||||
* @param channel The channel to subscribe to.
|
||||
* @param listener The listener to call when a message is received on the
|
||||
* channel. The listener will receive the message as the first argument and
|
||||
* the channel as the second argument.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* await client.subscribe("my-channel", (message, channel) => {
|
||||
* console.log(`Received message on ${channel}: ${message}`);
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
subscribe(channel: string, listener: RedisClient.StringPubSubListener): Promise<number>;
|
||||
|
||||
/**
|
||||
* Subscribe to multiple Redis channels.
|
||||
*
|
||||
* Subscribing disables automatic pipelining, so all commands will be
|
||||
* received immediately.
|
||||
*
|
||||
* Subscribing moves the channels to a dedicated subscription state in which
|
||||
* only a limited set of commands can be executed.
|
||||
*
|
||||
* @param channels An array of channels to subscribe to.
|
||||
* @param listener The listener to call when a message is received on any of
|
||||
* the subscribed channels. The listener will receive the message as the
|
||||
* first argument and the channel as the second argument.
|
||||
*/
|
||||
subscribe(channels: string[], listener: RedisClient.StringPubSubListener): Promise<number>;
|
||||
|
||||
/**
|
||||
* Unsubscribe from a singular Redis channel.
|
||||
*
|
||||
* @param channel The channel to unsubscribe from.
|
||||
*
|
||||
* If there are no more channels subscribed to, the client automatically
|
||||
* re-enables pipelining if it was previously enabled.
|
||||
*
|
||||
* Unsubscribing moves the channel back to a normal state out of the
|
||||
* subscription state if all channels have been unsubscribed from. For
|
||||
* further details on the subscription state, see
|
||||
* {@link subscribe `.subscribe()`}.
|
||||
*/
|
||||
unsubscribe(channel: string): Promise<void>;
|
||||
|
||||
/**
|
||||
* Remove a listener from a given Redis channel.
|
||||
*
|
||||
* If there are no more channels subscribed to, the client automatically
|
||||
* re-enables pipelining if it was previously enabled.
|
||||
*
|
||||
* Unsubscribing moves the channel back to a normal state out of the
|
||||
* subscription state if all channels have been unsubscribed from. For
|
||||
* further details on the subscription state, see
|
||||
* {@link subscribe `.subscribe()`}.
|
||||
*
|
||||
* @param channel The channel to unsubscribe from.
|
||||
* @param listener The listener to remove. This is tested against
|
||||
* referential equality so you must pass the exact same listener instance as
|
||||
* when subscribing.
|
||||
*/
|
||||
unsubscribe(channel: string, listener: RedisClient.StringPubSubListener): Promise<void>;
|
||||
|
||||
/**
|
||||
* Unsubscribe from all registered Redis channels.
|
||||
*
|
||||
* The client will automatically re-enable pipelining if it was previously
|
||||
* enabled.
|
||||
*
|
||||
* Unsubscribing moves the channel back to a normal state out of the
|
||||
* subscription state if all channels have been unsubscribed from. For
|
||||
* further details on the subscription state, see
|
||||
* {@link subscribe `.subscribe()`}.
|
||||
*/
|
||||
unsubscribe(): Promise<void>;
|
||||
|
||||
/**
|
||||
* Unsubscribe from multiple Redis channels.
|
||||
*
|
||||
* @param channels An array of channels to unsubscribe from.
|
||||
*
|
||||
* If there are no more channels subscribed to, the client automatically
|
||||
* re-enables pipelining if it was previously enabled.
|
||||
*
|
||||
* Unsubscribing moves the channel back to a normal state out of the
|
||||
* subscription state if all channels have been unsubscribed from. For
|
||||
* further details on the subscription state, see
|
||||
* {@link subscribe `.subscribe()`}.
|
||||
*/
|
||||
unsubscribe(channels: string[]): Promise<void>;
|
||||
|
||||
/**
|
||||
* @brief Create a new RedisClient instance with the same configuration as
|
||||
* the current instance.
|
||||
*
|
||||
* This will open up a new connection to the Redis server.
|
||||
*/
|
||||
duplicate(): Promise<RedisClient>;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
4  packages/bun-types/shell.d.ts  vendored

@@ -58,7 +58,7 @@ declare module "bun" {
* // "bun"
* ```
*/
function env(newEnv?: Record<string, string | undefined>): $;
function env(newEnv?: Record<string, string | undefined> | NodeJS.Dict<string> | undefined): $;

/**
*

@@ -106,7 +106,7 @@ declare module "bun" {
* expect(stdout.toString()).toBe("LOL!");
* ```
*/
env(newEnv: Record<string, string> | undefined): this;
env(newEnv: Record<string, string | undefined> | NodeJS.Dict<string> | undefined): this;

/**
* By default, the shell will write to the current process's stdout and stderr, as well as buffering that output.

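A likely reason for widening both env() overloads above is that process.env is typed as NodeJS.Dict<string>, whose values are string | undefined; a minimal sketch, assuming the Bun Shell $ API:

```ts
import { $ } from "bun";

// Previously this needed a cast, since process.env values may be undefined.
$.env(process.env);

// The per-command overload accepts the same shape.
await $`echo $GREETING`.env({ ...process.env, GREETING: "hello" });
```
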
122  packages/bun-types/sql.d.ts  vendored

@@ -12,6 +12,68 @@ declare module "bun" {
release(): void;
}

type ArrayType =
| "BOOLEAN"
| "BYTEA"
| "CHAR"
| "NAME"
| "TEXT"
| "CHAR"
| "VARCHAR"
| "SMALLINT"
| "INT2VECTOR"
| "INTEGER"
| "INT"
| "BIGINT"
| "REAL"
| "DOUBLE PRECISION"
| "NUMERIC"
| "MONEY"
| "OID"
| "TID"
| "XID"
| "CID"
| "JSON"
| "JSONB"
| "JSONPATH"
| "XML"
| "POINT"
| "LSEG"
| "PATH"
| "BOX"
| "POLYGON"
| "LINE"
| "CIRCLE"
| "CIDR"
| "MACADDR"
| "INET"
| "MACADDR8"
| "DATE"
| "TIME"
| "TIMESTAMP"
| "TIMESTAMPTZ"
| "INTERVAL"
| "TIMETZ"
| "BIT"
| "VARBIT"
| "ACLITEM"
| "PG_DATABASE"
| (string & {});

/**
* Represents a SQL array parameter
*/
interface SQLArrayParameter {
/**
* The serialized values of the array parameter
*/
serializedValues: string;
/**
* The type of the array parameter
*/
arrayType: ArrayType;
}

/**
* Represents a client within a transaction context Extends SQL with savepoint
* functionality

@@ -41,22 +103,22 @@

class PostgresError extends SQLError {
public readonly code: string;
public readonly errno: string | undefined;
public readonly detail: string | undefined;
public readonly hint: string | undefined;
public readonly severity: string | undefined;
public readonly position: string | undefined;
public readonly internalPosition: string | undefined;
public readonly internalQuery: string | undefined;
public readonly where: string | undefined;
public readonly schema: string | undefined;
public readonly table: string | undefined;
public readonly column: string | undefined;
public readonly dataType: string | undefined;
public readonly constraint: string | undefined;
public readonly file: string | undefined;
public readonly line: string | undefined;
public readonly routine: string | undefined;
public readonly errno?: string | undefined;
public readonly detail?: string | undefined;
public readonly hint?: string | undefined;
public readonly severity?: string | undefined;
public readonly position?: string | undefined;
public readonly internalPosition?: string | undefined;
public readonly internalQuery?: string | undefined;
public readonly where?: string | undefined;
public readonly schema?: string | undefined;
public readonly table?: string | undefined;
public readonly column?: string | undefined;
public readonly dataType?: string | undefined;
public readonly constraint?: string | undefined;
public readonly file?: string | undefined;
public readonly line?: string | undefined;
public readonly routine?: string | undefined;

constructor(
message: string,

@@ -84,8 +146,8 @@ declare module "bun" {

class MySQLError extends SQLError {
public readonly code: string;
public readonly errno: number | undefined;
public readonly sqlState: string | undefined;
public readonly errno?: number | undefined;
public readonly sqlState?: string | undefined;
constructor(message: string, options: { code: string; errno: number | undefined; sqlState: string | undefined });
}

@@ -143,13 +205,13 @@ declare module "bun" {

/**
* Database server hostname
* @deprecated Prefer {@link hostname}
* @default "localhost"
*/
host?: string | undefined;

/**
* Database server hostname (alias for host)
* @deprecated Prefer {@link host}
* Database server hostname
* @default "localhost"
*/
hostname?: string | undefined;

@@ -264,13 +326,14 @@ declare module "bun" {
* Whether to use TLS/SSL for the connection
* @default false
*/
tls?: TLSOptions | boolean | undefined;
tls?: Bun.BunFile | TLSOptions | boolean | undefined;

/**
* Whether to use TLS/SSL for the connection (alias for tls)
* @deprecated Prefer {@link tls}
* @default false
*/
ssl?: TLSOptions | boolean | undefined;
ssl?: Bun.BunFile | TLSOptions | boolean | undefined;

/**
* Unix domain socket path for connection

@@ -629,6 +692,21 @@ declare module "bun" {
*/
reserve(): Promise<ReservedSQL>;

/**
* Creates a new SQL array parameter
* @param values - The values to create the array parameter from
* @param typeNameOrTypeID - The type name or type ID to create the array parameter from, if omitted it will default to JSON
* @returns A new SQL array parameter
*
* @example
* ```ts
* const array = sql.array([1, 2, 3], "INT");
* await sql`CREATE TABLE users_posts (user_id INT, posts_id INT[])`;
* await sql`INSERT INTO users_posts (user_id, posts_id) VALUES (${user.id}, ${array})`;
* ```
*/
array(values: any[], typeNameOrTypeID?: number | ArrayType): SQLArrayParameter;

/**
* Begins a new transaction.
*

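The array() helper documented above composes with Bun's tagged-template sql client; a short sketch adapted from that JSDoc example (the table layout is illustrative):

```ts
import { sql } from "bun";

// "INT" is one of the ArrayType names introduced above; omitting the second
// argument falls back to JSON serialization per the JSDoc.
const posts = sql.array([1, 2, 3], "INT");

await sql`CREATE TABLE IF NOT EXISTS users_posts (user_id INT, posts_id INT[])`;
await sql`INSERT INTO users_posts (user_id, posts_id) VALUES (${42}, ${posts})`;
```
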
325  packages/bun-types/test.d.ts  vendored
@@ -14,11 +14,6 @@
|
||||
* ```
|
||||
*/
|
||||
declare module "bun:test" {
|
||||
/**
|
||||
* -- Mocks --
|
||||
*
|
||||
* @category Testing
|
||||
*/
|
||||
export type Mock<T extends (...args: any[]) => any> = JestMock.Mock<T>;
|
||||
|
||||
export const mock: {
|
||||
@@ -96,6 +91,7 @@ declare module "bun:test" {
|
||||
export namespace jest {
|
||||
function restoreAllMocks(): void;
|
||||
function clearAllMocks(): void;
|
||||
function resetAllMocks(): void;
|
||||
function fn<T extends (...args: any[]) => any>(func?: T): Mock<T>;
|
||||
function setSystemTime(now?: number | Date): void;
|
||||
function setTimeout(milliseconds: number): void;
|
||||
@@ -185,6 +181,9 @@ declare module "bun:test" {
|
||||
* Clear all mock state (calls, results, etc.) without restoring original implementation
|
||||
*/
|
||||
clearAllMocks: typeof jest.clearAllMocks;
|
||||
resetAllMocks: typeof jest.resetAllMocks;
|
||||
useFakeTimers: typeof jest.useFakeTimers;
|
||||
useRealTimers: typeof jest.useRealTimers;
|
||||
};
|
||||
|
||||
interface FunctionLike {
|
||||
@@ -211,31 +210,31 @@ declare module "bun:test" {
|
||||
*
|
||||
* @category Testing
|
||||
*/
|
||||
export interface Describe {
|
||||
export interface Describe<T extends Readonly<any[]>> {
|
||||
(fn: () => void): void;
|
||||
|
||||
(label: DescribeLabel, fn: () => void): void;
|
||||
(label: DescribeLabel, fn: (...args: T) => void): void;
|
||||
/**
|
||||
* Skips all other tests, except this group of tests.
|
||||
*
|
||||
* @param label the label for the tests
|
||||
* @param fn the function that defines the tests
|
||||
*/
|
||||
only(label: DescribeLabel, fn: () => void): void;
|
||||
only: Describe<T>;
|
||||
/**
|
||||
* Skips this group of tests.
|
||||
*
|
||||
* @param label the label for the tests
|
||||
* @param fn the function that defines the tests
|
||||
*/
|
||||
skip(label: DescribeLabel, fn: () => void): void;
|
||||
skip: Describe<T>;
|
||||
/**
|
||||
* Marks this group of tests as to be written or to be fixed.
|
||||
*
|
||||
* @param label the label for the tests
|
||||
* @param fn the function that defines the tests
|
||||
*/
|
||||
todo(label: DescribeLabel, fn?: () => void): void;
|
||||
todo: Describe<T>;
|
||||
/**
|
||||
* Marks this group of tests to be executed concurrently.
|
||||
*/
|
||||
concurrent: Describe<T>;
|
||||
/**
|
||||
* Marks this group of tests to be executed serially (one after another),
|
||||
* even when the --concurrent flag is used.
|
||||
*/
|
||||
serial: Describe<T>;
|
||||
/**
|
||||
* Runs this group of tests, only if `condition` is true.
|
||||
*
|
||||
@@ -243,37 +242,27 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param condition if these tests should run
|
||||
*/
|
||||
if(condition: boolean): (label: DescribeLabel, fn: () => void) => void;
|
||||
if(condition: boolean): Describe<T>;
|
||||
/**
|
||||
* Skips this group of tests, if `condition` is true.
|
||||
*
|
||||
* @param condition if these tests should be skipped
|
||||
*/
|
||||
skipIf(condition: boolean): (label: DescribeLabel, fn: () => void) => void;
|
||||
skipIf(condition: boolean): Describe<T>;
|
||||
/**
|
||||
* Marks this group of tests as to be written or to be fixed, if `condition` is true.
|
||||
*
|
||||
* @param condition if these tests should be skipped
|
||||
*/
|
||||
todoIf(condition: boolean): (label: DescribeLabel, fn: () => void) => void;
|
||||
todoIf(condition: boolean): Describe<T>;
|
||||
/**
|
||||
* Returns a function that runs for each item in `table`.
|
||||
*
|
||||
* @param table Array of Arrays with the arguments that are passed into the test fn for each row.
|
||||
*/
|
||||
each<T extends Readonly<[any, ...any[]]>>(
|
||||
table: readonly T[],
|
||||
): (label: DescribeLabel, fn: (...args: [...T]) => void | Promise<unknown>, options?: number | TestOptions) => void;
|
||||
each<T extends any[]>(
|
||||
table: readonly T[],
|
||||
): (
|
||||
label: DescribeLabel,
|
||||
fn: (...args: Readonly<T>) => void | Promise<unknown>,
|
||||
options?: number | TestOptions,
|
||||
) => void;
|
||||
each<T>(
|
||||
table: T[],
|
||||
): (label: DescribeLabel, fn: (...args: T[]) => void | Promise<unknown>, options?: number | TestOptions) => void;
|
||||
each<T extends Readonly<[any, ...any[]]>>(table: readonly T[]): Describe<[...T]>;
|
||||
each<T extends any[]>(table: readonly T[]): Describe<[...T]>;
|
||||
each<T>(table: T[]): Describe<[T]>;
|
||||
}
|
||||
/**
|
||||
* Describes a group of related tests.
|
||||
@@ -291,7 +280,7 @@ declare module "bun:test" {
|
||||
* @param label the label for the tests
|
||||
* @param fn the function that defines the tests
|
||||
*/
|
||||
export const describe: Describe;
|
||||
export const describe: Describe<[]>;
|
||||
/**
|
||||
* Skips a group of related tests.
|
||||
*
|
||||
@@ -300,7 +289,9 @@ declare module "bun:test" {
|
||||
* @param label the label for the tests
|
||||
* @param fn the function that defines the tests
|
||||
*/
|
||||
export const xdescribe: Describe;
|
||||
export const xdescribe: Describe<[]>;
|
||||
|
||||
type HookOptions = number | { timeout?: number };
|
||||
/**
|
||||
* Runs a function, once, before all the tests.
|
||||
*
|
||||
@@ -317,7 +308,10 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param fn the function to run
|
||||
*/
|
||||
export function beforeAll(fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void)): void;
|
||||
export function beforeAll(
|
||||
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
|
||||
options?: HookOptions,
|
||||
): void;
|
||||
/**
|
||||
* Runs a function before each test.
|
||||
*
|
||||
@@ -328,7 +322,10 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param fn the function to run
|
||||
*/
|
||||
export function beforeEach(fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void)): void;
|
||||
export function beforeEach(
|
||||
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
|
||||
options?: HookOptions,
|
||||
): void;
|
||||
/**
|
||||
* Runs a function, once, after all the tests.
|
||||
*
|
||||
@@ -345,7 +342,10 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param fn the function to run
|
||||
*/
|
||||
export function afterAll(fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void)): void;
|
||||
export function afterAll(
|
||||
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
|
||||
options?: HookOptions,
|
||||
): void;
|
||||
/**
|
||||
* Runs a function after each test.
|
||||
*
|
||||
@@ -354,7 +354,10 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param fn the function to run
|
||||
*/
|
||||
export function afterEach(fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void)): void;
|
||||
export function afterEach(
|
||||
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
|
||||
options?: HookOptions,
|
||||
): void;
|
||||
/**
|
||||
* Sets the default timeout for all tests in the current file. If a test specifies a timeout, it will
|
||||
* override this value. The default timeout is 5000ms (5 seconds).
|
||||
@@ -387,6 +390,11 @@ declare module "bun:test" {
|
||||
*/
|
||||
repeats?: number;
|
||||
}
|
||||
type IsTuple<T> = T extends readonly unknown[]
|
||||
? number extends T["length"]
|
||||
? false // It's an array with unknown length, not a tuple
|
||||
: true // It's an array with a fixed length (a tuple)
|
||||
: false; // Not an array at all
|
||||
/**
|
||||
* Runs a test.
|
||||
*
|
||||
@@ -410,10 +418,10 @@ declare module "bun:test" {
|
||||
*
|
||||
* @category Testing
|
||||
*/
|
||||
export interface Test {
|
||||
export interface Test<T extends Readonly<any[]>> {
|
||||
(
|
||||
label: string,
|
||||
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
|
||||
fn: (...args: IsTuple<T> extends true ? [...T, (err?: unknown) => void] : T) => void | Promise<unknown>,
|
||||
/**
|
||||
* - If a `number`, sets the timeout for the test in milliseconds.
|
||||
* - If an `object`, sets the options for the test.
|
||||
@@ -424,29 +432,13 @@ declare module "bun:test" {
|
||||
options?: number | TestOptions,
|
||||
): void;
|
||||
/**
|
||||
* Skips all other tests, except this test when run with the `--only` option.
|
||||
*
|
||||
* @param label the label for the test
|
||||
* @param fn the test function
|
||||
* @param options the test timeout or options
|
||||
* Skips all other tests, except this test.
|
||||
*/
|
||||
only(
|
||||
label: string,
|
||||
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
|
||||
options?: number | TestOptions,
|
||||
): void;
|
||||
only: Test<T>;
|
||||
/**
|
||||
* Skips this test.
|
||||
*
|
||||
* @param label the label for the test
|
||||
* @param fn the test function
|
||||
* @param options the test timeout or options
|
||||
*/
|
||||
skip(
|
||||
label: string,
|
||||
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
|
||||
options?: number | TestOptions,
|
||||
): void;
|
||||
skip: Test<T>;
|
||||
/**
|
||||
* Marks this test as to be written or to be fixed.
|
||||
*
|
||||
@@ -454,16 +446,8 @@ declare module "bun:test" {
|
||||
* if the test passes, the test will be marked as `fail` in the results; you will have to
* remove the `.todo` or check that your test
* is implemented correctly.
*
* @param label the label for the test
* @param fn the test function
* @param options the test timeout or options
*/
todo(
label: string,
fn?: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
): void;
todo: Test<T>;
/**
* Marks this test as failing.
*
@@ -474,16 +458,17 @@ declare module "bun:test" {
*
* `test.failing` is very similar to {@link test.todo} except that it always
* runs, regardless of the `--todo` flag.
*
* @param label the label for the test
* @param fn the test function
* @param options the test timeout or options
*/
failing(
label: string,
fn?: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
): void;
failing: Test<T>;
/**
* Runs the test concurrently with other concurrent tests.
*/
concurrent: Test<T>;
/**
* Forces the test to run serially (not in parallel),
* even when the --concurrent flag is used.
*/
serial: Test<T>;
/**
* Runs this test, if `condition` is true.
*
@@ -491,51 +476,46 @@ declare module "bun:test" {
*
* @param condition if the test should run
*/
if(
condition: boolean,
): (
label: string,
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
) => void;
if(condition: boolean): Test<T>;
/**
* Skips this test, if `condition` is true.
*
* @param condition if the test should be skipped
*/
skipIf(
condition: boolean,
): (
label: string,
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
) => void;
skipIf(condition: boolean): Test<T>;
/**
* Marks this test as to be written or to be fixed, if `condition` is true.
*
* @param condition if the test should be marked TODO
*/
todoIf(
condition: boolean,
): (
label: string,
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
options?: number | TestOptions,
) => void;
todoIf(condition: boolean): Test<T>;
/**
* Marks this test as failing, if `condition` is true.
*
* @param condition if the test should be marked as failing
*/
failingIf(condition: boolean): Test<T>;
/**
* Runs the test concurrently with other concurrent tests, if `condition` is true.
*
* @param condition if the test should run concurrently
*/
concurrentIf(condition: boolean): Test<T>;
/**
* Forces the test to run serially (not in parallel), if `condition` is true.
* This applies even when the --concurrent flag is used.
*
* @param condition if the test should run serially
*/
serialIf(condition: boolean): Test<T>;
/**
* Returns a function that runs for each item in `table`.
*
* @param table Array of Arrays with the arguments that are passed into the test fn for each row.
*/
each<T extends Readonly<[any, ...any[]]>>(
table: readonly T[],
): (label: string, fn: (...args: [...T]) => void | Promise<unknown>, options?: number | TestOptions) => void;
each<T extends any[]>(
table: readonly T[],
): (label: string, fn: (...args: Readonly<T>) => void | Promise<unknown>, options?: number | TestOptions) => void;
each<T>(
table: T[],
): (label: string, fn: (...args: T[]) => void | Promise<unknown>, options?: number | TestOptions) => void;
each<T extends Readonly<[any, ...any[]]>>(table: readonly T[]): Test<[...T]>;
each<T extends any[]>(table: readonly T[]): Test<[...T]>;
each<T>(table: T[]): Test<[T]>;
}
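A quick usage sketch for the `each` overloads above: a readonly tuple table spreads each row into typed callback arguments, while a plain array passes each item as a single argument. The table values below are invented for illustration.

import { test, expect } from "bun:test";

// Tuple table: each row is spread into the callback as typed arguments.
const rows: ReadonlyArray<readonly [number, number, number]> = [
  [1, 2, 3],
  [2, 3, 5],
];
test.each(rows)("adds two numbers", (a, b, sum) => {
  expect(a + b).toBe(sum);
});

// Plain array: each item becomes the single argument of the callback.
test.each(["foo", "bar"])("is a non-empty string", value => {
  expect(value.length).toBeGreaterThan(0);
});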
/**
* Runs a test.
@@ -553,7 +533,7 @@ declare module "bun:test" {
* @param label the label for the test
* @param fn the test function
*/
export const test: Test;
export const test: Test<[]>;
export { test as it, xtest as xit };
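A minimal sketch of how the modifiers declared above are used from a test file; the labels, condition, and bodies are illustrative only.

import { test, expect } from "bun:test";

// Registered but only run with --todo; if the body then passes,
// the run is reported as a failure so stale .todo markers get noticed.
test.todo("parse IPv6 hosts");

// Always runs, regardless of --todo, and is expected to fail.
test.failing("not implemented yet", () => {
  expect(1 + 1).toBe(3);
});

// The *If variants take the condition first and return the usual (label, fn, options) signature.
const onWindows = process.platform === "win32";
test.skipIf(onWindows)("uses unix domain sockets", () => {
  expect(process.platform).not.toBe("win32");
});
test.todoIf(!onWindows)("named pipe transport", () => {});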
|
||||
|
||||
/**
|
||||
@@ -564,7 +544,7 @@ declare module "bun:test" {
|
||||
* @param label the label for the test
|
||||
* @param fn the test function
|
||||
*/
|
||||
export const xtest: Test;
|
||||
export const xtest: Test<[]>;
|
||||
|
||||
/**
|
||||
* Asserts that a value matches some criteria.
|
||||
@@ -588,7 +568,9 @@ declare module "bun:test" {
|
||||
* @param customFailMessage an optional custom message to display if the test fails.
|
||||
* */
|
||||
|
||||
<T = unknown>(actual?: T, customFailMessage?: string): Matchers<T>;
|
||||
(actual?: never, customFailMessage?: string): Matchers<undefined>;
|
||||
<T = unknown>(actual: T, customFailMessage?: string): Matchers<T>;
|
||||
<T = unknown>(actual?: T, customFailMessage?: string): Matchers<T | undefined>;
|
||||
|
||||
/**
|
||||
* Access to negated asymmetric matchers.
|
||||
@@ -906,6 +888,7 @@ declare module "bun:test" {
|
||||
* @param message the message to display if the test fails (optional)
|
||||
*/
|
||||
pass: (message?: string) => void;
|
||||
|
||||
/**
|
||||
* Assertion which fails.
|
||||
*
|
||||
@@ -917,6 +900,7 @@ declare module "bun:test" {
|
||||
* expect().not.fail("hi");
|
||||
*/
|
||||
fail: (message?: string) => void;
|
||||
|
||||
/**
|
||||
* Asserts that a value equals what is expected.
|
||||
*
|
||||
@@ -930,9 +914,15 @@ declare module "bun:test" {
|
||||
* expect([123]).toBe([123]); // fail, use toEqual()
|
||||
* expect(3 + 0.14).toBe(3.14); // fail, use toBeCloseTo()
|
||||
*
|
||||
* // TypeScript errors:
|
||||
* expect("hello").toBe(3.14); // typescript error + fail
|
||||
* expect("hello").toBe<number>(3.14); // no typescript error, but still fails
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toBe(expected: T): void;
|
||||
toBe<X = T>(expected: NoInfer<X>): void;
|
||||
|
||||
/**
|
||||
* Asserts that a number is odd.
|
||||
*
|
||||
@@ -942,6 +932,7 @@ declare module "bun:test" {
|
||||
* expect(2).not.toBeOdd();
|
||||
*/
|
||||
toBeOdd(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a number is even.
|
||||
*
|
||||
@@ -951,6 +942,7 @@ declare module "bun:test" {
|
||||
* expect(1).not.toBeEven();
|
||||
*/
|
||||
toBeEven(): void;
|
||||
|
||||
/**
|
||||
* Asserts that value is close to the expected by floating point precision.
|
||||
*
|
||||
@@ -969,6 +961,7 @@ declare module "bun:test" {
|
||||
* @param numDigits the number of digits to check after the decimal point. Default is `2`
|
||||
*/
|
||||
toBeCloseTo(expected: number, numDigits?: number): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is deeply equal to what is expected.
|
||||
*
|
||||
@@ -981,6 +974,8 @@ declare module "bun:test" {
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toEqual(expected: T): void;
|
||||
toEqual<X = T>(expected: NoInfer<X>): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is deeply and strictly equal to
|
||||
* what is expected.
|
||||
@@ -1005,6 +1000,8 @@ declare module "bun:test" {
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toStrictEqual(expected: T): void;
|
||||
toStrictEqual<X = T>(expected: NoInfer<X>): void;
|
||||
|
||||
/**
|
||||
* Asserts that the value is deep equal to an element in the expected array.
|
||||
*
|
||||
@@ -1017,7 +1014,9 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toBeOneOf(expected: Array<unknown> | Iterable<unknown>): void;
|
||||
toBeOneOf(expected: Iterable<T>): void;
|
||||
toBeOneOf<X = T>(expected: NoInfer<Iterable<X>>): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value contains what is expected.
|
||||
*
|
||||
@@ -1031,7 +1030,9 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContain(expected: unknown): void;
|
||||
toContain(expected: T extends Iterable<infer U> ? U : T): void;
|
||||
toContain<X = T>(expected: NoInfer<X extends Iterable<infer U> ? U : X>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contains a key.
|
||||
*
|
||||
@@ -1045,7 +1046,9 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainKey(expected: unknown): void;
|
||||
toContainKey(expected: keyof T): void;
|
||||
toContainKey<X = T>(expected: NoInfer<keyof X>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contains all the provided keys.
|
||||
*
|
||||
@@ -1060,7 +1063,9 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainAllKeys(expected: unknown): void;
|
||||
toContainAllKeys(expected: Array<keyof T>): void;
|
||||
toContainAllKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contains at least one of the provided keys.
|
||||
* Asserts that an `object` contains all the provided keys.
|
||||
@@ -1075,12 +1080,16 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainAnyKeys(expected: unknown): void;
|
||||
toContainAnyKeys(expected: Array<keyof T>): void;
|
||||
toContainAnyKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contain the provided value.
|
||||
*
|
||||
* The value must be an object
|
||||
* This method is deep and will look through child properties to find the
|
||||
* expected value.
|
||||
*
|
||||
* The input value must be an object.
|
||||
*
|
||||
* @example
|
||||
* const shallow = { hello: "world" };
|
||||
@@ -1104,11 +1113,16 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
// Contributor note: In theory we could type this better but it would be a
|
||||
// slow union to compute...
|
||||
toContainValue(expected: unknown): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contain the provided value.
|
||||
*
|
||||
* This is the same as {@link toContainValue}, but accepts an array of
|
||||
* values instead.
|
||||
*
|
||||
* The value must be an object
|
||||
*
|
||||
* @example
|
||||
@@ -1118,7 +1132,7 @@ declare module "bun:test" {
|
||||
* expect(o).not.toContainValues(['qux', 'foo']);
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainValues(expected: unknown): void;
|
||||
toContainValues(expected: Array<unknown>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contain all the provided values.
|
||||
@@ -1132,7 +1146,7 @@ declare module "bun:test" {
|
||||
* expect(o).not.toContainAllValues(['bar', 'foo']);
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainAllValues(expected: unknown): void;
|
||||
toContainAllValues(expected: Array<unknown>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contain any provided value.
|
||||
@@ -1147,7 +1161,7 @@ declare module "bun:test" {
|
||||
* expect(o).not.toContainAnyValues(['qux']);
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainAnyValues(expected: unknown): void;
|
||||
toContainAnyValues(expected: Array<unknown>): void;
|
||||
|
||||
/**
|
||||
* Asserts that an `object` contains all the provided keys.
|
||||
@@ -1159,7 +1173,9 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainKeys(expected: unknown): void;
|
||||
toContainKeys(expected: Array<keyof T>): void;
|
||||
toContainKeys<X = T>(expected: NoInfer<Array<keyof X>>): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value contains and equals what is expected.
|
||||
*
|
||||
@@ -1172,7 +1188,9 @@ declare module "bun:test" {
|
||||
*
|
||||
* @param expected the expected value
|
||||
*/
|
||||
toContainEqual(expected: unknown): void;
|
||||
toContainEqual(expected: T extends Iterable<infer U> ? U : T): void;
|
||||
toContainEqual<X = T>(expected: NoInfer<X extends Iterable<infer U> ? U : X>): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value has a `.length` property
|
||||
* that is equal to the expected length.
|
||||
@@ -1184,6 +1202,7 @@ declare module "bun:test" {
|
||||
* @param length the expected length
|
||||
*/
|
||||
toHaveLength(length: number): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value has a property with the
|
||||
* expected name, and value if provided.
|
||||
@@ -1198,6 +1217,7 @@ declare module "bun:test" {
|
||||
* @param value the expected property value, if provided
|
||||
*/
|
||||
toHaveProperty(keyPath: string | number | Array<string | number>, value?: unknown): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is "truthy".
|
||||
*
|
||||
@@ -1210,6 +1230,7 @@ declare module "bun:test" {
|
||||
* expect({}).toBeTruthy();
|
||||
*/
|
||||
toBeTruthy(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is "falsy".
|
||||
*
|
||||
@@ -1222,6 +1243,7 @@ declare module "bun:test" {
|
||||
* expect({}).toBeTruthy();
|
||||
*/
|
||||
toBeFalsy(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is defined. (e.g. is not `undefined`)
|
||||
*
|
||||
@@ -1230,6 +1252,7 @@ declare module "bun:test" {
|
||||
* expect(undefined).toBeDefined(); // fail
|
||||
*/
|
||||
toBeDefined(): void;
|
||||
|
||||
/**
|
||||
* Asserts that the expected value is an instance of value
|
||||
*
|
||||
@@ -1238,6 +1261,7 @@ declare module "bun:test" {
|
||||
* expect(null).toBeInstanceOf(Array); // fail
|
||||
*/
|
||||
toBeInstanceOf(value: unknown): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is `undefined`.
|
||||
*
|
||||
@@ -1246,6 +1270,7 @@ declare module "bun:test" {
|
||||
* expect(null).toBeUndefined(); // fail
|
||||
*/
|
||||
toBeUndefined(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is `null`.
|
||||
*
|
||||
@@ -1254,6 +1279,7 @@ declare module "bun:test" {
|
||||
* expect(undefined).toBeNull(); // fail
|
||||
*/
|
||||
toBeNull(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is `NaN`.
|
||||
*
|
||||
@@ -1265,6 +1291,7 @@ declare module "bun:test" {
|
||||
* expect("notanumber").toBeNaN(); // fail
|
||||
*/
|
||||
toBeNaN(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number` and is greater than the expected value.
|
||||
*
|
||||
@@ -1276,6 +1303,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected number
|
||||
*/
|
||||
toBeGreaterThan(expected: number | bigint): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number` and is greater than or equal to the expected value.
|
||||
*
|
||||
@@ -1287,6 +1315,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected number
|
||||
*/
|
||||
toBeGreaterThanOrEqual(expected: number | bigint): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number` and is less than the expected value.
|
||||
*
|
||||
@@ -1298,6 +1327,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected number
|
||||
*/
|
||||
toBeLessThan(expected: number | bigint): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number` and is less than or equal to the expected value.
|
||||
*
|
||||
@@ -1309,6 +1339,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected number
|
||||
*/
|
||||
toBeLessThanOrEqual(expected: number | bigint): void;
|
||||
|
||||
/**
|
||||
* Asserts that a function throws an error.
|
||||
*
|
||||
@@ -1329,6 +1360,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected error, error message, or error pattern
|
||||
*/
|
||||
toThrow(expected?: unknown): void;
|
||||
|
||||
/**
|
||||
* Asserts that a function throws an error.
|
||||
*
|
||||
@@ -1350,6 +1382,7 @@ declare module "bun:test" {
|
||||
* @alias toThrow
|
||||
*/
|
||||
toThrowError(expected?: unknown): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value matches a regular expression or includes a substring.
|
||||
*
|
||||
@@ -1360,6 +1393,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected substring or pattern.
|
||||
*/
|
||||
toMatch(expected: string | RegExp): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value matches the most recent snapshot.
|
||||
*
|
||||
@@ -1368,6 +1402,7 @@ declare module "bun:test" {
|
||||
* @param hint Hint used to identify the snapshot in the snapshot file.
|
||||
*/
|
||||
toMatchSnapshot(hint?: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value matches the most recent snapshot.
|
||||
*
|
||||
@@ -1380,6 +1415,7 @@ declare module "bun:test" {
|
||||
* @param hint Hint used to identify the snapshot in the snapshot file.
|
||||
*/
|
||||
toMatchSnapshot(propertyMatchers?: object, hint?: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value matches the most recent inline snapshot.
|
||||
*
|
||||
@@ -1390,6 +1426,7 @@ declare module "bun:test" {
|
||||
* @param value The latest automatically-updated snapshot value.
|
||||
*/
|
||||
toMatchInlineSnapshot(value?: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value matches the most recent inline snapshot.
|
||||
*
|
||||
@@ -1405,6 +1442,7 @@ declare module "bun:test" {
|
||||
* @param value The latest automatically-updated snapshot value.
|
||||
*/
|
||||
toMatchInlineSnapshot(propertyMatchers?: object, value?: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a function throws an error matching the most recent snapshot.
|
||||
*
|
||||
@@ -1418,6 +1456,7 @@ declare module "bun:test" {
|
||||
* @param value The latest automatically-updated snapshot value.
|
||||
*/
|
||||
toThrowErrorMatchingSnapshot(hint?: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a function throws an error matching the most recent snapshot.
|
||||
*
|
||||
@@ -1431,6 +1470,7 @@ declare module "bun:test" {
|
||||
* @param value The latest automatically-updated snapshot value.
|
||||
*/
|
||||
toThrowErrorMatchingInlineSnapshot(value?: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that an object matches a subset of properties.
|
||||
*
|
||||
@@ -1441,6 +1481,7 @@ declare module "bun:test" {
|
||||
* @param subset Subset of properties to match with.
|
||||
*/
|
||||
toMatchObject(subset: object): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is empty.
|
||||
*
|
||||
@@ -1451,6 +1492,7 @@ declare module "bun:test" {
|
||||
* expect(new Set()).toBeEmpty();
|
||||
*/
|
||||
toBeEmpty(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is an empty `object`.
|
||||
*
|
||||
@@ -1459,6 +1501,7 @@ declare module "bun:test" {
|
||||
* expect({ a: 'hello' }).not.toBeEmptyObject();
|
||||
*/
|
||||
toBeEmptyObject(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is `null` or `undefined`.
|
||||
*
|
||||
@@ -1467,6 +1510,7 @@ declare module "bun:test" {
|
||||
* expect(undefined).toBeNil();
|
||||
*/
|
||||
toBeNil(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `array`.
|
||||
*
|
||||
@@ -1477,6 +1521,7 @@ declare module "bun:test" {
|
||||
* expect({}).not.toBeArray();
|
||||
*/
|
||||
toBeArray(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `array` of a certain length.
|
||||
*
|
||||
@@ -1488,6 +1533,7 @@ declare module "bun:test" {
|
||||
* expect({}).not.toBeArrayOfSize(0);
|
||||
*/
|
||||
toBeArrayOfSize(size: number): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `boolean`.
|
||||
*
|
||||
@@ -1498,6 +1544,7 @@ declare module "bun:test" {
|
||||
* expect(0).not.toBeBoolean();
|
||||
*/
|
||||
toBeBoolean(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is `true`.
|
||||
*
|
||||
@@ -1507,6 +1554,7 @@ declare module "bun:test" {
|
||||
* expect(1).not.toBeTrue();
|
||||
*/
|
||||
toBeTrue(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value matches a specific type.
|
||||
*
|
||||
@@ -1517,6 +1565,7 @@ declare module "bun:test" {
|
||||
* expect([]).not.toBeTypeOf("boolean");
|
||||
*/
|
||||
toBeTypeOf(type: "bigint" | "boolean" | "function" | "number" | "object" | "string" | "symbol" | "undefined"): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is `false`.
|
||||
*
|
||||
@@ -1526,6 +1575,7 @@ declare module "bun:test" {
|
||||
* expect(0).not.toBeFalse();
|
||||
*/
|
||||
toBeFalse(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number`.
|
||||
*
|
||||
@@ -1536,6 +1586,7 @@ declare module "bun:test" {
|
||||
* expect(BigInt(1)).not.toBeNumber();
|
||||
*/
|
||||
toBeNumber(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number`, and is an integer.
|
||||
*
|
||||
@@ -1545,6 +1596,7 @@ declare module "bun:test" {
|
||||
* expect(NaN).not.toBeInteger();
|
||||
*/
|
||||
toBeInteger(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is an `object`.
|
||||
*
|
||||
@@ -1554,6 +1606,7 @@ declare module "bun:test" {
|
||||
* expect(NaN).not.toBeObject();
|
||||
*/
|
||||
toBeObject(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `number`, and is not `NaN` or `Infinity`.
|
||||
*
|
||||
@@ -1564,6 +1617,7 @@ declare module "bun:test" {
|
||||
* expect(Infinity).not.toBeFinite();
|
||||
*/
|
||||
toBeFinite(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a positive `number`.
|
||||
*
|
||||
@@ -1573,6 +1627,7 @@ declare module "bun:test" {
|
||||
* expect(NaN).not.toBePositive();
|
||||
*/
|
||||
toBePositive(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a negative `number`.
|
||||
*
|
||||
@@ -1582,6 +1637,7 @@ declare module "bun:test" {
|
||||
* expect(NaN).not.toBeNegative();
|
||||
*/
|
||||
toBeNegative(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a number between a start and end value.
|
||||
*
|
||||
@@ -1589,6 +1645,7 @@ declare module "bun:test" {
|
||||
* @param end the end number (exclusive)
|
||||
*/
|
||||
toBeWithin(start: number, end: number): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is equal to the expected string, ignoring any whitespace.
|
||||
*
|
||||
@@ -1599,6 +1656,7 @@ declare module "bun:test" {
|
||||
* @param expected the expected string
|
||||
*/
|
||||
toEqualIgnoringWhitespace(expected: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `symbol`.
|
||||
*
|
||||
@@ -1607,6 +1665,7 @@ declare module "bun:test" {
|
||||
* expect("foo").not.toBeSymbol();
|
||||
*/
|
||||
toBeSymbol(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `function`.
|
||||
*
|
||||
@@ -1614,6 +1673,7 @@ declare module "bun:test" {
|
||||
* expect(() => {}).toBeFunction();
|
||||
*/
|
||||
toBeFunction(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `Date` object.
|
||||
*
|
||||
@@ -1625,6 +1685,7 @@ declare module "bun:test" {
|
||||
* expect("2020-03-01").not.toBeDate();
|
||||
*/
|
||||
toBeDate(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a valid `Date` object.
|
||||
*
|
||||
@@ -1634,6 +1695,7 @@ declare module "bun:test" {
|
||||
* expect("2020-03-01").not.toBeValidDate();
|
||||
*/
|
||||
toBeValidDate(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value is a `string`.
|
||||
*
|
||||
@@ -1643,6 +1705,7 @@ declare module "bun:test" {
|
||||
* expect(123).not.toBeString();
|
||||
*/
|
||||
toBeString(): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value includes a `string`.
|
||||
*
|
||||
@@ -1651,12 +1714,14 @@ declare module "bun:test" {
|
||||
* @param expected the expected substring
|
||||
*/
|
||||
toInclude(expected: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value includes a `string` {times} times.
|
||||
* @param expected the expected substring
|
||||
* @param times the number of times the substring should occur
|
||||
*/
|
||||
toIncludeRepeated(expected: string, times: number): void;
|
||||
|
||||
/**
|
||||
* Checks whether a value satisfies a custom condition.
|
||||
* @param {Function} predicate - The custom condition to be satisfied. It should be a function that takes a value as an argument (in this case the value from expect) and returns a boolean.
|
||||
@@ -1668,18 +1733,21 @@ declare module "bun:test" {
|
||||
* @link https://jest-extended.jestcommunity.dev/docs/matchers/toSatisfy
|
||||
*/
|
||||
toSatisfy(predicate: (value: T) => boolean): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value starts with a `string`.
|
||||
*
|
||||
* @param expected the string to start with
|
||||
*/
|
||||
toStartWith(expected: string): void;
|
||||
|
||||
/**
|
||||
* Asserts that a value ends with a `string`.
|
||||
*
|
||||
* @param expected the string to end with
|
||||
*/
|
||||
toEndWith(expected: string): void;
|
||||
|
||||
/**
|
||||
* Ensures that a mock function has returned successfully at least once.
|
||||
*
|
||||
@@ -1720,42 +1788,51 @@ declare module "bun:test" {
|
||||
* Ensures that a mock function is called.
|
||||
*/
|
||||
toHaveBeenCalled(): void;
|
||||
|
||||
/**
|
||||
* Ensures that a mock function is called an exact number of times.
|
||||
* @alias toHaveBeenCalled
|
||||
*/
|
||||
toBeCalled(): void;
|
||||
|
||||
/**
|
||||
* Ensures that a mock function is called an exact number of times.
|
||||
*/
|
||||
toHaveBeenCalledTimes(expected: number): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments.
|
||||
* @alias toHaveBeenCalledTimes
|
||||
*/
|
||||
toBeCalledTimes(expected: number): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments.
|
||||
*/
|
||||
toHaveBeenCalledWith(...expected: unknown[]): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments.
|
||||
* @alias toHaveBeenCalledWith
|
||||
*/
|
||||
toBeCalledWith(...expected: unknown[]): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments for the last call.
|
||||
*/
|
||||
toHaveBeenLastCalledWith(...expected: unknown[]): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments for the nth call.
|
||||
* @alias toHaveBeenCalledWith
|
||||
*/
|
||||
lastCalledWith(...expected: unknown[]): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments for the nth call.
|
||||
*/
|
||||
toHaveBeenNthCalledWith(n: number, ...expected: unknown[]): void;
|
||||
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments for the nth call.
|
||||
* @alias toHaveBeenCalledWith
|
||||
|
||||
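Before the diff moves on to the native sources, here is a hedged sketch tying the matcher and mock declarations from the hunks above together; the values and the doubled function are invented for illustration.

import { test, expect, mock } from "bun:test";

test("value matchers", () => {
  expect(2 + 2).toBe(4);
  expect({ user: { name: "ada" } }).toEqual({ user: { name: "ada" } });
  expect(0.1 + 0.2).toBeCloseTo(0.3, 5);
  expect([1, 2, 3]).toContain(2);
  expect("error").toBeOneOf(["ok", "error"]);
  expect("  bun  ").toSatisfy(s => s.trim() === "bun");
  // The optional second argument to expect() is a custom failure message.
  expect(true, "feature flag should stay enabled").toBeTrue();
});

test("mock call matchers", () => {
  const double = mock((n: number) => n * 2);
  double(1);
  double(2);

  expect(double).toHaveBeenCalled();
  expect(double).toHaveBeenCalledTimes(2);
  expect(double).toHaveBeenCalledWith(1);
  expect(double).toHaveBeenLastCalledWith(2);
  expect(double).toHaveBeenNthCalledWith(1, 1);
});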
@@ -25,6 +25,23 @@
#include <stdio.h>
#include <stdlib.h>

#if BUN_DEBUG
// Debug network traffic logging
static FILE *debug_recv_file = NULL;
static FILE *debug_send_file = NULL;
static int debug_logging_initialized = 0;

static void init_debug_logging() {
if (debug_logging_initialized) return;
debug_logging_initialized = 1;

const char *recv_path = getenv("BUN_RECV");
const char *send_path = getenv("BUN_SEND");
if (recv_path) if (!debug_recv_file) debug_recv_file = fopen(recv_path, "w");
if (send_path) if (!debug_send_file) debug_send_file = fopen(send_path, "w");
}
#endif

#ifndef _WIN32
// Necessary for the stdint include
#ifndef _GNU_SOURCE
@@ -721,6 +738,17 @@ ssize_t bsd_recv(LIBUS_SOCKET_DESCRIPTOR fd, void *buf, int length, int flags) {
continue;
}

#if BUN_DEBUG
// Debug logging for received data
if (ret > 0) {
init_debug_logging();
if (debug_recv_file) {
fwrite(buf, 1, ret, debug_recv_file);
fflush(debug_recv_file);
}
}
#endif

return ret;
}
}
@@ -788,6 +816,17 @@ ssize_t bsd_send(LIBUS_SOCKET_DESCRIPTOR fd, const char *buf, int length) {
continue;
}

#if BUN_DEBUG
// Debug logging for sent data
if (rc > 0) {
init_debug_logging();
if (debug_send_file) {
fwrite(buf, 1, rc, debug_send_file);
fflush(debug_send_file);
}
}
#endif

return rc;
}
}
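The hunks above only compile in debug builds (BUN_DEBUG): when BUN_RECV or BUN_SEND name writable paths, every successful bsd_recv/bsd_send call records the raw transferred bytes into those files. A hedged sketch of driving that from a script; the debug binary name "bun-debug" and the target script are assumptions.

// Spawn a debug build with capture files configured, then inspect the dumps.
const proc = Bun.spawn(["bun-debug", "fetch-something.ts"], {
  env: {
    ...process.env,
    BUN_RECV: "/tmp/bun-recv.bin", // raw bytes returned by recv()
    BUN_SEND: "/tmp/bun-send.bin", // raw bytes handed to send()
  },
});
await proc.exited;

const sent = await Bun.file("/tmp/bun-send.bin").arrayBuffer();
const received = await Bun.file("/tmp/bun-recv.bin").arrayBuffer();
console.log(`sent ${sent.byteLength} bytes, received ${received.byteLength} bytes`);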
@@ -6,10 +6,46 @@
#include <atomic>
#include <string.h>
#include "./default_ciphers.h"

// System-specific includes for certificate loading
#include "./root_certs_platform.h"
#ifdef _WIN32
#include <windows.h>
#include <wincrypt.h>
#else
// Linux/Unix includes
#include <dirent.h>
#include <stdio.h>
#include <limits.h>
#endif
static const int root_certs_size = sizeof(root_certs) / sizeof(root_certs[0]);

extern "C" void BUN__warn__extra_ca_load_failed(const char* filename, const char* error_msg);

// Forward declarations for platform-specific functions
// (Actual implementations are in platform-specific files)

// External variable from Zig CLI arguments
extern "C" bool Bun__Node__UseSystemCA;

// Helper function to check if system CA should be used
// Checks both CLI flag (--use-system-ca) and environment variable (NODE_USE_SYSTEM_CA=1)
static bool us_should_use_system_ca() {
// Check CLI flag first
if (Bun__Node__UseSystemCA) {
return true;
}

// Check environment variable
const char *use_system_ca = getenv("NODE_USE_SYSTEM_CA");
return use_system_ca && strcmp(use_system_ca, "1") == 0;
}

// Platform-specific system certificate loading implementations are separated:
// - macOS: root_certs_darwin.cpp (Security framework with dynamic loading)
// - Windows: root_certs_windows.cpp (Windows CryptoAPI)
// - Linux/Unix: us_load_system_certificates_linux() below

// This callback is used to avoid the default passphrase callback in OpenSSL
// which will typically prompt for the passphrase. The prompting is designed
// for the OpenSSL CLI, but works poorly for this case because it involves
@@ -101,7 +137,8 @@ end:

static void us_internal_init_root_certs(
X509 *root_cert_instances[root_certs_size],
STACK_OF(X509) *&root_extra_cert_instances) {
STACK_OF(X509) *&root_extra_cert_instances,
STACK_OF(X509) *&root_system_cert_instances) {
static std::atomic_flag root_cert_instances_lock = ATOMIC_FLAG_INIT;
static std::atomic_bool root_cert_instances_initialized = 0;

@@ -123,6 +160,17 @@ static void us_internal_init_root_certs(
if (extra_certs && extra_certs[0]) {
root_extra_cert_instances = us_ssl_ctx_load_all_certs_from_file(extra_certs);
}

// load system certificates if NODE_USE_SYSTEM_CA=1
if (us_should_use_system_ca()) {
#ifdef __APPLE__
us_load_system_certificates_macos(&root_system_cert_instances);
#elif defined(_WIN32)
us_load_system_certificates_windows(&root_system_cert_instances);
#else
us_load_system_certificates_linux(&root_system_cert_instances);
#endif
}
}

atomic_flag_clear_explicit(&root_cert_instances_lock,
@@ -137,12 +185,15 @@ extern "C" int us_internal_raw_root_certs(struct us_cert_string_t **out) {
struct us_default_ca_certificates {
X509 *root_cert_instances[root_certs_size];
STACK_OF(X509) *root_extra_cert_instances;
STACK_OF(X509) *root_system_cert_instances;
};

us_default_ca_certificates* us_get_default_ca_certificates() {
static us_default_ca_certificates default_ca_certificates = {{NULL}, NULL};
static us_default_ca_certificates default_ca_certificates = {{NULL}, NULL, NULL};

us_internal_init_root_certs(default_ca_certificates.root_cert_instances, default_ca_certificates.root_extra_cert_instances);
us_internal_init_root_certs(default_ca_certificates.root_cert_instances,
default_ca_certificates.root_extra_cert_instances,
default_ca_certificates.root_system_cert_instances);

return &default_ca_certificates;
}
@@ -151,20 +202,33 @@ STACK_OF(X509) *us_get_root_extra_cert_instances() {
return us_get_default_ca_certificates()->root_extra_cert_instances;
}

STACK_OF(X509) *us_get_root_system_cert_instances() {
if (!us_should_use_system_ca())
return NULL;
// Ensure single-path initialization via us_internal_init_root_certs
auto certs = us_get_default_ca_certificates();
return certs->root_system_cert_instances;
}

extern "C" X509_STORE *us_get_default_ca_store() {
X509_STORE *store = X509_STORE_new();
if (store == NULL) {
return NULL;
}

if (!X509_STORE_set_default_paths(store)) {
X509_STORE_free(store);
return NULL;
// Only load system default paths when NODE_USE_SYSTEM_CA=1
// Otherwise, rely on bundled certificates only (like Node.js behavior)
if (us_should_use_system_ca()) {
if (!X509_STORE_set_default_paths(store)) {
X509_STORE_free(store);
return NULL;
}
}

us_default_ca_certificates *default_ca_certificates = us_get_default_ca_certificates();
X509** root_cert_instances = default_ca_certificates->root_cert_instances;
STACK_OF(X509) *root_extra_cert_instances = default_ca_certificates->root_extra_cert_instances;
STACK_OF(X509) *root_system_cert_instances = default_ca_certificates->root_system_cert_instances;

// load all root_cert_instances on the default ca store
for (size_t i = 0; i < root_certs_size; i++) {
@@ -183,8 +247,59 @@ extern "C" X509_STORE *us_get_default_ca_store() {
}
}

if (us_should_use_system_ca() && root_system_cert_instances) {
for (int i = 0; i < sk_X509_num(root_system_cert_instances); i++) {
X509 *cert = sk_X509_value(root_system_cert_instances, i);
X509_up_ref(cert);
X509_STORE_add_cert(store, cert);
}
}

return store;
}
extern "C" const char *us_get_default_ciphers() {
return DEFAULT_CIPHER_LIST;
}
}

// Platform-specific implementations for loading system certificates

#if defined(_WIN32)
// Windows implementation is split to avoid header conflicts:
// - root_certs_windows.cpp loads raw certificate data (uses Windows headers)
// - This file converts raw data to X509* (uses OpenSSL headers)

#include <vector>

struct RawCertificate {
std::vector<unsigned char> data;
};

// Defined in root_certs_windows.cpp - loads raw certificate data
extern void us_load_system_certificates_windows_raw(
std::vector<RawCertificate>& raw_certs);

// Convert raw Windows certificates to OpenSSL X509 format
void us_load_system_certificates_windows(STACK_OF(X509) **system_certs) {
*system_certs = sk_X509_new_null();
if (*system_certs == NULL) {
return;
}

// Load raw certificates from Windows stores
std::vector<RawCertificate> raw_certs;
us_load_system_certificates_windows_raw(raw_certs);

// Convert each raw certificate to X509
for (const auto& raw_cert : raw_certs) {
const unsigned char* data = raw_cert.data.data();
X509* x509_cert = d2i_X509(NULL, &data, raw_cert.data.size());
if (x509_cert != NULL) {
sk_X509_push(*system_certs, x509_cert);
}
}
}

#else
// Linux and other Unix-like systems - implementation is in root_certs_linux.cpp
extern "C" void us_load_system_certificates_linux(STACK_OF(X509) **system_certs);
#endif
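The net effect of this hunk for users: the bundled Mozilla roots stay the default, and the OS trust store (plus X509_STORE_set_default_paths) is only consulted when explicitly requested through --use-system-ca or NODE_USE_SYSTEM_CA=1, both checked by us_should_use_system_ca(). A hedged sketch of exercising the two modes; "check-tls.ts" and the internal host it fetches are placeholders.

// check-tls.ts is assumed to do: await fetch("https://internal.example.corp/");
// where the server certificate chains to a CA that exists only in the OS trust store.

// Default: only the bundled roots (and NODE_EXTRA_CA_CERTS) are trusted.
const bundledOnly = Bun.spawnSync(["bun", "check-tls.ts"]);

// Opt in to the OS trust store (equivalently: bun --use-system-ca check-tls.ts).
const withSystemCA = Bun.spawnSync(["bun", "check-tls.ts"], {
  env: { ...process.env, NODE_USE_SYSTEM_CA: "1" },
});

console.log("bundled roots only:", bundledOnly.exitCode); // TLS verification expected to fail
console.log("with system CAs:", withSystemCA.exitCode);   // expected to exit 0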
packages/bun-usockets/src/crypto/root_certs_darwin.cpp (new file, 431 lines)
@@ -0,0 +1,431 @@
|
||||
#ifdef __APPLE__
|
||||
|
||||
#include <dlfcn.h>
|
||||
#include <CoreFoundation/CoreFoundation.h>
|
||||
#include <atomic>
|
||||
#include <openssl/x509.h>
|
||||
#include <openssl/x509_vfy.h>
|
||||
#include <stdio.h>
|
||||
|
||||
// Security framework types and constants - dynamically loaded
|
||||
typedef struct OpaqueSecCertificateRef* SecCertificateRef;
|
||||
typedef struct OpaqueSecTrustRef* SecTrustRef;
|
||||
typedef struct OpaqueSecPolicyRef* SecPolicyRef;
|
||||
typedef int32_t OSStatus;
|
||||
typedef uint32_t SecTrustSettingsDomain;
|
||||
|
||||
// Security framework constants
|
||||
enum {
|
||||
errSecSuccess = 0,
|
||||
errSecItemNotFound = -25300,
|
||||
};
|
||||
|
||||
// Trust settings domains
|
||||
enum {
|
||||
kSecTrustSettingsDomainUser = 0,
|
||||
kSecTrustSettingsDomainAdmin = 1,
|
||||
kSecTrustSettingsDomainSystem = 2,
|
||||
};
|
||||
|
||||
// Trust status enumeration
|
||||
enum class TrustStatus {
|
||||
TRUSTED,
|
||||
DISTRUSTED,
|
||||
UNSPECIFIED
|
||||
};
|
||||
|
||||
// Dynamic Security framework loader
|
||||
class SecurityFramework {
|
||||
public:
|
||||
void* handle;
|
||||
void* cf_handle;
|
||||
|
||||
// Core Foundation constants
|
||||
CFStringRef kSecClass;
|
||||
CFStringRef kSecClassCertificate;
|
||||
CFStringRef kSecMatchLimit;
|
||||
CFStringRef kSecMatchLimitAll;
|
||||
CFStringRef kSecReturnRef;
|
||||
CFStringRef kSecMatchTrustedOnly;
|
||||
CFBooleanRef kCFBooleanTrue;
|
||||
CFAllocatorRef kCFAllocatorDefault;
|
||||
CFArrayCallBacks* kCFTypeArrayCallBacks;
|
||||
CFDictionaryKeyCallBacks* kCFTypeDictionaryKeyCallBacks;
|
||||
CFDictionaryValueCallBacks* kCFTypeDictionaryValueCallBacks;
|
||||
|
||||
// Core Foundation function pointers
|
||||
CFMutableArrayRef (*CFArrayCreateMutable)(CFAllocatorRef allocator, CFIndex capacity, const CFArrayCallBacks *callBacks);
|
||||
CFArrayRef (*CFArrayCreate)(CFAllocatorRef allocator, const void **values, CFIndex numValues, const CFArrayCallBacks *callBacks);
|
||||
void (*CFArraySetValueAtIndex)(CFMutableArrayRef theArray, CFIndex idx, const void *value);
|
||||
const void* (*CFArrayGetValueAtIndex)(CFArrayRef theArray, CFIndex idx);
|
||||
CFIndex (*CFArrayGetCount)(CFArrayRef theArray);
|
||||
void (*CFRelease)(CFTypeRef cf);
|
||||
CFDictionaryRef (*CFDictionaryCreate)(CFAllocatorRef allocator, const void **keys, const void **values, CFIndex numValues, const CFDictionaryKeyCallBacks *keyCallBacks, const CFDictionaryValueCallBacks *valueCallBacks);
|
||||
const UInt8* (*CFDataGetBytePtr)(CFDataRef theData);
|
||||
CFIndex (*CFDataGetLength)(CFDataRef theData);
|
||||
|
||||
// Security framework function pointers
|
||||
OSStatus (*SecItemCopyMatching)(CFDictionaryRef query, CFTypeRef *result);
|
||||
CFDataRef (*SecCertificateCopyData)(SecCertificateRef certificate);
|
||||
OSStatus (*SecTrustCreateWithCertificates)(CFArrayRef certificates, CFArrayRef policies, SecTrustRef *trust);
|
||||
SecPolicyRef (*SecPolicyCreateSSL)(Boolean server, CFStringRef hostname);
|
||||
Boolean (*SecTrustEvaluateWithError)(SecTrustRef trust, CFErrorRef *error);
|
||||
OSStatus (*SecTrustSettingsCopyTrustSettings)(SecCertificateRef certRef, SecTrustSettingsDomain domain, CFArrayRef *trustSettings);
|
||||
|
||||
SecurityFramework() : handle(nullptr), cf_handle(nullptr),
|
||||
kSecClass(nullptr), kSecClassCertificate(nullptr),
|
||||
kSecMatchLimit(nullptr), kSecMatchLimitAll(nullptr),
|
||||
kSecReturnRef(nullptr), kSecMatchTrustedOnly(nullptr), kCFBooleanTrue(nullptr),
|
||||
kCFAllocatorDefault(nullptr), kCFTypeArrayCallBacks(nullptr),
|
||||
kCFTypeDictionaryKeyCallBacks(nullptr), kCFTypeDictionaryValueCallBacks(nullptr),
|
||||
CFArrayCreateMutable(nullptr), CFArrayCreate(nullptr),
|
||||
CFArraySetValueAtIndex(nullptr), CFArrayGetValueAtIndex(nullptr),
|
||||
CFArrayGetCount(nullptr), CFRelease(nullptr),
|
||||
CFDictionaryCreate(nullptr), CFDataGetBytePtr(nullptr), CFDataGetLength(nullptr),
|
||||
SecItemCopyMatching(nullptr), SecCertificateCopyData(nullptr),
|
||||
SecTrustCreateWithCertificates(nullptr), SecPolicyCreateSSL(nullptr),
|
||||
SecTrustEvaluateWithError(nullptr), SecTrustSettingsCopyTrustSettings(nullptr) {}
|
||||
|
||||
~SecurityFramework() {
|
||||
if (handle) {
|
||||
dlclose(handle);
|
||||
}
|
||||
if (cf_handle) {
|
||||
dlclose(cf_handle);
|
||||
}
|
||||
}
|
||||
|
||||
bool load() {
|
||||
if (handle && cf_handle) return true; // Already loaded
|
||||
|
||||
// Load CoreFoundation framework
|
||||
cf_handle = dlopen("/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation", RTLD_LAZY | RTLD_LOCAL);
|
||||
if (!cf_handle) {
|
||||
fprintf(stderr, "Failed to load CoreFoundation framework: %s\n", dlerror());
|
||||
return false;
|
||||
}
|
||||
|
||||
// Load Security framework
|
||||
handle = dlopen("/System/Library/Frameworks/Security.framework/Security", RTLD_LAZY | RTLD_LOCAL);
|
||||
if (!handle) {
|
||||
fprintf(stderr, "Failed to load Security framework: %s\n", dlerror());
|
||||
dlclose(cf_handle);
|
||||
cf_handle = nullptr;
|
||||
return false;
|
||||
}
|
||||
|
||||
// Load constants and functions
|
||||
if (!load_constants()) {
|
||||
if (handle) {
|
||||
dlclose(handle);
|
||||
handle = nullptr;
|
||||
}
|
||||
if (cf_handle) {
|
||||
dlclose(cf_handle);
|
||||
cf_handle = nullptr;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!load_functions()) {
|
||||
if (handle) {
|
||||
dlclose(handle);
|
||||
handle = nullptr;
|
||||
}
|
||||
if (cf_handle) {
|
||||
dlclose(cf_handle);
|
||||
cf_handle = nullptr;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private:
|
||||
bool load_constants() {
|
||||
// Load Security framework constants
|
||||
void* ptr = dlsym(handle, "kSecClass");
|
||||
if (!ptr) { fprintf(stderr, "DEBUG: kSecClass not found\n"); return false; }
|
||||
kSecClass = *(CFStringRef*)ptr;
|
||||
|
||||
ptr = dlsym(handle, "kSecClassCertificate");
|
||||
if (!ptr) { fprintf(stderr, "DEBUG: kSecClassCertificate not found\n"); return false; }
|
||||
kSecClassCertificate = *(CFStringRef*)ptr;
|
||||
|
||||
ptr = dlsym(handle, "kSecMatchLimit");
|
||||
if (!ptr) { fprintf(stderr, "DEBUG: kSecMatchLimit not found\n"); return false; }
|
||||
kSecMatchLimit = *(CFStringRef*)ptr;
|
||||
|
||||
ptr = dlsym(handle, "kSecMatchLimitAll");
|
||||
if (!ptr) { fprintf(stderr, "DEBUG: kSecMatchLimitAll not found\n"); return false; }
|
||||
kSecMatchLimitAll = *(CFStringRef*)ptr;
|
||||
|
||||
ptr = dlsym(handle, "kSecReturnRef");
|
||||
if (!ptr) { fprintf(stderr, "DEBUG: kSecReturnRef not found\n"); return false; }
|
||||
kSecReturnRef = *(CFStringRef*)ptr;
|
||||
|
||||
ptr = dlsym(handle, "kSecMatchTrustedOnly");
|
||||
if (!ptr) { fprintf(stderr, "DEBUG: kSecMatchTrustedOnly not found\n"); return false; }
|
||||
kSecMatchTrustedOnly = *(CFStringRef*)ptr;
|
||||
|
||||
// Load CoreFoundation constants
|
||||
ptr = dlsym(cf_handle, "kCFBooleanTrue");
|
||||
if (!ptr) { fprintf(stderr, "DEBUG: kCFBooleanTrue not found\n"); return false; }
|
||||
kCFBooleanTrue = *(CFBooleanRef*)ptr;
|
||||
|
||||
ptr = dlsym(cf_handle, "kCFAllocatorDefault");
|
||||
if (!ptr) { fprintf(stderr, "DEBUG: kCFAllocatorDefault not found\n"); return false; }
|
||||
kCFAllocatorDefault = *(CFAllocatorRef*)ptr;
|
||||
|
||||
ptr = dlsym(cf_handle, "kCFTypeArrayCallBacks");
|
||||
if (!ptr) { fprintf(stderr, "DEBUG: kCFTypeArrayCallBacks not found\n"); return false; }
|
||||
kCFTypeArrayCallBacks = (CFArrayCallBacks*)ptr;
|
||||
|
||||
ptr = dlsym(cf_handle, "kCFTypeDictionaryKeyCallBacks");
|
||||
if (!ptr) { fprintf(stderr, "DEBUG: kCFTypeDictionaryKeyCallBacks not found\n"); return false; }
|
||||
kCFTypeDictionaryKeyCallBacks = (CFDictionaryKeyCallBacks*)ptr;
|
||||
|
||||
ptr = dlsym(cf_handle, "kCFTypeDictionaryValueCallBacks");
|
||||
if (!ptr) { fprintf(stderr, "DEBUG: kCFTypeDictionaryValueCallBacks not found\n"); return false; }
|
||||
kCFTypeDictionaryValueCallBacks = (CFDictionaryValueCallBacks*)ptr;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool load_functions() {
|
||||
// Load CoreFoundation functions
|
||||
CFArrayCreateMutable = (CFMutableArrayRef (*)(CFAllocatorRef, CFIndex, const CFArrayCallBacks*))dlsym(cf_handle, "CFArrayCreateMutable");
|
||||
CFArrayCreate = (CFArrayRef (*)(CFAllocatorRef, const void**, CFIndex, const CFArrayCallBacks*))dlsym(cf_handle, "CFArrayCreate");
|
||||
CFArraySetValueAtIndex = (void (*)(CFMutableArrayRef, CFIndex, const void*))dlsym(cf_handle, "CFArraySetValueAtIndex");
|
||||
CFArrayGetValueAtIndex = (const void* (*)(CFArrayRef, CFIndex))dlsym(cf_handle, "CFArrayGetValueAtIndex");
|
||||
CFArrayGetCount = (CFIndex (*)(CFArrayRef))dlsym(cf_handle, "CFArrayGetCount");
|
||||
CFRelease = (void (*)(CFTypeRef))dlsym(cf_handle, "CFRelease");
|
||||
CFDictionaryCreate = (CFDictionaryRef (*)(CFAllocatorRef, const void**, const void**, CFIndex, const CFDictionaryKeyCallBacks*, const CFDictionaryValueCallBacks*))dlsym(cf_handle, "CFDictionaryCreate");
|
||||
CFDataGetBytePtr = (const UInt8* (*)(CFDataRef))dlsym(cf_handle, "CFDataGetBytePtr");
|
||||
CFDataGetLength = (CFIndex (*)(CFDataRef))dlsym(cf_handle, "CFDataGetLength");
|
||||
|
||||
// Load Security framework functions
|
||||
SecItemCopyMatching = (OSStatus (*)(CFDictionaryRef, CFTypeRef*))dlsym(handle, "SecItemCopyMatching");
|
||||
SecCertificateCopyData = (CFDataRef (*)(SecCertificateRef))dlsym(handle, "SecCertificateCopyData");
|
||||
SecTrustCreateWithCertificates = (OSStatus (*)(CFArrayRef, CFArrayRef, SecTrustRef*))dlsym(handle, "SecTrustCreateWithCertificates");
|
||||
SecPolicyCreateSSL = (SecPolicyRef (*)(Boolean, CFStringRef))dlsym(handle, "SecPolicyCreateSSL");
|
||||
SecTrustEvaluateWithError = (Boolean (*)(SecTrustRef, CFErrorRef*))dlsym(handle, "SecTrustEvaluateWithError");
|
||||
SecTrustSettingsCopyTrustSettings = (OSStatus (*)(SecCertificateRef, SecTrustSettingsDomain, CFArrayRef*))dlsym(handle, "SecTrustSettingsCopyTrustSettings");
|
||||
|
||||
return CFArrayCreateMutable && CFArrayCreate && CFArraySetValueAtIndex &&
|
||||
CFArrayGetValueAtIndex && CFArrayGetCount && CFRelease &&
|
||||
CFDictionaryCreate && CFDataGetBytePtr && CFDataGetLength &&
|
||||
SecItemCopyMatching && SecCertificateCopyData &&
|
||||
SecTrustCreateWithCertificates && SecPolicyCreateSSL &&
|
||||
SecTrustEvaluateWithError && SecTrustSettingsCopyTrustSettings;
|
||||
}
|
||||
};
|
||||
|
||||
// Global instance for dynamic loading
|
||||
static std::atomic<SecurityFramework*> g_security_framework{nullptr};
|
||||
|
||||
static SecurityFramework* get_security_framework() {
|
||||
SecurityFramework* framework = g_security_framework.load();
|
||||
if (!framework) {
|
||||
SecurityFramework* new_framework = new SecurityFramework();
|
||||
if (new_framework->load()) {
|
||||
SecurityFramework* expected = nullptr;
|
||||
if (g_security_framework.compare_exchange_strong(expected, new_framework)) {
|
||||
framework = new_framework;
|
||||
} else {
|
||||
delete new_framework;
|
||||
framework = expected;
|
||||
}
|
||||
} else {
|
||||
delete new_framework;
|
||||
framework = nullptr;
|
||||
}
|
||||
}
|
||||
return framework;
|
||||
}
|
||||
|
||||
// Helper function to determine if a certificate is self-issued
|
||||
static bool is_certificate_self_issued(X509* cert) {
|
||||
X509_NAME* subject = X509_get_subject_name(cert);
|
||||
X509_NAME* issuer = X509_get_issuer_name(cert);
|
||||
|
||||
return subject && issuer && X509_NAME_cmp(subject, issuer) == 0;
|
||||
}
|
||||
|
||||
// Validate certificate trust using Security framework
|
||||
static bool is_certificate_trust_valid(SecurityFramework* security, SecCertificateRef cert_ref) {
|
||||
CFMutableArrayRef subj_certs = security->CFArrayCreateMutable(nullptr, 1, security->kCFTypeArrayCallBacks);
|
||||
if (!subj_certs) return false;
|
||||
|
||||
security->CFArraySetValueAtIndex(subj_certs, 0, cert_ref);
|
||||
|
||||
SecPolicyRef policy = security->SecPolicyCreateSSL(true, nullptr);
|
||||
if (!policy) {
|
||||
security->CFRelease(subj_certs);
|
||||
return false;
|
||||
}
|
||||
|
||||
CFArrayRef policies = security->CFArrayCreate(nullptr, (const void**)&policy, 1, security->kCFTypeArrayCallBacks);
|
||||
if (!policies) {
|
||||
security->CFRelease(policy);
|
||||
security->CFRelease(subj_certs);
|
||||
return false;
|
||||
}
|
||||
|
||||
SecTrustRef sec_trust = nullptr;
|
||||
OSStatus ortn = security->SecTrustCreateWithCertificates(subj_certs, policies, &sec_trust);
|
||||
|
||||
bool result = false;
|
||||
if (ortn == errSecSuccess && sec_trust) {
|
||||
result = security->SecTrustEvaluateWithError(sec_trust, nullptr);
|
||||
}
|
||||
|
||||
// Cleanup
|
||||
if (sec_trust) security->CFRelease(sec_trust);
|
||||
security->CFRelease(policies);
|
||||
security->CFRelease(policy);
|
||||
security->CFRelease(subj_certs);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Check trust settings for policy (simplified version)
|
||||
static TrustStatus is_trust_settings_trusted_for_policy(SecurityFramework* security, CFArrayRef trust_settings, bool is_self_issued) {
|
||||
if (!trust_settings) {
|
||||
return TrustStatus::UNSPECIFIED;
|
||||
}
|
||||
|
||||
// Empty trust settings array means "always trust this certificate"
|
||||
if (security->CFArrayGetCount(trust_settings) == 0) {
|
||||
return is_self_issued ? TrustStatus::TRUSTED : TrustStatus::UNSPECIFIED;
|
||||
}
|
||||
|
||||
// For simplicity, we'll do basic checking here
|
||||
// A full implementation would parse the trust dictionary entries
|
||||
return TrustStatus::UNSPECIFIED;
|
||||
}
|
||||
|
||||
// Check if certificate is trusted for server auth policy
|
||||
static bool is_certificate_trusted_for_policy(SecurityFramework* security, X509* cert, SecCertificateRef cert_ref) {
|
||||
bool is_self_issued = is_certificate_self_issued(cert);
|
||||
bool trust_evaluated = false;
|
||||
|
||||
// Check user trust domain, then admin domain
|
||||
for (const auto& trust_domain : {kSecTrustSettingsDomainUser, kSecTrustSettingsDomainAdmin, kSecTrustSettingsDomainSystem}) {
|
||||
CFArrayRef trust_settings = nullptr;
|
||||
OSStatus err = security->SecTrustSettingsCopyTrustSettings(cert_ref, trust_domain, &trust_settings);
|
||||
|
||||
if (err != errSecSuccess && err != errSecItemNotFound) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (err == errSecSuccess && trust_settings) {
|
||||
TrustStatus result = is_trust_settings_trusted_for_policy(security, trust_settings, is_self_issued);
|
||||
security->CFRelease(trust_settings);
|
||||
|
||||
if (result == TrustStatus::TRUSTED) {
|
||||
return true;
|
||||
} else if (result == TrustStatus::DISTRUSTED) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// If no trust settings and we haven't evaluated trust yet, check trust validity
|
||||
if (!trust_settings && !trust_evaluated) {
|
||||
if (is_certificate_trust_valid(security, cert_ref)) {
|
||||
return true;
|
||||
}
|
||||
trust_evaluated = true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
// Main function to load system certificates on macOS
|
||||
extern "C" void us_load_system_certificates_macos(STACK_OF(X509) **system_certs) {
|
||||
*system_certs = sk_X509_new_null();
|
||||
if (!*system_certs) {
|
||||
return;
|
||||
}
|
||||
|
||||
SecurityFramework* security = get_security_framework();
|
||||
if (!security) {
|
||||
return; // Fail silently
|
||||
}
|
||||
|
||||
// Create search dictionary for certificates
|
||||
CFTypeRef search_keys[] = {
|
||||
security->kSecClass,
|
||||
security->kSecMatchLimit,
|
||||
security->kSecReturnRef,
|
||||
security->kSecMatchTrustedOnly,
|
||||
};
|
||||
CFTypeRef search_values[] = {
|
||||
security->kSecClassCertificate,
|
||||
security->kSecMatchLimitAll,
|
||||
security->kCFBooleanTrue,
|
||||
security->kCFBooleanTrue,
|
||||
};
|
||||
|
||||
CFDictionaryRef search = security->CFDictionaryCreate(
|
||||
security->kCFAllocatorDefault,
|
||||
search_keys,
|
||||
search_values,
|
||||
4,
|
||||
security->kCFTypeDictionaryKeyCallBacks,
|
||||
security->kCFTypeDictionaryValueCallBacks
|
||||
);
|
||||
|
||||
if (!search) {
|
||||
return;
|
||||
}
|
||||
|
||||
CFArrayRef certificates = nullptr;
|
||||
OSStatus status = security->SecItemCopyMatching(search, (CFTypeRef*)&certificates);
|
||||
security->CFRelease(search);
|
||||
|
||||
if (status != errSecSuccess || !certificates) {
|
||||
return;
|
||||
}
|
||||
|
||||
CFIndex count = security->CFArrayGetCount(certificates);
|
||||
|
||||
for (CFIndex i = 0; i < count; ++i) {
|
||||
SecCertificateRef cert_ref = (SecCertificateRef)security->CFArrayGetValueAtIndex(certificates, i);
|
||||
if (!cert_ref) continue;
|
||||
|
||||
// Get certificate data
|
||||
CFDataRef cert_data = security->SecCertificateCopyData(cert_ref);
|
||||
if (!cert_data) continue;
|
||||
|
||||
// Convert to X509
|
||||
const unsigned char* data_ptr = security->CFDataGetBytePtr(cert_data);
|
||||
long data_len = security->CFDataGetLength(cert_data);
|
||||
X509* x509_cert = d2i_X509(nullptr, &data_ptr, data_len);
|
||||
security->CFRelease(cert_data);
|
||||
|
||||
if (!x509_cert) continue;
|
||||
|
||||
// Only consider CA certificates
|
||||
if (X509_check_ca(x509_cert) == 1 &&
|
||||
is_certificate_trusted_for_policy(security, x509_cert, cert_ref)) {
|
||||
sk_X509_push(*system_certs, x509_cert);
|
||||
} else {
|
||||
X509_free(x509_cert);
|
||||
}
|
||||
}
|
||||
|
||||
security->CFRelease(certificates);
|
||||
}
|
||||
|
||||
// Cleanup function for Security framework
|
||||
extern "C" void us_cleanup_security_framework() {
|
||||
SecurityFramework* framework = g_security_framework.exchange(nullptr);
|
||||
if (framework) {
|
||||
delete framework;
|
||||
}
|
||||
}
|
||||
|
||||
#endif // __APPLE__
|
||||
@@ -5,6 +5,7 @@
#define CPPDECL extern "C"

STACK_OF(X509) *us_get_root_extra_cert_instances();
STACK_OF(X509) *us_get_root_system_cert_instances();

#else
#define CPPDECL extern
170
packages/bun-usockets/src/crypto/root_certs_linux.cpp
Normal file
170
packages/bun-usockets/src/crypto/root_certs_linux.cpp
Normal file
@@ -0,0 +1,170 @@
|
||||
#ifndef _WIN32
|
||||
#ifndef __APPLE__
|
||||
|
||||
#include <dirent.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <limits.h>
|
||||
#include <openssl/x509.h>
|
||||
#include <openssl/x509_vfy.h>
|
||||
#include <openssl/pem.h>
|
||||
|
||||
extern "C" void BUN__warn__extra_ca_load_failed(const char* filename, const char* error_msg);
|
||||
|
||||
// Helper function to load certificates from a directory
|
||||
static void load_certs_from_directory(const char* dir_path, STACK_OF(X509)* cert_stack) {
|
||||
DIR* dir = opendir(dir_path);
|
||||
if (!dir) {
|
||||
return;
|
||||
}
|
||||
|
||||
struct dirent* entry;
|
||||
while ((entry = readdir(dir)) != NULL) {
|
||||
// Skip . and ..
|
||||
if (strcmp(entry->d_name, ".") == 0 || strcmp(entry->d_name, "..") == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if file has .crt, .pem, or .cer extension
|
||||
const char* ext = strrchr(entry->d_name, '.');
|
||||
if (!ext || (strcmp(ext, ".crt") != 0 && strcmp(ext, ".pem") != 0 && strcmp(ext, ".cer") != 0)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Build full path
|
||||
char filepath[PATH_MAX];
|
||||
snprintf(filepath, sizeof(filepath), "%s/%s", dir_path, entry->d_name);
|
||||
|
||||
// Try to load certificate
|
||||
FILE* file = fopen(filepath, "r");
|
||||
if (file) {
|
||||
X509* cert = PEM_read_X509(file, NULL, NULL, NULL);
|
||||
fclose(file);
|
||||
|
||||
if (cert) {
|
||||
if (!sk_X509_push(cert_stack, cert)) {
|
||||
X509_free(cert);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
closedir(dir);
|
||||
}
|
||||
|
||||
// Helper function to load certificates from a bundle file
|
||||
static void load_certs_from_bundle(const char* bundle_path, STACK_OF(X509)* cert_stack) {
|
||||
FILE* file = fopen(bundle_path, "r");
|
||||
if (!file) {
|
||||
return;
|
||||
}
|
||||
|
||||
X509* cert;
|
||||
while ((cert = PEM_read_X509(file, NULL, NULL, NULL)) != NULL) {
|
||||
if (!sk_X509_push(cert_stack, cert)) {
|
||||
X509_free(cert);
|
||||
break;
|
||||
}
|
||||
}
|
||||
ERR_clear_error();
|
||||
|
||||
fclose(file);
|
||||
}
|
||||
|
||||
// Main function to load system certificates on Linux and other Unix-like systems
|
||||
extern "C" void us_load_system_certificates_linux(STACK_OF(X509) **system_certs) {
|
||||
*system_certs = sk_X509_new_null();
|
||||
  if (*system_certs == NULL) {
    return;
  }

  // First check environment variables (same as Node.js and OpenSSL)
  const char* ssl_cert_file = getenv("SSL_CERT_FILE");
  const char* ssl_cert_dir = getenv("SSL_CERT_DIR");

  // If SSL_CERT_FILE is set, load from it
  if (ssl_cert_file && strlen(ssl_cert_file) > 0) {
    load_certs_from_bundle(ssl_cert_file, *system_certs);
  }

  // If SSL_CERT_DIR is set, load from each directory (colon-separated)
  if (ssl_cert_dir && strlen(ssl_cert_dir) > 0) {
    char* dir_copy = strdup(ssl_cert_dir);
    if (dir_copy) {
      char* token = strtok(dir_copy, ":");
      while (token != NULL) {
        // Skip empty tokens
        if (strlen(token) > 0) {
          load_certs_from_directory(token, *system_certs);
        }
        token = strtok(NULL, ":");
      }
      free(dir_copy);
    }
  }

  // If environment variables were set, use only those (even if they yield zero certs)
  if (ssl_cert_file || ssl_cert_dir) {
    return;
  }

  // Otherwise, load certificates from standard Linux/Unix paths
  // These are the common locations for system certificates

  // Common certificate bundle locations (single file with multiple certs)
  // These paths are based on common Linux distributions and OpenSSL defaults
  static const char* bundle_paths[] = {
    "/etc/ssl/certs/ca-certificates.crt",                   // Debian/Ubuntu/Gentoo
    "/etc/pki/tls/certs/ca-bundle.crt",                     // Fedora/RHEL 6
    "/etc/ssl/ca-bundle.pem",                               // OpenSUSE
    "/etc/pki/tls/cert.pem",                                // Fedora/RHEL 7+
    "/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem",    // CentOS/RHEL 7+
    "/etc/ssl/cert.pem",                                    // Alpine Linux, macOS OpenSSL
    "/usr/local/etc/openssl/cert.pem",                      // Homebrew OpenSSL on macOS
    "/usr/local/share/ca-certificates/ca-certificates.crt", // Custom CA installs
    NULL
  };

  // Common certificate directory locations (multiple files)
  // Note: OpenSSL expects hashed symlinks in directories (c_rehash format)
  static const char* dir_paths[] = {
    "/etc/ssl/certs",                // Common location (Debian/Ubuntu with hashed links)
    "/etc/pki/tls/certs",            // RHEL/Fedora
    "/usr/share/ca-certificates",    // Debian/Ubuntu (original certs, not hashed)
    "/usr/local/share/certs",        // FreeBSD
    "/etc/openssl/certs",            // NetBSD
    "/var/ssl/certs",                // AIX
    "/usr/local/etc/openssl/certs",  // Homebrew OpenSSL on macOS
    "/System/Library/OpenSSL/certs", // macOS system OpenSSL (older versions)
    NULL
  };

  // Try loading from bundle files first
  for (const char** path = bundle_paths; *path != NULL; path++) {
    load_certs_from_bundle(*path, *system_certs);
  }

  // Then try loading from directories
  for (const char** path = dir_paths; *path != NULL; path++) {
    load_certs_from_directory(*path, *system_certs);
  }

  // Also check NODE_EXTRA_CA_CERTS environment variable
  const char* extra_ca_certs = getenv("NODE_EXTRA_CA_CERTS");
  if (extra_ca_certs && strlen(extra_ca_certs) > 0) {
    FILE* file = fopen(extra_ca_certs, "r");
    if (file) {
      X509* cert;
      while ((cert = PEM_read_X509(file, NULL, NULL, NULL)) != NULL) {
        sk_X509_push(*system_certs, cert);
      }
      fclose(file);
    } else {
      BUN__warn__extra_ca_load_failed(extra_ca_certs, "Failed to open file");
    }
  }
}

#endif // !__APPLE__
#endif // !_WIN32
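The lookup order above is worth spelling out: SSL_CERT_FILE and SSL_CERT_DIR take absolute precedence, and the function returns early even if they yield zero certificates, so both the built-in paths and NODE_EXTRA_CA_CERTS are skipped on that path; only when neither variable is set does it fall through to the distro bundle/directory tables and then append NODE_EXTRA_CA_CERTS. A minimal TypeScript sketch of that precedence, added here for illustration and not part of this diff; the fallback paths are abbreviated to the first entry of each table:

```ts
// Illustration only: mirrors the precedence implemented in the C code above.
function certSources(env: Record<string, string | undefined>) {
  const file = env.SSL_CERT_FILE;
  const dirVar = env.SSL_CERT_DIR;
  if (file || dirVar) {
    // Environment variables win outright, even if they yield zero certificates,
    // and NODE_EXTRA_CA_CERTS is not consulted on this path.
    return {
      bundles: file ? [file] : [],
      dirs: (dirVar ?? "").split(":").filter(d => d.length > 0), // empty tokens skipped, like the strtok loop
      extra: null,
    };
  }
  return {
    bundles: ["/etc/ssl/certs/ca-certificates.crt" /* ... */],
    dirs: ["/etc/ssl/certs" /* ... */],
    extra: env.NODE_EXTRA_CA_CERTS || null, // appended after the default paths
  };
}

// Example: a colon-separated SSL_CERT_DIR with an empty token behaves like two directories.
console.log(certSources({ SSL_CERT_DIR: "/opt/ca::/srv/ca" }).dirs); // ["/opt/ca", "/srv/ca"]
```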
18  packages/bun-usockets/src/crypto/root_certs_platform.h  Normal file
@@ -0,0 +1,18 @@
#pragma once

#include <openssl/x509.h>

// Platform-specific certificate loading functions
extern "C" {

// Load system certificates for the current platform
void us_load_system_certificates_linux(STACK_OF(X509) **system_certs);
void us_load_system_certificates_macos(STACK_OF(X509) **system_certs);
void us_load_system_certificates_windows(STACK_OF(X509) **system_certs);

// Platform-specific cleanup functions
#ifdef __APPLE__
void us_cleanup_security_framework();
#endif

}

53  packages/bun-usockets/src/crypto/root_certs_windows.cpp  Normal file
@@ -0,0 +1,53 @@
#ifdef _WIN32

#include <windows.h>
#include <wincrypt.h>
#include <vector>
#include <cstring>

// Forward declaration to avoid including OpenSSL headers here
// This prevents conflicts with Windows macros like X509_NAME
// Note: We don't use STACK_OF macro here since we don't have OpenSSL headers

// Structure to hold raw certificate data
struct RawCertificate {
  std::vector<unsigned char> data;
};

// Helper function to load raw certificates from a Windows certificate store
static void LoadRawCertsFromStore(std::vector<RawCertificate>& raw_certs,
                                  DWORD store_flags,
                                  const wchar_t* store_name) {
  HCERTSTORE cert_store = CertOpenStore(
      CERT_STORE_PROV_SYSTEM_W,
      0,
      0,
      store_flags | CERT_STORE_READONLY_FLAG,
      store_name
  );

  if (cert_store == NULL) {
    return;
  }

  PCCERT_CONTEXT cert_context = NULL;
  while ((cert_context = CertEnumCertificatesInStore(cert_store, cert_context)) != NULL) {
    RawCertificate raw_cert;
    raw_cert.data.assign(cert_context->pbCertEncoded,
                         cert_context->pbCertEncoded + cert_context->cbCertEncoded);
    raw_certs.push_back(std::move(raw_cert));
  }

  CertCloseStore(cert_store, 0);
}

// Main function to load raw system certificates on Windows
// Returns certificates as raw DER data to avoid OpenSSL header conflicts
extern void us_load_system_certificates_windows_raw(
    std::vector<RawCertificate>& raw_certs) {
  // Load only from ROOT by default
  LoadRawCertsFromStore(raw_certs, CERT_SYSTEM_STORE_CURRENT_USER, L"ROOT");
  LoadRawCertsFromStore(raw_certs, CERT_SYSTEM_STORE_LOCAL_MACHINE, L"ROOT");
}

#endif // _WIN32
@@ -22,7 +22,16 @@
|
||||
#ifndef WIN32
|
||||
#include <sys/ioctl.h>
|
||||
#endif
|
||||
|
||||
#if __has_include("wtf/Platform.h")
|
||||
#include "wtf/Platform.h"
|
||||
#elif !defined(ASSERT_ENABLED)
|
||||
#if defined(BUN_DEBUG) || defined(__has_feature) && __has_feature(address_sanitizer) || defined(__SANITIZE_ADDRESS__)
|
||||
#define ASSERT_ENABLED 1
|
||||
#else
|
||||
#define ASSERT_ENABLED 0
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#if ASSERT_ENABLED
|
||||
extern const size_t Bun__lock__size;
|
||||
|
||||
@@ -627,9 +627,15 @@ public:
|
||||
return std::move(*this);
|
||||
}
|
||||
|
||||
void setOnClose(HttpContextData<SSL>::OnSocketClosedCallback onClose) {
|
||||
void setOnSocketClosed(HttpContextData<SSL>::OnSocketClosedCallback onClose) {
|
||||
httpContext->getSocketContextData()->onSocketClosed = onClose;
|
||||
}
|
||||
void setOnSocketDrain(HttpContextData<SSL>::OnSocketDrainCallback onDrain) {
|
||||
httpContext->getSocketContextData()->onSocketDrain = onDrain;
|
||||
}
|
||||
void setOnSocketData(HttpContextData<SSL>::OnSocketDataCallback onData) {
|
||||
httpContext->getSocketContextData()->onSocketData = onData;
|
||||
}
|
||||
|
||||
void setOnClientError(HttpContextData<SSL>::OnClientErrorCallback onClientError) {
|
||||
httpContext->getSocketContextData()->onClientError = std::move(onClientError);
|
||||
|
||||
@@ -193,23 +193,32 @@ private:
|
||||
auto *httpResponseData = reinterpret_cast<HttpResponseData<SSL> *>(us_socket_ext(SSL, s));
|
||||
|
||||
|
||||
|
||||
/* Call filter */
|
||||
HttpContextData<SSL> *httpContextData = getSocketContextDataS(s);
|
||||
|
||||
if(httpResponseData && httpResponseData->isConnectRequest) {
|
||||
if (httpResponseData->socketData && httpContextData->onSocketData) {
|
||||
httpContextData->onSocketData(httpResponseData->socketData, SSL, s, "", 0, true);
|
||||
}
|
||||
if(httpResponseData->inStream) {
|
||||
httpResponseData->inStream(reinterpret_cast<HttpResponse<SSL> *>(s), "", 0, true, httpResponseData->userData);
|
||||
httpResponseData->inStream = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
for (auto &f : httpContextData->filterHandlers) {
|
||||
f((HttpResponse<SSL> *) s, -1);
|
||||
}
|
||||
|
||||
if (httpResponseData->socketData && httpContextData->onSocketClosed) {
|
||||
httpContextData->onSocketClosed(httpResponseData->socketData, SSL, s);
|
||||
}
|
||||
/* Signal broken HTTP request only if we have a pending request */
|
||||
if (httpResponseData->onAborted != nullptr && httpResponseData->userData != nullptr) {
|
||||
httpResponseData->onAborted((HttpResponse<SSL> *)s, httpResponseData->userData);
|
||||
}
|
||||
|
||||
if (httpResponseData->socketData && httpContextData->onSocketClosed) {
|
||||
httpContextData->onSocketClosed(httpResponseData->socketData, SSL, s);
|
||||
}
|
||||
|
||||
/* Destruct socket ext */
|
||||
httpResponseData->~HttpResponseData<SSL>();
|
||||
@@ -254,7 +263,9 @@ private:
|
||||
|
||||
/* The return value is entirely up to us to interpret. The HttpParser cares only for whether the returned value is DIFFERENT from passed user */
|
||||
|
||||
auto result = httpResponseData->consumePostPadded(httpContextData->maxHeaderSize, httpContextData->flags.requireHostHeader,httpContextData->flags.useStrictMethodValidation, data, (unsigned int) length, s, proxyParser, [httpContextData](void *s, HttpRequest *httpRequest) -> void * {
|
||||
auto result = httpResponseData->consumePostPadded(httpContextData->maxHeaderSize, httpResponseData->isConnectRequest, httpContextData->flags.requireHostHeader,httpContextData->flags.useStrictMethodValidation, data, (unsigned int) length, s, proxyParser, [httpContextData](void *s, HttpRequest *httpRequest) -> void * {
|
||||
|
||||
|
||||
/* For every request we reset the timeout and hang until user makes action */
|
||||
/* Warning: if we are in shutdown state, resetting the timer is a security issue! */
|
||||
us_socket_timeout(SSL, (us_socket_t *) s, 0);
|
||||
@@ -330,7 +341,12 @@ private:
|
||||
/* Continue parsing */
|
||||
return s;
|
||||
|
||||
}, [httpResponseData](void *user, std::string_view data, bool fin) -> void * {
|
||||
}, [httpResponseData, httpContextData](void *user, std::string_view data, bool fin) -> void * {
|
||||
|
||||
|
||||
if (httpResponseData->isConnectRequest && httpResponseData->socketData && httpContextData->onSocketData) {
|
||||
httpContextData->onSocketData(httpResponseData->socketData, SSL, (struct us_socket_t *) user, data.data(), data.length(), fin);
|
||||
}
|
||||
/* We always get an empty chunk even if there is no data */
|
||||
if (httpResponseData->inStream) {
|
||||
|
||||
@@ -449,7 +465,7 @@ private:
|
||||
us_socket_context_on_writable(SSL, getSocketContext(), [](us_socket_t *s) {
|
||||
auto *asyncSocket = reinterpret_cast<AsyncSocket<SSL> *>(s);
|
||||
auto *httpResponseData = reinterpret_cast<HttpResponseData<SSL> *>(asyncSocket->getAsyncSocketData());
|
||||
|
||||
|
||||
/* Attempt to drain the socket buffer before triggering onWritable callback */
|
||||
size_t bufferedAmount = asyncSocket->getBufferedAmount();
|
||||
if (bufferedAmount > 0) {
|
||||
@@ -470,6 +486,12 @@ private:
|
||||
*/
|
||||
}
|
||||
|
||||
auto *httpContextData = getSocketContextDataS(s);
|
||||
|
||||
|
||||
if (httpResponseData->isConnectRequest && httpResponseData->socketData && httpContextData->onSocketDrain) {
|
||||
httpContextData->onSocketDrain(httpResponseData->socketData, SSL, (struct us_socket_t *) s);
|
||||
}
|
||||
/* Ask the developer to write data and return success (true) or failure (false), OR skip sending anything and return success (true). */
|
||||
if (httpResponseData->onWritable) {
|
||||
/* We are now writable, so hang timeout again, the user does not have to do anything so we should hang until end or tryEnd rearms timeout */
|
||||
@@ -514,6 +536,7 @@ private:
|
||||
us_socket_context_on_end(SSL, getSocketContext(), [](us_socket_t *s) {
|
||||
auto *asyncSocket = reinterpret_cast<AsyncSocket<SSL> *>(s);
|
||||
asyncSocket->uncorkWithoutSending();
|
||||
|
||||
/* We do not care for half closed sockets */
|
||||
return asyncSocket->close();
|
||||
});
|
||||
|
||||
@@ -44,7 +44,10 @@ struct alignas(16) HttpContextData {
|
||||
private:
|
||||
std::vector<MoveOnlyFunction<void(HttpResponse<SSL> *, int)>> filterHandlers;
|
||||
using OnSocketClosedCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);
|
||||
using OnSocketDataCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket, const char *data, int length, bool last);
|
||||
using OnSocketDrainCallback = void (*)(void* userData, int is_ssl, struct us_socket_t *rawSocket);
|
||||
using OnClientErrorCallback = MoveOnlyFunction<void(int is_ssl, struct us_socket_t *rawSocket, uWS::HttpParserError errorCode, char *rawPacket, int rawPacketLength)>;
|
||||
|
||||
|
||||
MoveOnlyFunction<void(const char *hostname)> missingServerNameHandler;
|
||||
|
||||
@@ -61,6 +64,8 @@ private:
|
||||
void *upgradedWebSocket = nullptr;
|
||||
/* Used to simulate Node.js socket events. */
|
||||
OnSocketClosedCallback onSocketClosed = nullptr;
|
||||
OnSocketDrainCallback onSocketDrain = nullptr;
|
||||
OnSocketDataCallback onSocketData = nullptr;
|
||||
OnClientErrorCallback onClientError = nullptr;
|
||||
|
||||
uint64_t maxHeaderSize = 0; // 0 means no limit
|
||||
|
||||
@@ -117,18 +117,19 @@ namespace uWS
|
||||
struct ConsumeRequestLineResult {
|
||||
char *position;
|
||||
bool isAncientHTTP;
|
||||
bool isConnect;
|
||||
HTTPHeaderParserError headerParserError;
|
||||
public:
|
||||
static ConsumeRequestLineResult error(HTTPHeaderParserError error) {
|
||||
return ConsumeRequestLineResult{nullptr, false, error};
|
||||
return ConsumeRequestLineResult{nullptr, false, false, error};
|
||||
}
|
||||
|
||||
static ConsumeRequestLineResult success(char *position, bool isAncientHTTP = false) {
|
||||
return ConsumeRequestLineResult{position, isAncientHTTP, HTTP_HEADER_PARSER_ERROR_NONE};
|
||||
static ConsumeRequestLineResult success(char *position, bool isAncientHTTP = false, bool isConnect = false) {
|
||||
return ConsumeRequestLineResult{position, isAncientHTTP, isConnect, HTTP_HEADER_PARSER_ERROR_NONE};
|
||||
}
|
||||
|
||||
static ConsumeRequestLineResult shortRead(bool isAncientHTTP = false) {
|
||||
return ConsumeRequestLineResult{nullptr, isAncientHTTP, HTTP_HEADER_PARSER_ERROR_NONE};
|
||||
static ConsumeRequestLineResult shortRead(bool isAncientHTTP = false, bool isConnect = false) {
|
||||
return ConsumeRequestLineResult{nullptr, isAncientHTTP, isConnect, HTTP_HEADER_PARSER_ERROR_NONE};
|
||||
}
|
||||
|
||||
bool isErrorOrShortRead() {
|
||||
@@ -551,7 +552,10 @@ namespace uWS
|
||||
return ConsumeRequestLineResult::shortRead();
|
||||
}
|
||||
|
||||
if (data[0] == 32 && (__builtin_expect(data[1] == '/', 1) || isHTTPorHTTPSPrefixForProxies(data + 1, end) == 1)) [[likely]] {
|
||||
|
||||
bool isHTTPMethod = (__builtin_expect(data[1] == '/', 1));
|
||||
bool isConnect = !isHTTPMethod && (isHTTPorHTTPSPrefixForProxies(data + 1, end) == 1 || ((data - start) == 7 && memcmp(start, "CONNECT", 7) == 0));
|
||||
if (isHTTPMethod || isConnect) [[likely]] {
|
||||
header.key = {start, (size_t) (data - start)};
|
||||
data++;
|
||||
if(!isValidMethod(header.key, useStrictMethodValidation)) {
|
||||
@@ -577,22 +581,22 @@ namespace uWS
|
||||
if (nextPosition >= end) {
|
||||
/* Whatever we have must be part of the version string */
|
||||
if (memcmp(" HTTP/1.1\r\n", data, std::min<unsigned int>(11, (unsigned int) (end - data))) == 0) {
|
||||
return ConsumeRequestLineResult::shortRead();
|
||||
return ConsumeRequestLineResult::shortRead(false, isConnect);
|
||||
} else if (memcmp(" HTTP/1.0\r\n", data, std::min<unsigned int>(11, (unsigned int) (end - data))) == 0) {
|
||||
/*Indicates that the request line is ancient HTTP*/
|
||||
return ConsumeRequestLineResult::shortRead(true);
|
||||
return ConsumeRequestLineResult::shortRead(true, isConnect);
|
||||
}
|
||||
return ConsumeRequestLineResult::error(HTTP_HEADER_PARSER_ERROR_INVALID_HTTP_VERSION);
|
||||
}
|
||||
if (memcmp(" HTTP/1.1\r\n", data, 11) == 0) {
|
||||
return ConsumeRequestLineResult::success(nextPosition);
|
||||
return ConsumeRequestLineResult::success(nextPosition, false, isConnect);
|
||||
} else if (memcmp(" HTTP/1.0\r\n", data, 11) == 0) {
|
||||
/*Indicates that the request line is ancient HTTP*/
|
||||
return ConsumeRequestLineResult::success(nextPosition, true);
|
||||
return ConsumeRequestLineResult::success(nextPosition, true, isConnect);
|
||||
}
|
||||
/* If we stand at the post padded CR, we have fragmented input so try again later */
|
||||
if (data[0] == '\r') {
|
||||
return ConsumeRequestLineResult::shortRead();
|
||||
return ConsumeRequestLineResult::shortRead(false, isConnect);
|
||||
}
|
||||
/* This is an error */
|
||||
return ConsumeRequestLineResult::error(HTTP_HEADER_PARSER_ERROR_INVALID_HTTP_VERSION);
|
||||
@@ -602,14 +606,14 @@ namespace uWS
|
||||
|
||||
/* If we stand at the post padded CR, we have fragmented input so try again later */
|
||||
if (data[0] == '\r') {
|
||||
return ConsumeRequestLineResult::shortRead();
|
||||
return ConsumeRequestLineResult::shortRead(false, isConnect);
|
||||
}
|
||||
|
||||
if (data[0] == 32) {
|
||||
switch (isHTTPorHTTPSPrefixForProxies(data + 1, end)) {
|
||||
// If we haven't received enough data to check if it's http:// or https://, let's try again later
|
||||
case -1:
|
||||
return ConsumeRequestLineResult::shortRead();
|
||||
return ConsumeRequestLineResult::shortRead(false, isConnect);
|
||||
// Otherwise, if it's not http:// or https://, return 400
|
||||
default:
|
||||
return ConsumeRequestLineResult::error(HTTP_HEADER_PARSER_ERROR_INVALID_REQUEST);
|
||||
@@ -635,7 +639,7 @@ namespace uWS
|
||||
}
|
||||
|
||||
/* End is only used for the proxy parser. The HTTP parser recognizes "\ra" as invalid "\r\n" scan and breaks. */
|
||||
static HttpParserResult getHeaders(char *postPaddedBuffer, char *end, struct HttpRequest::Header *headers, void *reserved, bool &isAncientHTTP, bool useStrictMethodValidation, uint64_t maxHeaderSize) {
|
||||
static HttpParserResult getHeaders(char *postPaddedBuffer, char *end, struct HttpRequest::Header *headers, void *reserved, bool &isAncientHTTP, bool &isConnectRequest, bool useStrictMethodValidation, uint64_t maxHeaderSize) {
|
||||
char *preliminaryKey, *preliminaryValue, *start = postPaddedBuffer;
|
||||
#ifdef UWS_WITH_PROXY
|
||||
/* ProxyParser is passed as reserved parameter */
|
||||
@@ -689,6 +693,9 @@ namespace uWS
|
||||
if(requestLineResult.isAncientHTTP) {
|
||||
isAncientHTTP = true;
|
||||
}
|
||||
if(requestLineResult.isConnect) {
|
||||
isConnectRequest = true;
|
||||
}
|
||||
/* No request headers found */
|
||||
const char * headerStart = (headers[0].key.length() > 0) ? headers[0].key.data() : end;
|
||||
|
||||
@@ -798,7 +805,7 @@ namespace uWS
|
||||
|
||||
/* This is the only caller of getHeaders and is thus the deepest part of the parser. */
|
||||
template <bool ConsumeMinimally>
|
||||
HttpParserResult fenceAndConsumePostPadded(uint64_t maxHeaderSize, bool requireHostHeader, bool useStrictMethodValidation, char *data, unsigned int length, void *user, void *reserved, HttpRequest *req, MoveOnlyFunction<void *(void *, HttpRequest *)> &requestHandler, MoveOnlyFunction<void *(void *, std::string_view, bool)> &dataHandler) {
|
||||
HttpParserResult fenceAndConsumePostPadded(uint64_t maxHeaderSize, bool& isConnectRequest, bool requireHostHeader, bool useStrictMethodValidation, char *data, unsigned int length, void *user, void *reserved, HttpRequest *req, MoveOnlyFunction<void *(void *, HttpRequest *)> &requestHandler, MoveOnlyFunction<void *(void *, std::string_view, bool)> &dataHandler) {
|
||||
|
||||
/* How much data we CONSUMED (to throw away) */
|
||||
unsigned int consumedTotal = 0;
|
||||
@@ -809,7 +816,7 @@ namespace uWS
|
||||
data[length + 1] = 'a'; /* Anything that is not \n, to trigger "invalid request" */
|
||||
req->ancientHttp = false;
|
||||
for (;length;) {
|
||||
auto result = getHeaders(data, data + length, req->headers, reserved, req->ancientHttp, useStrictMethodValidation, maxHeaderSize);
|
||||
auto result = getHeaders(data, data + length, req->headers, reserved, req->ancientHttp, isConnectRequest, useStrictMethodValidation, maxHeaderSize);
|
||||
if(result.isError()) {
|
||||
return result;
|
||||
}
|
||||
@@ -916,6 +923,10 @@ namespace uWS
|
||||
length -= emittable;
|
||||
consumedTotal += emittable;
|
||||
}
|
||||
} else if(isConnectRequest) {
|
||||
// This only serves to mark that the connect request has read all headers
// and can start emitting data
|
||||
remainingStreamingBytes = STATE_IS_CHUNKED;
|
||||
} else {
|
||||
/* If we came here without a body; emit an empty data chunk to signal no data */
|
||||
dataHandler(user, {}, true);
|
||||
@@ -931,15 +942,16 @@ namespace uWS
|
||||
}
|
||||
|
||||
public:
|
||||
HttpParserResult consumePostPadded(uint64_t maxHeaderSize, bool requireHostHeader, bool useStrictMethodValidation, char *data, unsigned int length, void *user, void *reserved, MoveOnlyFunction<void *(void *, HttpRequest *)> &&requestHandler, MoveOnlyFunction<void *(void *, std::string_view, bool)> &&dataHandler) {
|
||||
|
||||
HttpParserResult consumePostPadded(uint64_t maxHeaderSize, bool& isConnectRequest, bool requireHostHeader, bool useStrictMethodValidation, char *data, unsigned int length, void *user, void *reserved, MoveOnlyFunction<void *(void *, HttpRequest *)> &&requestHandler, MoveOnlyFunction<void *(void *, std::string_view, bool)> &&dataHandler) {
|
||||
/* This resets BloomFilter by construction, but later we also reset it again.
|
||||
* Optimize this to skip resetting twice (req could be made global) */
|
||||
HttpRequest req;
|
||||
if (remainingStreamingBytes) {
|
||||
|
||||
/* It's either chunked or with a content-length */
|
||||
if (isParsingChunkedEncoding(remainingStreamingBytes)) {
|
||||
if (isConnectRequest) {
|
||||
dataHandler(user, std::string_view(data, length), false);
|
||||
return HttpParserResult::success(0, user);
|
||||
} else if (isParsingChunkedEncoding(remainingStreamingBytes)) {
|
||||
/* It's either chunked or with a content-length */
|
||||
std::string_view dataToConsume(data, length);
|
||||
for (auto chunk : uWS::ChunkIterator(&dataToConsume, &remainingStreamingBytes)) {
|
||||
dataHandler(user, chunk, chunk.length() == 0);
|
||||
@@ -950,6 +962,7 @@ public:
|
||||
data = (char *) dataToConsume.data();
|
||||
length = (unsigned int) dataToConsume.length();
|
||||
} else {
|
||||
|
||||
// this is exactly the same as below!
|
||||
// todo: refactor this
|
||||
if (remainingStreamingBytes >= length) {
|
||||
@@ -980,7 +993,7 @@ public:
|
||||
fallback.append(data, maxCopyDistance);
|
||||
|
||||
// break here on break
|
||||
HttpParserResult consumed = fenceAndConsumePostPadded<true>(maxHeaderSize, requireHostHeader, useStrictMethodValidation, fallback.data(), (unsigned int) fallback.length(), user, reserved, &req, requestHandler, dataHandler);
|
||||
HttpParserResult consumed = fenceAndConsumePostPadded<true>(maxHeaderSize, isConnectRequest, requireHostHeader, useStrictMethodValidation, fallback.data(), (unsigned int) fallback.length(), user, reserved, &req, requestHandler, dataHandler);
|
||||
/* Return data will be different than user if we are upgraded to WebSocket or have an error */
|
||||
if (consumed.returnedData != user) {
|
||||
return consumed;
|
||||
@@ -997,8 +1010,11 @@ public:
|
||||
length -= consumedBytes - had;
|
||||
|
||||
if (remainingStreamingBytes) {
|
||||
/* It's either chunked or with a content-length */
|
||||
if (isParsingChunkedEncoding(remainingStreamingBytes)) {
|
||||
if(isConnectRequest) {
|
||||
dataHandler(user, std::string_view(data, length), false);
|
||||
return HttpParserResult::success(0, user);
|
||||
} else if (isParsingChunkedEncoding(remainingStreamingBytes)) {
|
||||
/* It's either chunked or with a content-length */
|
||||
std::string_view dataToConsume(data, length);
|
||||
for (auto chunk : uWS::ChunkIterator(&dataToConsume, &remainingStreamingBytes)) {
|
||||
dataHandler(user, chunk, chunk.length() == 0);
|
||||
@@ -1037,7 +1053,7 @@ public:
|
||||
}
|
||||
}
|
||||
|
||||
HttpParserResult consumed = fenceAndConsumePostPadded<false>(maxHeaderSize, requireHostHeader, useStrictMethodValidation, data, length, user, reserved, &req, requestHandler, dataHandler);
|
||||
HttpParserResult consumed = fenceAndConsumePostPadded<false>(maxHeaderSize, isConnectRequest, requireHostHeader, useStrictMethodValidation, data, length, user, reserved, &req, requestHandler, dataHandler);
|
||||
/* Return data will be different than user if we are upgraded to WebSocket or have an error */
|
||||
if (consumed.returnedData != user) {
|
||||
return consumed;
|
||||
|
||||
@@ -243,7 +243,7 @@ public:
|
||||
/* Manually upgrade to WebSocket. Typically called in upgrade handler. Immediately calls open handler.
|
||||
* NOTE: Will invalidate 'this' as socket might change location in memory. Throw away after use. */
|
||||
template <typename UserData>
|
||||
us_socket_t *upgrade(UserData &&userData, std::string_view secWebSocketKey, std::string_view secWebSocketProtocol,
|
||||
us_socket_t *upgrade(UserData&& userData, std::string_view secWebSocketKey, std::string_view secWebSocketProtocol,
|
||||
std::string_view secWebSocketExtensions,
|
||||
struct us_socket_context_t *webSocketContext) {
|
||||
|
||||
@@ -350,7 +350,8 @@ public:
|
||||
us_socket_timeout(SSL, (us_socket_t *) webSocket, webSocketContextData->idleTimeoutComponents.first);
|
||||
|
||||
/* Move construct the UserData right before calling open handler */
|
||||
new (webSocket->getUserData()) UserData(std::move(userData));
|
||||
new (webSocket->getUserData()) UserData(std::forward<UserData>(userData));
|
||||
|
||||
|
||||
/* Emit open event and start the timeout */
|
||||
if (webSocketContextData->openHandler) {
|
||||
@@ -741,6 +742,10 @@ public:
|
||||
|
||||
return httpResponseData->socketData;
|
||||
}
|
||||
bool isConnectRequest() {
|
||||
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
|
||||
return httpResponseData->isConnectRequest;
|
||||
}
|
||||
|
||||
void setWriteOffset(uint64_t offset) {
|
||||
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
|
||||
|
||||
@@ -108,6 +108,7 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
|
||||
uint8_t state = 0;
|
||||
uint8_t idleTimeout = 10; // default HTTP_TIMEOUT 10 seconds
|
||||
bool fromAncientRequest = false;
|
||||
bool isConnectRequest = false;
|
||||
bool isIdle = true;
|
||||
bool shouldCloseOnceIdle = false;
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
## Features:

- Live in-editor error messages (gif below)
- Test runner codelens
- Vscode test runner support
- Debugger support
- Run scripts from package.json
- Visual lockfile viewer for old binary lockfiles (`bun.lockb`)

@@ -1,6 +1,6 @@
{
  "name": "bun-vscode",
  "version": "0.0.29",
  "version": "0.0.31",
  "author": "oven",
  "repository": {
    "type": "git",
@@ -116,20 +116,6 @@
      "category": "Bun",
      "enablement": "!inDebugMode && resourceLangId =~ /^(javascript|typescript|javascriptreact|typescriptreact)$/ && !isInDiffEditor && resourceScheme == 'untitled'",
      "icon": "$(play-circle)"
    },
    {
      "command": "extension.bun.runTest",
      "title": "Run all tests",
      "shortTitle": "Run Test",
      "category": "Bun",
      "icon": "$(play)"
    },
    {
      "command": "extension.bun.watchTest",
      "title": "Run all tests in watch mode",
      "shortTitle": "Run Test Watch",
      "category": "Bun",
      "icon": "$(sync)"
    }
  ],
  "menus": {

@@ -30,7 +30,7 @@ describe("BunTestController", () => {
    const pattern = internal.buildTestNamePattern(mockTests);

    expect(pattern).toContain(".*?");
    expect(pattern).toBe("(^ test with .*?$)|(^ test with \\.*?$)");
    expect(pattern).toBe("(^ ?test with .*?$)|(^ ?test with \\.*?$)");
  });

  test("should escape % formatters", () => {
@@ -41,7 +41,7 @@ describe("BunTestController", () => {

    const pattern = internal.buildTestNamePattern(mockTests);

    expect(pattern).toBe("(^ test with .*?$)|(^ test with .*?$)");
    expect(pattern).toBe("(^ ?test with .*?$)|(^ ?test with .*?$)");
  });

  test("should join multiple patterns with |", () => {
@@ -53,7 +53,7 @@ describe("BunTestController", () => {

    const pattern = internal.buildTestNamePattern(mockTests);

    expect(pattern).toBe("(^ test 1$)|(^ test 2$)|(^ test 3$)");
    expect(pattern).toBe("(^ ?test 1$)|(^ ?test 2$)|(^ ?test 3$)");
  });

  test("should handle describe blocks differently", () => {
@@ -61,7 +61,7 @@ describe("BunTestController", () => {

    const pattern = internal.buildTestNamePattern(mockTests);

    expect(pattern).toBe("(^ describe block )");
    expect(pattern).toBe("(^ ?describe block )");
  });

  test("should handle complex nested test names", () => {

@@ -1339,9 +1339,9 @@ export class BunTestController implements vscode.Disposable {
    t = t.replaceAll(/\$[\w\.\[\]]+/g, ".*?");

    if (test?.tags?.some(tag => tag.id === "test" || tag.id === "it")) {
      testNames.push(`^ ${t}$`);
      testNames.push(`^ ?${t}$`);
    } else if (test?.tags?.some(tag => tag.id === "describe")) {
      testNames.push(`^ ${t} `);
      testNames.push(`^ ?${t} `);
    } else {
      testNames.push(t);
    }
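The whole change above boils down to the optional leading space: `^ ?` lets the generated pattern match a test title whether or not the reported name carries the single leading space. A quick illustrative sketch (not part of the diff), using the pattern string from the updated expectation:

```ts
// Pattern string copied from the updated test expectation above.
const pattern = new RegExp("(^ ?test 1$)|(^ ?test 2$)|(^ ?test 3$)");

console.log(pattern.test(" test 1"));  // true  (leading space present)
console.log(pattern.test("test 2"));   // true  (leading space absent)
console.log(pattern.test("  test 3")); // false (only one optional space is allowed)
```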
@@ -1,5 +1,5 @@
|
||||
#!/bin/sh
|
||||
# Version: 18
|
||||
# Version: 19
|
||||
|
||||
# A script that installs the dependencies needed to build and test Bun.
|
||||
# This should work on macOS and Linux with a POSIX shell.
|
||||
@@ -685,6 +685,8 @@ install_common_software() {
|
||||
apt-transport-https \
|
||||
software-properties-common
|
||||
fi
|
||||
install_packages \
|
||||
libc6-dbg
|
||||
;;
|
||||
dnf)
|
||||
install_packages \
|
||||
@@ -1193,7 +1195,7 @@ install_docker() {
|
||||
execute_sudo amazon-linux-extras install docker
|
||||
;;
|
||||
amzn-* | alpine-*)
|
||||
install_packages docker
|
||||
install_packages docker docker-cli-compose
|
||||
;;
|
||||
*)
|
||||
sh="$(require sh)"
|
||||
@@ -1208,10 +1210,17 @@ install_docker() {
|
||||
if [ -f "$systemctl" ]; then
|
||||
execute_sudo "$systemctl" enable docker
|
||||
fi
|
||||
if [ "$os" = "linux" ] && [ "$distro" = "alpine" ]; then
|
||||
execute doas rc-update add docker default
|
||||
execute doas rc-service docker start
|
||||
fi
|
||||
|
||||
getent="$(which getent)"
|
||||
if [ -n "$("$getent" group docker)" ]; then
|
||||
usermod="$(which usermod)"
|
||||
if [ -z "$usermod" ]; then
|
||||
usermod="$(sudo which usermod)"
|
||||
fi
|
||||
if [ -f "$usermod" ]; then
|
||||
execute_sudo "$usermod" -aG docker "$user"
|
||||
fi
|
||||
|
||||
@@ -117,7 +117,7 @@ async function countReactions(issueNumbers: number[], verbose = false): Promise<
|
||||
}
|
||||
|
||||
// Small delay to avoid rate limiting
|
||||
await Bun.sleep(50);
|
||||
await Bun.sleep(1);
|
||||
}
|
||||
|
||||
return totalReactions;
72  scripts/handle-crash-patterns.ts  Normal file
@@ -0,0 +1,72 @@
#!/usr/bin/env bun

const body = process.env.GITHUB_ISSUE_BODY || "";
const title = process.env.GITHUB_ISSUE_TITLE || "";
const issueNumber = process.env.GITHUB_ISSUE_NUMBER;

if (!issueNumber) {
  throw new Error("GITHUB_ISSUE_NUMBER must be set");
}

interface CloseAction {
  reason: "not_planned" | "completed";
  comment: string;
}

let closeAction: CloseAction | null = null;

// Check for workers_terminated
if (body.includes("workers_terminated")) {
  closeAction = {
    reason: "not_planned",
    comment: `Duplicate of #15964
We are tracking worker stability issues in https://github.com/oven-sh/bun/issues/15964. For now, I recommend against terminating workers when possible.`,
  };
}

// Check for better-sqlite3 with RunCommand or AutoCommand
else if (body.includes("better-sqlite3") && (body.includes("[RunCommand]") || body.includes("[AutoCommand]"))) {
  closeAction = {
    reason: "not_planned",
    comment: `Duplicate of #4290.
better-sqlite3 is not supported yet in Bun due to missing V8 C++ APIs. For now, you can try [bun:sqlite](https://bun.com/docs/api/sqlite) for an almost drop-in replacement.`,
  };
}

// Check for CPU architecture issues (Segmentation Fault/Illegal Instruction with no_avx)
else if (
  (body.includes("Segmentation Fault") ||
    body.includes("Illegal Instruction") ||
    body.includes("IllegalInstruction")) &&
  body.includes("no_avx")
) {
  let comment = `Bun requires a CPU with the micro-architecture [\`nehalem\`](https://en.wikipedia.org/wiki/Nehalem_(microarchitecture)) or later (released in 2008). If you're using a CPU emulator like qemu, then try enabling x86-64-v2.`;

  // Check if it's macOS
  const platformMatch = body.match(/Platform:\s*([^\n]+)/i) || body.match(/on\s+(macos|darwin)/i);
  const isMacOS =
    platformMatch &&
    (platformMatch[1]?.toLowerCase().includes("darwin") || platformMatch[1]?.toLowerCase().includes("macos"));

  if (isMacOS) {
    comment += `\n\nIf you're on a macOS silicon device, you're running Bun via the Rosetta CPU emulator and your best option is to run Bun natively instead.`;
  }

  closeAction = {
    reason: "not_planned",
    comment,
  };
}

if (closeAction) {
  // Output the action to take
  console.write(
    JSON.stringify({
      close: true,
      reason: closeAction.reason,
      comment: closeAction.comment,
    }),
  );
} else {
  console.write(JSON.stringify({ close: false }));
}
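For context, the script prints a single JSON object on stdout and leaves the actual issue-closing to whatever invokes it. A hedged sketch of how a workflow step might consume that output; the env values and the GitHub API call are placeholders, not something shown in this diff:

```ts
// Hypothetical consumer; the env values below are placeholders.
const proc = Bun.spawn(["bun", "scripts/handle-crash-patterns.ts"], {
  env: {
    ...process.env,
    GITHUB_ISSUE_NUMBER: "1",                       // placeholder
    GITHUB_ISSUE_TITLE: "bun crashed",              // placeholder
    GITHUB_ISSUE_BODY: "panic: workers_terminated", // placeholder
  },
  stdout: "pipe",
});

const action = JSON.parse(await new Response(proc.stdout).text());
if (action.close) {
  // A real workflow would call the GitHub API here with action.reason and action.comment.
  console.log(`would close as ${action.reason}:\n${action.comment}`);
}
```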
@@ -6,6 +6,9 @@ if (!body) {
|
||||
|
||||
const latest = (await Bun.file(join(import.meta.dir, "..", "LATEST")).text()).trim();
|
||||
|
||||
// Check if this is a standalone executable
|
||||
const isStandalone = body.includes("standalone_executable");
|
||||
|
||||
const lines = body.split("\n").reverse();
|
||||
|
||||
for (let line of lines) {
|
||||
@@ -39,6 +42,11 @@ for (let line of lines) {
|
||||
await Bun.write("is-outdated.txt", "true");
|
||||
await Bun.write("outdated.txt", version);
|
||||
|
||||
// Write flag for standalone executables
|
||||
if (isStandalone) {
|
||||
await Bun.write("is-standalone.txt", "true");
|
||||
}
|
||||
|
||||
const isVeryOutdated =
|
||||
major !== latestMajor || minor !== latestMinor || (latestPatch > patch && latestPatch - patch > 3);
|
||||
|
||||
|
||||
@@ -72,6 +72,7 @@ const cwd = import.meta.dirname ? dirname(import.meta.dirname) : process.cwd();
|
||||
const testsPath = join(cwd, "test");
|
||||
|
||||
const spawnTimeout = 5_000;
|
||||
const spawnBunTimeout = 20_000; // when running with ASAN/LSAN bun can take a bit longer to exit, not a bug.
|
||||
const testTimeout = 3 * 60_000;
|
||||
const integrationTimeout = 5 * 60_000;
|
||||
|
||||
@@ -79,7 +80,7 @@ function getNodeParallelTestTimeout(testPath) {
|
||||
if (testPath.includes("test-dns")) {
|
||||
return 90_000;
|
||||
}
|
||||
return 10_000;
|
||||
return 20_000;
|
||||
}
|
||||
|
||||
process.on("SIGTRAP", () => {
|
||||
@@ -298,7 +299,7 @@ function getTestExpectations() {
|
||||
return expectations;
|
||||
}
|
||||
|
||||
const skipArray = (() => {
|
||||
const skipsForExceptionValidation = (() => {
|
||||
const path = join(cwd, "test/no-validate-exceptions.txt");
|
||||
if (!existsSync(path)) {
|
||||
return [];
|
||||
@@ -309,13 +310,32 @@ const skipArray = (() => {
|
||||
.filter(line => !line.startsWith("#") && line.length > 0);
|
||||
})();
|
||||
|
||||
const skipsForLeaksan = (() => {
|
||||
const path = join(cwd, "test/no-validate-leaksan.txt");
|
||||
if (!existsSync(path)) {
|
||||
return [];
|
||||
}
|
||||
return readFileSync(path, "utf-8")
|
||||
.split("\n")
|
||||
.filter(line => !line.startsWith("#") && line.length > 0);
|
||||
})();
|
||||
|
||||
/**
|
||||
* Returns whether we should validate exception checks running the given test
|
||||
* @param {string} test
|
||||
* @returns {boolean}
|
||||
*/
|
||||
const shouldValidateExceptions = test => {
|
||||
return !(skipArray.includes(test) || skipArray.includes("test/" + test));
|
||||
return !(skipsForExceptionValidation.includes(test) || skipsForExceptionValidation.includes("test/" + test));
|
||||
};
|
||||
|
||||
/**
|
||||
* Returns whether we should validate exception checks running the given test
|
||||
* @param {string} test
|
||||
* @returns {boolean}
|
||||
*/
|
||||
const shouldValidateLeakSan = test => {
|
||||
return !(skipsForLeaksan.includes(test) || skipsForLeaksan.includes("test/" + test));
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -400,7 +420,9 @@ async function runTests() {
|
||||
|
||||
const okResults = [];
|
||||
const flakyResults = [];
|
||||
const flakyResultsTitles = [];
|
||||
const failedResults = [];
|
||||
const failedResultsTitles = [];
|
||||
const maxAttempts = 1 + (parseInt(options["retries"]) || 0);
|
||||
|
||||
const parallelism = options["parallel"] ? availableParallelism() : 1;
|
||||
@@ -436,6 +458,7 @@ async function runTests() {
|
||||
if (ok) {
|
||||
if (failure) {
|
||||
flakyResults.push(failure);
|
||||
flakyResultsTitles.push(title);
|
||||
} else {
|
||||
okResults.push(result);
|
||||
}
|
||||
@@ -455,6 +478,7 @@ async function runTests() {
|
||||
if (attempt >= maxAttempts || isAlwaysFailure(error)) {
|
||||
flaky = false;
|
||||
failedResults.push(failure);
|
||||
failedResultsTitles.push(title);
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -567,6 +591,12 @@ async function runTests() {
|
||||
env.BUN_JSC_validateExceptionChecks = "1";
|
||||
env.BUN_JSC_dumpSimulatedThrows = "1";
|
||||
}
|
||||
if ((basename(execPath).includes("asan") || !isCI) && shouldValidateLeakSan(testPath)) {
|
||||
env.BUN_DESTRUCT_VM_ON_EXIT = "1";
|
||||
env.ASAN_OPTIONS = "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1:abort_on_error=1";
|
||||
// prettier-ignore
|
||||
env.LSAN_OPTIONS = `malloc_context_size=100:print_suppressions=0:suppressions=${process.cwd()}/test/leaksan.supp`;
|
||||
}
|
||||
return runTest(title, async () => {
|
||||
const { ok, error, stdout, crashes } = await spawnBun(execPath, {
|
||||
cwd: cwd,
|
||||
@@ -624,6 +654,16 @@ async function runTests() {
|
||||
throw new Error(`Unsupported package manager: ${packageManager}`);
|
||||
}
|
||||
|
||||
// build
|
||||
const buildResult = await spawnBun(execPath, {
|
||||
cwd: vendorPath,
|
||||
args: ["run", "build"],
|
||||
timeout: 60_000,
|
||||
});
|
||||
if (!buildResult.ok) {
|
||||
throw new Error(`Failed to build vendor: ${buildResult.error}`);
|
||||
}
|
||||
|
||||
for (const testPath of testPaths) {
|
||||
const title = join(relative(cwd, vendorPath), testPath).replace(/\\/g, "/");
|
||||
|
||||
@@ -645,6 +685,9 @@ async function runTests() {
|
||||
}
|
||||
}
|
||||
|
||||
// tests are all over, close the group from the final test. any further output should print ungrouped.
|
||||
startGroup("End");
|
||||
|
||||
if (isGithubAction) {
|
||||
reportOutputToGitHubAction("failing_tests_count", failedResults.length);
|
||||
const markdown = formatTestToMarkdown(failedResults, false, 0);
|
||||
@@ -809,14 +852,14 @@ async function runTests() {
|
||||
|
||||
if (failedResults.length) {
|
||||
console.log(`${getAnsi("red")}Failing Tests:${getAnsi("reset")}`);
|
||||
for (const { testPath } of failedResults) {
|
||||
for (const testPath of failedResultsTitles) {
|
||||
console.log(`${getAnsi("red")}- ${testPath}${getAnsi("reset")}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (flakyResults.length) {
|
||||
console.log(`${getAnsi("yellow")}Flaky Tests:${getAnsi("reset")}`);
|
||||
for (const { testPath } of flakyResults) {
|
||||
for (const testPath of flakyResultsTitles) {
|
||||
console.log(`${getAnsi("yellow")}- ${testPath}${getAnsi("reset")}`);
|
||||
}
|
||||
}
|
||||
@@ -1094,10 +1137,6 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
|
||||
: { BUN_ENABLE_CRASH_REPORTING: "0" }),
|
||||
};
|
||||
|
||||
if (basename(execPath).includes("asan")) {
|
||||
bunEnv.ASAN_OPTIONS = "allow_user_segv_handler=1:disable_coredump=0";
|
||||
}
|
||||
|
||||
if (isWindows && bunEnv.Path) {
|
||||
delete bunEnv.Path;
|
||||
}
|
||||
@@ -1114,6 +1153,9 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
|
||||
}
|
||||
bunEnv["TEMP"] = tmpdirPath;
|
||||
}
|
||||
if (timeout === undefined) {
|
||||
timeout = spawnBunTimeout;
|
||||
}
|
||||
try {
|
||||
const existingCores = options["coredump-upload"] ? readdirSync(coresDir) : [];
|
||||
const result = await spawnSafe({
|
||||
@@ -1250,17 +1292,17 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
|
||||
*
|
||||
* @param {string} execPath
|
||||
* @param {string} testPath
|
||||
* @param {object} [options]
|
||||
* @param {string} [options.cwd]
|
||||
* @param {string[]} [options.args]
|
||||
* @param {object} [opts]
|
||||
* @param {string} [opts.cwd]
|
||||
* @param {string[]} [opts.args]
|
||||
* @returns {Promise<TestResult>}
|
||||
*/
|
||||
async function spawnBunTest(execPath, testPath, options = { cwd }) {
|
||||
async function spawnBunTest(execPath, testPath, opts = { cwd }) {
|
||||
const timeout = getTestTimeout(testPath);
|
||||
const perTestTimeout = Math.ceil(timeout / 2);
|
||||
const absPath = join(options["cwd"], testPath);
|
||||
const absPath = join(opts["cwd"], testPath);
|
||||
const isReallyTest = isTestStrict(testPath) || absPath.includes("vendor");
|
||||
const args = options["args"] ?? [];
|
||||
const args = opts["args"] ?? [];
|
||||
|
||||
const testArgs = ["test", ...args, `--timeout=${perTestTimeout}`];
|
||||
|
||||
@@ -1291,10 +1333,16 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {
|
||||
env.BUN_JSC_validateExceptionChecks = "1";
|
||||
env.BUN_JSC_dumpSimulatedThrows = "1";
|
||||
}
|
||||
if ((basename(execPath).includes("asan") || !isCI) && shouldValidateLeakSan(relative(cwd, absPath))) {
|
||||
env.BUN_DESTRUCT_VM_ON_EXIT = "1";
|
||||
env.ASAN_OPTIONS = "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1:abort_on_error=1";
|
||||
// prettier-ignore
|
||||
env.LSAN_OPTIONS = `malloc_context_size=100:print_suppressions=0:suppressions=${process.cwd()}/test/leaksan.supp`;
|
||||
}
|
||||
|
||||
const { ok, error, stdout, crashes } = await spawnBun(execPath, {
|
||||
args: isReallyTest ? testArgs : [...args, absPath],
|
||||
cwd: options["cwd"],
|
||||
cwd: opts["cwd"],
|
||||
timeout: isReallyTest ? timeout : 30_000,
|
||||
env,
|
||||
stdout: options.stdout,
|
||||
@@ -1528,7 +1576,11 @@ function isNodeTest(path) {
|
||||
return false;
|
||||
}
|
||||
const unixPath = path.replaceAll(sep, "/");
|
||||
return unixPath.includes("js/node/test/parallel/") || unixPath.includes("js/node/test/sequential/");
|
||||
return (
|
||||
unixPath.includes("js/node/test/parallel/") ||
|
||||
unixPath.includes("js/node/test/sequential/") ||
|
||||
unixPath.includes("js/bun/test/parallel/")
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -2217,7 +2269,7 @@ function getExitCode(outcome) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
// A flaky segfault, sigtrap, or sigill must never be ignored.
|
||||
// A flaky segfault, sigtrap, or sigkill must never be ignored.
|
||||
// If it happens in CI, it will happen to our users.
|
||||
// Flaky AddressSanitizer errors cannot be ignored since they still represent real bugs.
|
||||
function isAlwaysFailure(error) {
|
||||
@@ -2226,6 +2278,7 @@ function isAlwaysFailure(error) {
|
||||
error.includes("segmentation fault") ||
|
||||
error.includes("illegal instruction") ||
|
||||
error.includes("sigtrap") ||
|
||||
error.includes("sigkill") ||
|
||||
error.includes("error: addresssanitizer") ||
|
||||
error.includes("internal assertion failure") ||
|
||||
error.includes("core dumped") ||
|
||||
|
||||
@@ -2808,6 +2808,8 @@ export function endGroup() {
|
||||
} else {
|
||||
console.groupEnd();
|
||||
}
|
||||
// when a file exits with an ASAN error, there is no trailing newline so we add one here to make sure `console.group()` detection doesn't get broken in CI.
|
||||
console.log();
|
||||
}
|
||||
|
||||
export function printEnvironment() {
|
||||
@@ -2864,6 +2866,12 @@ export function printEnvironment() {
|
||||
spawnSync([shell, "-c", "free -m -w"], { stdio: "inherit" });
|
||||
}
|
||||
});
|
||||
startGroup("Docker", () => {
|
||||
const shell = which(["sh", "bash"]);
|
||||
if (shell) {
|
||||
spawnSync([shell, "-c", "docker ps"], { stdio: "inherit" });
|
||||
}
|
||||
});
|
||||
}
|
||||
if (isWindows) {
|
||||
startGroup("Disk (win)", () => {
|
||||
|
||||
@@ -1,35 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -exo pipefail
|
||||
|
||||
WEBKIT_VERSION=$(grep 'set(WEBKIT_TAG' "CMakeLists.txt" | awk '{print $2}' | cut -f 1 -d ')')
|
||||
MIMALLOC_VERSION=$(git rev-parse HEAD:./src/deps/mimalloc)
|
||||
LIBARCHIVE_VERSION=$(git rev-parse HEAD:./src/deps/libarchive)
|
||||
PICOHTTPPARSER_VERSION=$(git rev-parse HEAD:./src/deps/picohttpparser)
|
||||
BORINGSSL_VERSION=$(git rev-parse HEAD:./src/deps/boringssl)
|
||||
ZLIB_VERSION=$(git rev-parse HEAD:./src/deps/zlib)
|
||||
LOLHTML=$(git rev-parse HEAD:./src/deps/lol-html)
|
||||
TINYCC=$(git rev-parse HEAD:./src/deps/tinycc)
|
||||
C_ARES=$(git rev-parse HEAD:./src/deps/c-ares)
|
||||
ZSTD=$(git rev-parse HEAD:./src/deps/zstd)
|
||||
LSHPACK=$(git rev-parse HEAD:./src/deps/ls-hpack)
|
||||
LIBDEFLATE=$(git rev-parse HEAD:./src/deps/libdeflate)
|
||||
|
||||
rm -rf src/generated_versions_list.zig
|
||||
echo "// AUTO-GENERATED FILE. Created via .scripts/write-versions.sh" >src/generated_versions_list.zig
|
||||
echo "" >>src/generated_versions_list.zig
|
||||
echo "pub const boringssl = \"$BORINGSSL_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const libarchive = \"$LIBARCHIVE_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const mimalloc = \"$MIMALLOC_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const picohttpparser = \"$PICOHTTPPARSER_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const webkit = \"$WEBKIT_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const zig = @import(\"std\").fmt.comptimePrint(\"{}\", .{@import(\"builtin\").zig_version});" >>src/generated_versions_list.zig
|
||||
echo "pub const zlib = \"$ZLIB_VERSION\";" >>src/generated_versions_list.zig
|
||||
echo "pub const tinycc = \"$TINYCC\";" >>src/generated_versions_list.zig
|
||||
echo "pub const lolhtml = \"$LOLHTML\";" >>src/generated_versions_list.zig
|
||||
echo "pub const c_ares = \"$C_ARES\";" >>src/generated_versions_list.zig
|
||||
echo "pub const libdeflate = \"$LIBDEFLATE\";" >>src/generated_versions_list.zig
|
||||
echo "pub const zstd = \"$ZSTD\";" >>src/generated_versions_list.zig
|
||||
echo "pub const lshpack = \"$LSHPACK\";" >>src/generated_versions_list.zig
|
||||
echo "" >>src/generated_versions_list.zig
|
||||
|
||||
zig fmt src/generated_versions_list.zig
|
||||
@@ -121,6 +121,10 @@ pub fn exit(code: u32) noreturn {
|
||||
std.os.windows.kernel32.ExitProcess(code);
|
||||
},
|
||||
else => {
|
||||
if (Environment.enable_asan) {
|
||||
std.c.exit(@bitCast(code));
|
||||
std.c.abort(); // exit should be noreturn
|
||||
}
|
||||
bun.c.quick_exit(@bitCast(code));
|
||||
std.c.abort(); // quick_exit should be noreturn
|
||||
},
|
||||
@@ -159,7 +163,7 @@ pub inline fn mimalloc_cleanup(force: bool) void {
|
||||
Mimalloc.mi_collect(force);
|
||||
}
|
||||
}
|
||||
pub const versions = @import("./generated_versions_list.zig");
|
||||
// Versions are now handled by CMake-generated header (bun_dependency_versions.h)
|
||||
|
||||
// Enabling huge pages slows down bun by 8x or so
|
||||
// Keeping this code for:
|
||||
|
||||
@@ -18,7 +18,7 @@ pub fn deinit(this: *HTMLScanner) void {
|
||||
for (this.import_records.slice()) |*record| {
|
||||
this.allocator.free(record.path.text);
|
||||
}
|
||||
this.import_records.deinitWithAllocator(this.allocator);
|
||||
this.import_records.deinit(this.allocator);
|
||||
}
|
||||
|
||||
fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKind) !void {
|
||||
@@ -41,10 +41,10 @@ fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKi
|
||||
const record = ImportRecord{
|
||||
.path = fs.Path.init(try this.allocator.dupeZ(u8, path_to_use)),
|
||||
.kind = kind,
|
||||
.range = .none,
|
||||
.range = logger.Range.None,
|
||||
};
|
||||
|
||||
try this.import_records.push(this.allocator, record);
|
||||
try this.import_records.append(this.allocator, record);
|
||||
}
|
||||
|
||||
const debug = bun.Output.scoped(.HTMLScanner, .hidden);
|
||||
@@ -56,7 +56,7 @@ pub fn onWriteHTML(_: *HTMLScanner, bytes: []const u8) void {
|
||||
pub fn onHTMLParseError(this: *HTMLScanner, message: []const u8) void {
|
||||
this.log.addError(
|
||||
this.source,
|
||||
.none,
|
||||
logger.Loc.Empty,
|
||||
message,
|
||||
) catch |err| bun.handleOom(err);
|
||||
}
|
||||
|
||||
@@ -199,7 +199,6 @@ pub const StandaloneModuleGraph = struct {
|
||||
store.ref();
|
||||
|
||||
const b = bun.webcore.Blob.initWithStore(store, globalObject).new();
|
||||
b.allocator = bun.default_allocator;
|
||||
|
||||
if (bun.http.MimeType.byExtensionNoDefault(bun.strings.trimLeadingChar(std.fs.path.extension(this.name), '.'))) |mime| {
|
||||
store.mime_type = mime;
|
||||
@@ -724,7 +723,8 @@ pub const StandaloneModuleGraph = struct {
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
defer pe_file.deinit();
|
||||
pe_file.addBunSection(bytes) catch |err| {
|
||||
// Always strip authenticode when adding .bun section for --compile
|
||||
pe_file.addBunSection(bytes, .strip_always) catch |err| {
|
||||
Output.prettyErrorln("Error adding Bun section to PE file: {}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
return bun.invalid_fd;
|
||||
|
||||
@@ -229,6 +229,11 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
|
||||
this.data[index] = item;
|
||||
return &this.data[index];
|
||||
}
|
||||
|
||||
pub fn deinit(this: *OverflowBlock) void {
|
||||
if (this.prev) |p| p.deinit();
|
||||
bun.default_allocator.destroy(this);
|
||||
}
|
||||
};
|
||||
|
||||
const Self = @This();
|
||||
@@ -264,6 +269,12 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
|
||||
return instance;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Self) void {
|
||||
self.head.deinit();
|
||||
bun.default_allocator.destroy(instance);
|
||||
loaded = false;
|
||||
}
|
||||
|
||||
pub fn isOverflowing() bool {
|
||||
return instance.used >= @as(u16, count);
|
||||
}
|
||||
@@ -350,6 +361,12 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
|
||||
return instance;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *const Self) void {
|
||||
_ = self;
|
||||
bun.default_allocator.destroy(instance);
|
||||
loaded = false;
|
||||
}
|
||||
|
||||
pub inline fn isOverflowing() bool {
|
||||
return instance.slice_buf_used >= @as(u16, count);
|
||||
}
|
||||
@@ -530,6 +547,12 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
|
||||
return instance;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Self) void {
|
||||
self.index.deinit(self.allocator);
|
||||
bun.default_allocator.destroy(instance);
|
||||
loaded = false;
|
||||
}
|
||||
|
||||
pub fn isOverflowing() bool {
|
||||
return instance.backing_buf_used >= @as(u16, count);
|
||||
}
|
||||
@@ -653,6 +676,10 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
|
||||
// }
|
||||
|
||||
}
|
||||
|
||||
pub fn values(self: *Self) []ValueType {
|
||||
return (&self.backing_buf)[0..self.backing_buf_used];
|
||||
}
|
||||
};
|
||||
if (!store_keys) {
|
||||
return BSSMapType;
|
||||
@@ -684,6 +711,12 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
|
||||
return instance;
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Self) void {
|
||||
self.map.deinit();
|
||||
bun.default_allocator.destroy(instance);
|
||||
instance_loaded = false;
|
||||
}
|
||||
|
||||
pub fn isOverflowing() bool {
|
||||
return instance.map.backing_buf_used >= count;
|
||||
}
|
||||
@@ -886,6 +919,8 @@ pub const Default = struct {
|
||||
_ = self;
|
||||
return c_allocator;
|
||||
}
|
||||
|
||||
pub const deinit = void;
|
||||
};
|
||||
|
||||
const basic = if (bun.use_mimalloc)
|
||||
|
||||
@@ -78,6 +78,15 @@ pub const Borrowed = struct {
|
||||
else
|
||||
null;
|
||||
}
|
||||
|
||||
pub fn downcast(std_alloc: std.mem.Allocator) Borrowed {
|
||||
bun.assertf(
|
||||
isInstance(std_alloc),
|
||||
"not a MimallocArena (vtable is {*})",
|
||||
.{std_alloc.vtable},
|
||||
);
|
||||
return .fromOpaque(std_alloc.ptr);
|
||||
}
|
||||
};
|
||||
|
||||
const BorrowedHeap = if (safety_checks) *DebugHeap else *mimalloc.Heap;
|
||||
@@ -85,6 +94,8 @@ const BorrowedHeap = if (safety_checks) *DebugHeap else *mimalloc.Heap;
|
||||
const DebugHeap = struct {
|
||||
inner: *mimalloc.Heap,
|
||||
thread_lock: bun.safety.ThreadLock,
|
||||
|
||||
pub const deinit = void;
|
||||
};
|
||||
|
||||
threadlocal var thread_heap: if (safety_checks) ?DebugHeap else void = if (safety_checks) null;
|
||||
@@ -115,6 +126,7 @@ pub fn borrow(self: Self) Borrowed {
|
||||
/// It uses pthread_getspecific to do that.
|
||||
/// We can save those extra calls if we just do it once in here
|
||||
pub fn getThreadLocalDefault() std.mem.Allocator {
|
||||
if (bun.Environment.enable_asan) return bun.default_allocator;
|
||||
return Borrowed.getDefault().allocator();
|
||||
}
|
||||
|
||||
|
||||
@@ -186,10 +186,10 @@ const State = struct {
|
||||
self.history.unlock();
|
||||
}
|
||||
|
||||
fn deinit(self: *Self) void {
|
||||
pub fn deinit(self: *Self) void {
|
||||
defer self.* = undefined;
|
||||
var history = self.history.intoUnprotected();
|
||||
defer history.deinit();
|
||||
defer history.deinit(self.parent);
|
||||
|
||||
const count = history.allocations.count();
|
||||
if (count == 0) return;
|
||||
|
||||
@@ -112,6 +112,7 @@ pub const Features = struct {
|
||||
pub var unsupported_uv_function: usize = 0;
|
||||
pub var exited: usize = 0;
|
||||
pub var yarn_migration: usize = 0;
|
||||
pub var pnpm_migration: usize = 0;
|
||||
pub var yaml_parse: usize = 0;
|
||||
|
||||
comptime {
|
||||
|
||||
@@ -61,7 +61,7 @@ pub const AssignTarget = enum(u2) {
|
||||
};
|
||||
|
||||
pub const LocRef = struct {
|
||||
loc: logger.Loc = .none,
|
||||
loc: logger.Loc = logger.Loc.Empty,
|
||||
|
||||
// TODO: remove this optional and make Ref a function getter
|
||||
// That will make this struct 128 bits instead of 192 bits and we can remove some heap allocations
|
||||
@@ -121,7 +121,7 @@ pub const ClauseItem = struct {
|
||||
/// For exports: `export { foo as bar }` - "bar" is the alias
|
||||
/// For re-exports: `export { foo as bar } from 'path'` - "bar" is the alias
|
||||
alias: string,
|
||||
alias_loc: logger.Loc = .none,
|
||||
alias_loc: logger.Loc = logger.Loc.Empty,
|
||||
/// Reference to the actual symbol being imported/exported.
|
||||
/// For imports: `import { foo as bar }` - ref to the symbol representing "foo" from the source module
|
||||
/// For exports: `export { foo as bar }` - ref to the local symbol "foo"
|
||||
|
||||
@@ -22,9 +22,9 @@ exports_kind: ExportsKind = ExportsKind.none,
|
||||
|
||||
// This is a list of ES6 features. They are ranges instead of booleans so
|
||||
// that they can be used in log messages. Check to see if "Len > 0".
|
||||
import_keyword: logger.Range = .none, // Does not include TypeScript-specific syntax or "import()"
|
||||
export_keyword: logger.Range = .none, // Does not include TypeScript-specific syntax
|
||||
top_level_await_keyword: logger.Range = .none,
|
||||
import_keyword: logger.Range = logger.Range.None, // Does not include TypeScript-specific syntax or "import()"
|
||||
export_keyword: logger.Range = logger.Range.None, // Does not include TypeScript-specific syntax
|
||||
top_level_await_keyword: logger.Range = logger.Range.None,
|
||||
|
||||
/// These are stored at the AST level instead of on individual AST nodes so
|
||||
/// they can be manipulated efficiently without a full AST traversal
|
||||
@@ -83,14 +83,14 @@ pub const TsEnumsMap = std.ArrayHashMapUnmanaged(Ref, bun.StringHashMapUnmanaged
|
||||
|
||||
pub fn fromParts(parts: []Part) Ast {
|
||||
return Ast{
|
||||
.parts = Part.List.init(parts),
|
||||
.parts = Part.List.fromOwnedSlice(parts),
|
||||
.runtime_imports = .{},
|
||||
};
|
||||
}
|
||||
|
||||
pub fn initTest(parts: []Part) Ast {
|
||||
pub fn initTest(parts: []const Part) Ast {
|
||||
return Ast{
|
||||
.parts = Part.List.init(parts),
|
||||
.parts = Part.List.fromBorrowedSliceDangerous(parts),
|
||||
.runtime_imports = .{},
|
||||
};
|
||||
}
|
||||
@@ -107,9 +107,9 @@ pub fn toJSON(self: *const Ast, _: std.mem.Allocator, stream: anytype) !void {
|
||||
/// Do not call this if it wasn't globally allocated!
|
||||
pub fn deinit(this: *Ast) void {
|
||||
// TODO: assert mimalloc-owned memory
|
||||
if (this.parts.len > 0) this.parts.deinitWithAllocator(bun.default_allocator);
|
||||
if (this.symbols.len > 0) this.symbols.deinitWithAllocator(bun.default_allocator);
|
||||
if (this.import_records.len > 0) this.import_records.deinitWithAllocator(bun.default_allocator);
|
||||
this.parts.deinit(bun.default_allocator);
|
||||
this.symbols.deinit(bun.default_allocator);
|
||||
this.import_records.deinit(bun.default_allocator);
|
||||
}
|
||||
|
||||
pub const Class = G.Class;
|
||||
|
||||
@@ -56,7 +56,14 @@ pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr {
|
||||
};
|
||||
}
|
||||
|
||||
return Expr.init(E.Array, E.Array{ .items = ExprNodeList.init(exprs), .is_single_line = b.is_single_line }, loc);
|
||||
return Expr.init(
|
||||
E.Array,
|
||||
E.Array{
|
||||
.items = ExprNodeList.fromOwnedSlice(exprs),
|
||||
.is_single_line = b.is_single_line,
|
||||
},
|
||||
loc,
|
||||
);
|
||||
},
|
||||
.b_object => |b| {
|
||||
const properties = wrapper
|
||||
@@ -77,7 +84,7 @@ pub fn toExpr(binding: *const Binding, wrapper: anytype) Expr {
|
||||
return Expr.init(
|
||||
E.Object,
|
||||
E.Object{
|
||||
.properties = G.Property.List.init(properties),
|
||||
.properties = G.Property.List.fromOwnedSlice(properties),
|
||||
.is_single_line = b.is_single_line,
|
||||
},
|
||||
loc,
|
||||
|
||||
@@ -111,7 +111,7 @@ pub fn toAST(this: *const BundledAst) Ast {
|
||||
.uses_exports_ref = this.flags.uses_exports_ref,
|
||||
.uses_module_ref = this.flags.uses_module_ref,
|
||||
// .uses_require_ref = ast.uses_require_ref,
|
||||
.export_keyword = .{ .len = if (this.flags.uses_export_keyword) 1 else 0, .loc = .none },
|
||||
.export_keyword = .{ .len = if (this.flags.uses_export_keyword) 1 else 0, .loc = .{} },
|
||||
.force_cjs_to_esm = this.flags.force_cjs_to_esm,
|
||||
.has_lazy_export = this.flags.has_lazy_export,
|
||||
.commonjs_module_exports_assigned_deoptimized = this.flags.commonjs_module_exports_assigned_deoptimized,
|
||||
Some files were not shown because too many files have changed in this diff