Mirror of https://github.com/oven-sh/bun (synced 2026-02-05 08:28:55 +00:00)

Compare commits: 168 commits on branch pfg/assert
Commits in this comparison (SHA1):

a93ce9f4a6, 1de386bca7, b39632c921, 38325aa41c, 969bab3848, 5bd4972d5b, 68780faee2, 0bbc18fd19,
53318c8b13, abe69901b2, c0cf0414a0, 3dc3527171, af4f1c7d39, 2c1dea818c, cc125b475f, cbbf88f3a6,
8064a55a48, 0531d6756c, 6135b3dec9, b08dd8795e, c1eba5886f, fcca2cc398, dd32e6b416, b453360dff,
1476e4c958, eacf89e5bf, fa6ac405a4, 4c8cbecb08, 00b7d6479b, bcf023c829, b7b1ca8ebe, 784bc4e012,
dd5c40dab7, 3a4a9ae4e9, 9d1a35b658, 61cc9c3947, e904a181d8, 55a0bdc68d, 55454f7910, e4aeb761e4,
f9efe94b85, 7eb8a3feae, d7ed9c673e, b4dce96c40, 52ef8b1778, baff3c900e, 23299dadf6, 0d5e4e162b,
d27594ecf4, a2e2d114e9, da3d64b1ef, ce64e04b16, 55473cb64a, 752441d911, da5d4d791c, 6d453be7d9,
2d441d868b, 56ad4cc4a6, d2acb2eac0, de7eafbdd1, 4114986c3e, 8aa451c2dc, 497cef9759, dd57b95546,
ea7c4986d7, 6c7edf2dbe, bf2f153f5c, f64a4c4ace, 0216431c98, ae289c4858, 5d1609fe5c, 471fe7b886,
08222eda71, 6f8c5959d0, 40d5e745c9, 225bfd54fa, a6ca8c40d4, b52ad226a5, 5f8f805db9, 37c98bebd6,
bd01df19c1, 7fd16ebffa, 1bb211df56, bdd0b89f16, 841f593b12, 3afd19c73c, b6a231add3, ca86bae5d5,
215fdb4697, 578bdf1cd6, cf2fa30639, 5b3c58bdf5, 0d6d4faa51, 5e4642295a, 68f026b3cd, 5e9563833d,
6dd44cbeda, a9ce4d40c2, 663f00b62b, f21fffd1bf, d92d8dc886, 6d127ba3f4, c3d9e8c7af, c25e744837,
dc01a5d6a8, c434b2c191, 8ca0eb831d, b19f13f5c4, bb3d570ad0, a6f37b398c, 39af2a0a56, 7f6bb30877,
812288eb72, 9cbe1ec300, 4f8c1c9124, 468a392fd5, f61f03fae3, a468d09064, 898feb886f, c5cd0e4575,
f4a0fe40aa, 2d2e329ee3, 618d2cb3ac, 6c915fc1d0, aa60ab3b65, f855ae8618, 514a47cb54, 1a1cf0a4d7,
9fbe64619b, 642e0ba73c, 19d7a5fe53, c04a2d1dfc, 82cb82d828, 4ae982be4e, 2d65063571, 746cf2cf01,
9c1fde0132, f8f76a6fe0, 4117af6e46, 5bcaf32ba3, d01bfb5aa2, 78b495aff5, 6adb3954fe, b152fbefcd,
8c0c97a273, 95fcee8b76, c3f63bcdc4, 2283ed098f, 43dcb8fce1, 0eb6a4c55e, 144db9ca52, a6a4ca1e49,
314b4d9b44, 0e3e33072b, 3681aa9f0a, c9d0fd51a9, 4fe8b71437, 1efab7f42d, 61a3f08595, 363595fd31,
173f67d81e, 05d5ab7489, b7bd5a4cf5, ab4da13785, ab3cb68f66, 795f14c1d1, 708ed00705, ff4eccc3b4
1733  .buildkite/ci.mjs — file diff suppressed because it is too large.
@@ -8,4 +8,4 @@ function run_command() {
|
||||
{ set +x; } 2>/dev/null
|
||||
}
|
||||
|
||||
run_command node ".buildkite/ci.mjs"
|
||||
run_command node ".buildkite/ci.mjs" "$@"
|
||||
|
||||
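The only functional change here is that the wrapper now forwards its own arguments to ci.mjs. A minimal sketch of why `"$@"` matters — the body of `run_command` is assumed below, since only its closing lines appear in the hunk:

```bash
#!/usr/bin/env bash
# Hypothetical wrapper: trace each command, then silence the trace again.
run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

# Before: arguments given to this wrapper were dropped.
# run_command node ".buildkite/ci.mjs"

# After: "$@" forwards them, so e.g. `./wrapper.sh --some-flag` reaches ci.mjs.
run_command node ".buildkite/ci.mjs" "$@"
```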
92  .github/workflows/update-cares.yml (vendored, new file)

@@ -0,0 +1,92 @@
name: Update c-ares

on:
  schedule:
    - cron: "0 4 * * 0"
  workflow_dispatch:

jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write

    steps:
      - uses: actions/checkout@v4

      - name: Check c-ares version
        id: check-version
        run: |
          set -euo pipefail

          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildCares.cmake)

          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in BuildCares.cmake"
            exit 1
          fi

          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in BuildCares.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT

          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/c-ares/c-ares/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi

          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi

          LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi

          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT

      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildCares.cmake

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/BuildCares.cmake
          commit-message: "deps: update c-ares to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update c-ares to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-cares-${{ github.run_number }}
          body: |
            ## What does this PR do?

            Updates c-ares to version ${{ steps.check-version.outputs.tag }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
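The awk one-liner in the "Check c-ares version" step relies on the two-line layout used by register_repository() in cmake/targets/*.cmake, where COMMIT sits on its own line and the hash follows on the next. A standalone sketch of that extraction — the sample file below is abbreviated to just the fields the script reads:

```bash
# Write a minimal stand-in for cmake/targets/BuildCares.cmake.
cat > /tmp/BuildCares.cmake <<'EOF'
register_repository(
  REPOSITORY
    c-ares/c-ares
  COMMIT
    41ee334af3e3d0027dca5e477855d0244936bd49
)
EOF

# Match the line ending in COMMIT, read the next line, trim whitespace, print it.
awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' /tmp/BuildCares.cmake
# -> 41ee334af3e3d0027dca5e477855d0244936bd49
```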
92  .github/workflows/update-libarchive.yml (vendored, new file)

@@ -0,0 +1,92 @@
name: Update libarchive

on:
  schedule:
    - cron: "0 3 * * 0"
  workflow_dispatch:

jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write

    steps:
      - uses: actions/checkout@v4

      - name: Check libarchive version
        id: check-version
        run: |
          set -euo pipefail

          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibArchive.cmake)

          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in BuildLibArchive.cmake"
            exit 1
          fi

          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in BuildLibArchive.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT

          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/libarchive/libarchive/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi

          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi

          LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi

          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT

      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibArchive.cmake

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/BuildLibArchive.cmake
          commit-message: "deps: update libarchive to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update libarchive to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-libarchive-${{ github.run_number }}
          body: |
            ## What does this PR do?

            Updates libarchive to version ${{ steps.check-version.outputs.tag }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libarchive.yml)
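The "Update version if needed" step is the same in each of these workflows: on the line after COMMIT, `sed` swaps the old hash for the new one. A minimal sketch of that rewrite, run against a throwaway copy from a Bun checkout, with an obviously fake hash standing in for the workflow output:

```bash
# Work on a throwaway copy so nothing in the repo is touched.
cp cmake/targets/BuildLibArchive.cmake /tmp/BuildLibArchive.cmake

# Stand-in for ${{ steps.check-version.outputs.latest }}.
NEW_SHA=0000000000000000000000000000000000000000

# On the line following COMMIT, replace the whole hash line with the new hash (re-indented).
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/    '"$NEW_SHA"'/}' /tmp/BuildLibArchive.cmake

grep -A 1 'COMMIT' /tmp/BuildLibArchive.cmake
```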
92  .github/workflows/update-libdeflate.yml (vendored, new file)

@@ -0,0 +1,92 @@
name: Update libdeflate

on:
  schedule:
    - cron: "0 2 * * 0"
  workflow_dispatch:

jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write

    steps:
      - uses: actions/checkout@v4

      - name: Check libdeflate version
        id: check-version
        run: |
          set -euo pipefail

          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibDeflate.cmake)

          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in BuildLibDeflate.cmake"
            exit 1
          fi

          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in BuildLibDeflate.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT

          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/ebiggers/libdeflate/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi

          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi

          LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi

          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT

      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibDeflate.cmake

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/BuildLibDeflate.cmake
          commit-message: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-libdeflate-${{ github.run_number }}
          body: |
            ## What does this PR do?

            Updates libdeflate to version ${{ steps.check-version.outputs.tag }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libdeflate.yml)
92  .github/workflows/update-lolhtml.yml (vendored, new file)

@@ -0,0 +1,92 @@
name: Update lolhtml

on:
  schedule:
    - cron: "0 1 * * 0"
  workflow_dispatch:

jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write

    steps:
      - uses: actions/checkout@v4

      - name: Check lolhtml version
        id: check-version
        run: |
          set -euo pipefail

          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLolHtml.cmake)

          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in BuildLolHtml.cmake"
            exit 1
          fi

          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in BuildLolHtml.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT

          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/cloudflare/lol-html/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi

          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi

          LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi

          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT

      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLolHtml.cmake

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/BuildLolHtml.cmake
          commit-message: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-lolhtml-${{ github.run_number }}
          body: |
            ## What does this PR do?

            Updates lolhtml to version ${{ steps.check-version.outputs.tag }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lolhtml.yml)
92  .github/workflows/update-lshpack.yml (vendored, new file)

@@ -0,0 +1,92 @@
name: Update lshpack

on:
  schedule:
    - cron: "0 5 * * 0"
  workflow_dispatch:

jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write

    steps:
      - uses: actions/checkout@v4

      - name: Check lshpack version
        id: check-version
        run: |
          set -euo pipefail

          # Extract the commit hash from the line after COMMIT
          CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLshpack.cmake)

          if [ -z "$CURRENT_VERSION" ]; then
            echo "Error: Could not find COMMIT line in BuildLshpack.cmake"
            exit 1
          fi

          # Validate that it looks like a git hash
          if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid git hash format in BuildLshpack.cmake"
            echo "Found: $CURRENT_VERSION"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT

          LATEST_RELEASE=$(curl -sL https://api.github.com/repos/litespeedtech/ls-hpack/releases/latest)
          if [ -z "$LATEST_RELEASE" ]; then
            echo "Error: Failed to fetch latest release from GitHub API"
            exit 1
          fi

          LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
          if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
            echo "Error: Could not extract tag name from GitHub API response"
            exit 1
          fi

          LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi

          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
            echo "Error: Invalid SHA format received from GitHub"
            echo "Found: $LATEST_SHA"
            echo "Expected: 40 character hexadecimal string"
            exit 1
          fi

          echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
          echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT

      - name: Update version if needed
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        run: |
          set -euo pipefail
          # Handle multi-line format where COMMIT and its value are on separate lines
          sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLshpack.cmake

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            cmake/targets/BuildLshpack.cmake
          commit-message: "deps: update lshpack to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update lshpack to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-lshpack-${{ github.run_number }}
          body: |
            ## What does this PR do?

            Updates lshpack to version ${{ steps.check-version.outputs.tag }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lshpack.yml)
109  .github/workflows/update-sqlite3.yml (vendored, new file)

@@ -0,0 +1,109 @@
name: Update SQLite3

on:
  schedule:
    - cron: "0 6 * * 0" # Run weekly
  workflow_dispatch:

jobs:
  check-update:
    runs-on: ubuntu-latest
    permissions:
      contents: write
      pull-requests: write

    steps:
      - uses: actions/checkout@v4

      - name: Check SQLite version
        id: check-version
        run: |
          set -euo pipefail

          # Get current version from the header file using SQLITE_VERSION_NUMBER
          CURRENT_VERSION_NUM=$(grep -o '#define SQLITE_VERSION_NUMBER [0-9]\+' src/bun.js/bindings/sqlite/sqlite3_local.h | awk '{print $3}' | tr -d '\n\r')
          if [ -z "$CURRENT_VERSION_NUM" ]; then
            echo "Error: Could not find SQLITE_VERSION_NUMBER in sqlite3_local.h"
            exit 1
          fi

          # Convert numeric version to semantic version for display
          CURRENT_MAJOR=$((CURRENT_VERSION_NUM / 1000000))
          CURRENT_MINOR=$((($CURRENT_VERSION_NUM / 1000) % 1000))
          CURRENT_PATCH=$((CURRENT_VERSION_NUM % 1000))
          CURRENT_VERSION="$CURRENT_MAJOR.$CURRENT_MINOR.$CURRENT_PATCH"

          echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
          echo "current_num=$CURRENT_VERSION_NUM" >> $GITHUB_OUTPUT

          # Fetch SQLite download page
          DOWNLOAD_PAGE=$(curl -sL https://sqlite.org/download.html)
          if [ -z "$DOWNLOAD_PAGE" ]; then
            echo "Error: Failed to fetch SQLite download page"
            exit 1
          fi

          # Extract latest version and year from the amalgamation link
          LATEST_INFO=$(echo "$DOWNLOAD_PAGE" | grep -o 'sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1)
          LATEST_YEAR=$(echo "$DOWNLOAD_PAGE" | grep -o '[0-9]\{4\}/sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1 | cut -d'/' -f1 | tr -d '\n\r')
          LATEST_VERSION_NUM=$(echo "$LATEST_INFO" | grep -o '[0-9]\{7\}' | tr -d '\n\r')

          if [ -z "$LATEST_VERSION_NUM" ] || [ -z "$LATEST_YEAR" ]; then
            echo "Error: Could not extract latest version info"
            exit 1
          fi

          # Convert numeric version to semantic version for display
          LATEST_MAJOR=$((10#$LATEST_VERSION_NUM / 1000000))
          LATEST_MINOR=$((($LATEST_VERSION_NUM / 1000) % 1000))
          LATEST_PATCH=$((10#$LATEST_VERSION_NUM % 1000))
          LATEST_VERSION="$LATEST_MAJOR.$LATEST_MINOR.$LATEST_PATCH"

          echo "latest=$LATEST_VERSION" >> $GITHUB_OUTPUT
          echo "latest_year=$LATEST_YEAR" >> $GITHUB_OUTPUT
          echo "latest_num=$LATEST_VERSION_NUM" >> $GITHUB_OUTPUT

          # Debug output
          echo "Current version: $CURRENT_VERSION ($CURRENT_VERSION_NUM)"
          echo "Latest version: $LATEST_VERSION ($LATEST_VERSION_NUM)"

      - name: Update SQLite if needed
        if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
        run: |
          set -euo pipefail

          TEMP_DIR=$(mktemp -d)
          cd $TEMP_DIR

          echo "Downloading from: https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"

          # Download and extract latest version
          wget "https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
          unzip "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
          cd "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}"

          # Add header comment and copy files
          echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
          cat sqlite3.c >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c

          echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
          cat sqlite3.h >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
            src/bun.js/bindings/sqlite/sqlite3.c
            src/bun.js/bindings/sqlite/sqlite3_local.h
          commit-message: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
          title: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
          delete-branch: true
          branch: deps/update-sqlite-${{ steps.check-version.outputs.latest }}
          body: |
            ## What does this PR do?

            Updates SQLite to version ${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-sqlite3.yml)
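The numeric-to-semantic conversion in the check step is plain shell arithmetic over SQLITE_VERSION_NUMBER, which SQLite encodes as major*1000000 + minor*1000 + patch. A standalone sketch, using 3047000 (i.e. 3.47.0) purely as an illustration:

```bash
VERSION_NUM=3047000   # example SQLITE_VERSION_NUMBER

MAJOR=$((VERSION_NUM / 1000000))        # 3047000 / 1000000 -> 3
MINOR=$(((VERSION_NUM / 1000) % 1000))  # 3047 % 1000       -> 47
PATCH=$((VERSION_NUM % 1000))           # 3047000 % 1000    -> 0

echo "$MAJOR.$MINOR.$PATCH"             # prints 3.47.0
```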
21  .gitignore (vendored)

@@ -26,6 +26,7 @@
*.db
*.dmg
*.dSYM
*.generated.ts
*.jsb
*.lib
*.log
@@ -53,8 +54,8 @@
/test-report.md
/test.js
/test.ts
/testdir
/test.zig
/testdir
build
build.ninja
bun-binary
@@ -111,10 +112,14 @@ pnpm-lock.yaml
profile.json
README.md.template
release/
scripts/env.local
sign.*.json
sign.json
src/bake/generated.ts
src/generated_enum_extractor.zig
src/bun.js/bindings-obj
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/bindings/GeneratedBindings.zig
src/bun.js/debug-bindings-obj
src/deps/zig-clap/.gitattributes
src/deps/zig-clap/.github
@@ -131,6 +136,7 @@ src/runtime.version
src/tests.zig
test.txt
test/js/bun/glob/fixtures
test/node.js/upstream
tsconfig.tsbuildinfo
txt.js
x64
@@ -142,6 +148,9 @@ test/node.js/upstream
scripts/env.local
*.generated.ts
src/bake/generated.ts
test/cli/install/registry/packages/publish-pkg-*
test/cli/install/registry/packages/@secret/publish-pkg-8
test/js/third_party/prisma/prisma/sqlite/dev.db-journal

# Dependencies
/vendor
@@ -149,22 +158,24 @@ src/bake/generated.ts
# Dependencies (before CMake)
# These can be removed in the far future
/src/bun.js/WebKit
/src/deps/WebKit
/src/deps/boringssl
/src/deps/brotli
/src/deps/c*ares
/src/deps/lol*html
/src/deps/libarchive
/src/deps/libdeflate
/src/deps/libuv
/src/deps/lol*html
/src/deps/ls*hpack
/src/deps/mimalloc
/src/deps/picohttpparser
/src/deps/tinycc
/src/deps/zstd
/src/deps/zlib
/src/deps/WebKit
/src/deps/zig
/src/deps/zlib
/src/deps/zstd

# Generated files

.buildkite/ci.yml
*.sock
scratch*.{js,ts,tsx,cjs,mjs}

@@ -5,6 +5,5 @@ test/js/deno
test/node.js
src/react-refresh.js
*.min.js
test/js/node/test/fixtures
test/js/node/test/common
test/snippets
test/js/node/test
120  .vscode/launch.json (generated, vendored)
@@ -16,7 +16,6 @@
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
|
||||
@@ -33,7 +32,6 @@
|
||||
"args": ["test", "--only", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
@@ -56,7 +54,6 @@
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
|
||||
@@ -73,7 +70,6 @@
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "0",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
@@ -90,7 +86,6 @@
|
||||
"args": ["test", "--watch", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
@@ -107,7 +102,6 @@
|
||||
"args": ["test", "--hot", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
@@ -124,7 +118,6 @@
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
@@ -147,7 +140,6 @@
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
@@ -187,7 +179,6 @@
|
||||
"args": ["run", "${fileBasename}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
|
||||
"BUN_DEBUG_IncrementalGraph": "1",
|
||||
@@ -207,7 +198,6 @@
|
||||
"args": ["run", "${fileBasename}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "0",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
@@ -223,9 +213,11 @@
|
||||
"args": ["run", "--watch", "${fileBasename}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
// "BUN_DEBUG_DEBUGGER": "1",
|
||||
// "BUN_DEBUG_INTERNAL_DEBUGGER": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
// "BUN_INSPECT": "ws+unix:///var/folders/jk/8fzl9l5119598vsqrmphsw7m0000gn/T/tl15npi7qtf.sock?report=1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
@@ -239,7 +231,6 @@
|
||||
"args": ["run", "--hot", "${fileBasename}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
@@ -300,7 +291,6 @@
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
@@ -317,7 +307,6 @@
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
|
||||
@@ -334,7 +323,6 @@
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
@@ -351,7 +339,6 @@
|
||||
"args": ["test", "--watch", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
@@ -368,7 +355,6 @@
|
||||
"args": ["test", "--hot", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
@@ -385,7 +371,6 @@
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
@@ -408,7 +393,6 @@
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
@@ -432,7 +416,6 @@
|
||||
"args": ["exec", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
@@ -449,7 +432,6 @@
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
@@ -465,7 +447,6 @@
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
|
||||
},
|
||||
@@ -481,7 +462,6 @@
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
"BUN_INSPECT": "ws://localhost:0/",
|
||||
@@ -503,7 +483,6 @@
|
||||
"args": ["install"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
@@ -519,7 +498,6 @@
|
||||
"args": ["test/runner.node.mjs"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
},
|
||||
@@ -539,10 +517,6 @@
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -568,10 +542,6 @@
|
||||
"args": ["test", "--only", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -597,10 +567,6 @@
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -626,10 +592,6 @@
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "0",
|
||||
@@ -655,10 +617,6 @@
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -693,10 +651,6 @@
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -732,10 +686,6 @@
|
||||
"args": ["run", "${fileBasename}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -761,10 +711,6 @@
|
||||
"args": ["install"],
|
||||
"cwd": "${fileDirname}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -786,10 +732,6 @@
|
||||
"args": ["run", "${fileBasename}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -811,10 +753,6 @@
|
||||
"args": ["run", "${fileBasename}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -845,10 +783,6 @@
|
||||
"args": ["run", "${fileBasename}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -880,10 +814,6 @@
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -909,10 +839,6 @@
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -938,10 +864,6 @@
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "0",
|
||||
@@ -967,10 +889,6 @@
|
||||
"args": ["test", "--watch", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -996,10 +914,6 @@
|
||||
"args": ["test", "--hot", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -1025,10 +939,6 @@
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -1063,10 +973,6 @@
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -1102,10 +1008,6 @@
|
||||
"args": ["exec", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -1128,10 +1030,6 @@
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -1153,10 +1051,6 @@
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -1182,10 +1076,6 @@
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
@@ -1220,10 +1110,6 @@
|
||||
"args": ["test/runner.node.mjs"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1",
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1",
|
||||
|
||||
4  .vscode/settings.json (vendored)

@@ -63,7 +63,7 @@
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"clangd.arguments": ["-header-insertion=never"],
"clangd.arguments": ["-header-insertion=never", "-no-unused-includes"],

// JavaScript
"prettier.enable": true,
@@ -78,7 +78,7 @@
"prettier.prettierPath": "./node_modules/prettier",

// TypeScript
"typescript.tsdk": "${workspaceFolder}/node_modules/typescript/lib",
"typescript.tsdk": "node_modules/typescript/lib",
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},
91  .vscode/tasks.json (vendored)

@@ -2,50 +2,57 @@
"version": "2.0.0",
"tasks": [
  {
    "type": "process",
    "label": "Install Dependencies",
    "command": "scripts/all-dependencies.sh",
    "windows": {
      "command": "scripts/all-dependencies.ps1",
    },
    "icon": {
      "id": "arrow-down",
    },
    "options": {
      "cwd": "${workspaceFolder}",
    },
  },
  {
    "type": "process",
    "label": "Setup Environment",
    "dependsOn": ["Install Dependencies"],
    "command": "scripts/setup.sh",
    "windows": {
      "command": "scripts/setup.ps1",
    },
    "icon": {
      "id": "check",
    },
    "options": {
      "cwd": "${workspaceFolder}",
    },
  },
  {
    "type": "process",
    "label": "Build Bun",
    "dependsOn": ["Setup Environment"],
    "command": "bun",
    "args": ["run", "build"],
    "icon": {
      "id": "gear",
    "type": "shell",
    "command": "bun run build",
    "group": {
      "kind": "build",
      "isDefault": true,
    },
    "options": {
      "cwd": "${workspaceFolder}",
    },
    "isBuildCommand": true,
    "runOptions": {
      "instanceLimit": 1,
      "reevaluateOnRerun": true,
    "problemMatcher": [
      {
        "owner": "zig",
        "fileLocation": ["relative", "${workspaceFolder}"],
        "pattern": [
          {
            "regexp": "^(.+?):(\\d+):(\\d+): (error|warning|note): (.+)$",
            "file": 1,
            "line": 2,
            "column": 3,
            "severity": 4,
            "message": 5,
          },
          {
            "regexp": "^\\s+(.+)$",
            "message": 1,
            "loop": true,
          },
        ],
      },
      {
        "owner": "clang",
        "fileLocation": ["relative", "${workspaceFolder}"],
        "pattern": [
          {
            "regexp": "^([^:]+):(\\d+):(\\d+):\\s+(warning|error|note|remark):\\s+(.*)$",
            "file": 1,
            "line": 2,
            "column": 3,
            "severity": 4,
            "message": 5,
          },
          {
            "regexp": "^\\s*(.*)$",
            "message": 1,
            "loop": true,
          },
        ],
      },
    ],
    "presentation": {
      "reveal": "always",
      "panel": "shared",
      "clear": true,
    },
  },
],
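The clang problem matcher above maps compiler diagnostics to file, line, column, severity, and message. A rough way to check that its first pattern does what it claims, outside VS Code — note that \d and \s have been translated to POSIX classes so grep -E can evaluate it, so this is an approximation of the matcher, not the matcher itself:

```bash
# First pattern of the "clang" matcher, rewritten with POSIX character classes.
pattern='^([^:]+):([0-9]+):([0-9]+):[[:space:]]+(warning|error|note|remark):[[:space:]]+(.*)$'

# A typical clang diagnostic line (the path and message are made up).
echo 'src/bun.js/bindings/BunProcess.cpp:42:10: error: expected expression' | grep -E "$pattern"
# The line is echoed back, i.e. it matches: file, line, column, severity, message.
```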
@@ -1,6 +1,6 @@
Configuring a development environment for Bun can take 10-30 minutes depending on your internet connection and computer speed. You will need ~10GB of free disk space for the repository and build artifacts.

If you are using Windows, please refer to [this guide](/docs/project/building-windows)
If you are using Windows, please refer to [this guide](/docs/project/building-windows.md)

{% details summary="For Ubuntu users" %}
TL;DR: Ubuntu 22.04 is suggested.
22  build.zig

@@ -327,6 +327,19 @@ pub fn build(b: *Build) !void {
        .{ .os = .windows, .arch = .x86_64 },
    });
}

// zig build enum-extractor
{
    // const step = b.step("enum-extractor", "Extract enum definitions (invoked by a code generator)");
    // const exe = b.addExecutable(.{
    //     .name = "enum_extractor",
    //     .root_source_file = b.path("./src/generated_enum_extractor.zig"),
    //     .target = b.graph.host,
    //     .optimize = .Debug,
    // });
    // const run = b.addRunArtifact(exe);
    // step.dependOn(&run.step);
}
}

pub fn addMultiCheck(
@@ -414,6 +427,15 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
}
addInternalPackages(b, obj, opts);
obj.root_module.addImport("build_options", opts.buildOptionsModule(b));

const translate_plugin_api = b.addTranslateC(.{
    .root_source_file = b.path("./packages/bun-native-bundler-plugin-api/bundler_plugin.h"),
    .target = opts.target,
    .optimize = opts.optimize,
    .link_libc = true,
});
obj.root_module.addImport("bun-native-bundler-plugin-api", translate_plugin_api.createModule());

return obj;
}
@@ -136,16 +136,6 @@
set(WARNING WARNING)
endif()

if(LINUX)
  if(EXISTS "/etc/alpine-release")
    set(DEFAULT_ABI "musl")
  else()
    set(DEFAULT_ABI "gnu")
  endif()

  optionx(ABI "musl|gnu" "The ABI to use (e.g. musl, gnu)" DEFAULT ${DEFAULT_ABI})
endif()

# TODO: This causes flaky zig builds in CI, so temporarily disable it.
# if(CI)
#   set(DEFAULT_VENDOR_PATH ${CACHE_PATH}/vendor)
@@ -10,7 +10,6 @@ optionx(GITHUB_ACTIONS BOOL "If GitHub Actions is enabled" DEFAULT OFF)

if(BUILDKITE)
  optionx(BUILDKITE_COMMIT STRING "The commit hash")
  optionx(BUILDKITE_MESSAGE STRING "The commit message")
endif()

optionx(CMAKE_BUILD_TYPE "Debug|Release|RelWithDebInfo|MinSizeRel" "The build type to use" REQUIRED)
@@ -21,7 +20,7 @@
else()
  setx(RELEASE OFF)
endif()

if(CMAKE_BUILD_TYPE MATCHES "Debug|RelWithDebInfo")
if(CMAKE_BUILD_TYPE MATCHES "Debug")
  setx(DEBUG ON)
else()
  setx(DEBUG OFF)
@@ -49,6 +48,16 @@
else()
  message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif()

if(LINUX)
  if(EXISTS "/etc/alpine-release")
    set(DEFAULT_ABI "musl")
  else()
    set(DEFAULT_ABI "gnu")
  endif()

  optionx(ABI "musl|gnu" "The ABI to use (e.g. musl, gnu)" DEFAULT ${DEFAULT_ABI})
endif()

if(ARCH STREQUAL "x64")
  optionx(ENABLE_BASELINE BOOL "If baseline features should be used for older CPUs (e.g. disables AVX, AVX2)" DEFAULT OFF)
endif()
@@ -56,14 +65,7 @@ endif()
optionx(ENABLE_LOGS BOOL "If debug logs should be enabled" DEFAULT ${DEBUG})
optionx(ENABLE_ASSERTIONS BOOL "If debug assertions should be enabled" DEFAULT ${DEBUG})

if(BUILDKITE_MESSAGE AND BUILDKITE_MESSAGE MATCHES "\\[release build\\]")
  message(STATUS "Switched to release build, since commit message contains: \"[release build]\"")
  set(DEFAULT_CANARY OFF)
else()
  set(DEFAULT_CANARY ON)
endif()

optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ${DEFAULT_CANARY})
optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ON)

if(ENABLE_CANARY AND BUILDKITE)
  execute_process(
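The ABI default above comes down to a single filesystem probe: Alpine (musl-based) installs ship /etc/alpine-release, everything else on Linux is assumed to be glibc. Roughly the same check in shell, as an illustration rather than part of the build:

```bash
if [ -e /etc/alpine-release ]; then
  DEFAULT_ABI=musl   # Alpine and other musl-based images
else
  DEFAULT_ABI=gnu    # assume glibc elsewhere on Linux
fi
echo "ABI defaults to: $DEFAULT_ABI"
```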
@@ -318,13 +318,13 @@ register_command(
  TARGET
    bun-bake-codegen
  COMMENT
    "Bundling Kit Runtime"
    "Bundling Bake Runtime"
  COMMAND
    ${BUN_EXECUTABLE}
    run
    ${BUN_BAKE_RUNTIME_CODEGEN_SCRIPT}
    --debug=${DEBUG}
    --codegen_root=${CODEGEN_PATH}
    --codegen-root=${CODEGEN_PATH}
  SOURCES
    ${BUN_BAKE_RUNTIME_SOURCES}
    ${BUN_BAKE_RUNTIME_CODEGEN_SOURCES}
@@ -334,6 +334,39 @@
    ${BUN_BAKE_RUNTIME_OUTPUTS}
)

set(BUN_BINDGEN_SCRIPT ${CWD}/src/codegen/bindgen.ts)

file(GLOB_RECURSE BUN_BINDGEN_SOURCES ${CONFIGURE_DEPENDS}
  ${CWD}/src/**/*.bind.ts
)

set(BUN_BINDGEN_CPP_OUTPUTS
  ${CODEGEN_PATH}/GeneratedBindings.cpp
)

set(BUN_BINDGEN_ZIG_OUTPUTS
  ${CWD}/src/bun.js/bindings/GeneratedBindings.zig
)

register_command(
  TARGET
    bun-binding-generator
  COMMENT
    "Processing \".bind.ts\" files"
  COMMAND
    ${BUN_EXECUTABLE}
    run
    ${BUN_BINDGEN_SCRIPT}
    --debug=${DEBUG}
    --codegen-root=${CODEGEN_PATH}
  SOURCES
    ${BUN_BINDGEN_SOURCES}
    ${BUN_BINDGEN_SCRIPT}
  OUTPUTS
    ${BUN_BINDGEN_CPP_OUTPUTS}
    ${BUN_BINDGEN_ZIG_OUTPUTS}
)

set(BUN_JS_SINK_SCRIPT ${CWD}/src/codegen/generate-jssink.ts)

set(BUN_JS_SINK_SOURCES
@@ -385,7 +418,6 @@ set(BUN_OBJECT_LUT_OUTPUTS
  ${CODEGEN_PATH}/NodeModuleModule.lut.h
)

macro(WEBKIT_ADD_SOURCE_DEPENDENCIES _source _deps)
  set(_tmp)
  get_source_file_property(_tmp ${_source} OBJECT_DEPENDS)
@@ -461,6 +493,7 @@ list(APPEND BUN_ZIG_SOURCES
  ${CWD}/build.zig
  ${CWD}/root.zig
  ${CWD}/root_wasm.zig
  ${BUN_BINDGEN_ZIG_OUTPUTS}
)

set(BUN_ZIG_GENERATED_SOURCES
@@ -482,7 +515,6 @@ endif()

set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)

if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
  if(APPLE)
    set(ZIG_CPU "apple_m1")
@@ -606,6 +638,7 @@ list(APPEND BUN_CPP_SOURCES
  ${BUN_JS_SINK_OUTPUTS}
  ${BUN_JAVASCRIPT_OUTPUTS}
  ${BUN_OBJECT_LUT_OUTPUTS}
  ${BUN_BINDGEN_CPP_OUTPUTS}
)

if(WIN32)
@@ -1163,7 +1196,7 @@ if(NOT BUN_CPP_ONLY)

if(CI)
  set(bunTriplet bun-${OS}-${ARCH})
  if(ABI STREQUAL "musl")
  if(LINUX AND ABI STREQUAL "musl")
    set(bunTriplet ${bunTriplet}-musl)
  endif()
  if(ENABLE_BASELINE)
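The bun-binding-generator command registered above amounts to running the bindgen script over every *.bind.ts file and emitting GeneratedBindings.cpp and GeneratedBindings.zig into the codegen directory. A hedged sketch of the equivalent manual invocation — the output directory and the literal `--debug=ON` value are assumptions standing in for CMake's ${CODEGEN_PATH} and ${DEBUG}:

```bash
# Inputs the generator consumes:
find src -name '*.bind.ts' | head

# The same script CMake wires up, with placeholder arguments:
bun run src/codegen/bindgen.ts --debug=ON --codegen-root=build/codegen
```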
@@ -4,7 +4,7 @@ register_repository(
  REPOSITORY
    c-ares/c-ares
  COMMIT
    d1722e6e8acaf10eb73fa995798a9cd421d9f85e
    41ee334af3e3d0027dca5e477855d0244936bd49
)

register_cmake_command(

@@ -4,7 +4,7 @@ register_repository(
  REPOSITORY
    ebiggers/libdeflate
  COMMIT
    dc76454a39e7e83b68c3704b6e3784654f8d5ac5
    9d624d1d8ba82c690d6d6be1d0a961fc5a983ea4
)

register_cmake_command(

@@ -4,7 +4,7 @@ register_repository(
  REPOSITORY
    cloudflare/lol-html
  COMMIT
    8d4c273ded322193d017042d1f48df2766b0f88b
    4f8becea13a0021c8b71abd2dcc5899384973b66
)

set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
@@ -49,6 +49,8 @@ register_command(
    CARGO_TERM_VERBOSE=true
    CARGO_TERM_DIAGNOSTIC=true
    CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
    CARGO_HOME=${CARGO_HOME}
    RUSTUP_HOME=${RUSTUP_HOME}
)

target_link_libraries(${bun} PRIVATE ${LOLHTML_LIBRARY})

@@ -4,7 +4,7 @@ register_repository(
  REPOSITORY
    litespeedtech/ls-hpack
  COMMIT
    3d0f1fc1d6e66a642e7a98c55deb38aa986eb4b0
    32e96f10593c7cb8553cd8c9c12721100ae9e924
)

if(WIN32)

@@ -4,7 +4,7 @@ if(NOT ENABLE_LLVM)
  return()
endif()

if(CMAKE_HOST_WIN32 OR CMAKE_HOST_APPLE OR ABI STREQUAL "musl")
if(CMAKE_HOST_WIN32 OR CMAKE_HOST_APPLE OR EXISTS "/etc/alpine-release")
  set(DEFAULT_LLVM_VERSION "18.1.8")
else()
  set(DEFAULT_LLVM_VERSION "16.0.6")
@@ -1,15 +1,42 @@
if(DEFINED ENV{CARGO_HOME})
  set(CARGO_HOME $ENV{CARGO_HOME})
elseif(CMAKE_HOST_WIN32)
  set(CARGO_HOME $ENV{USERPROFILE}/.cargo)
  if(NOT EXISTS ${CARGO_HOME})
    set(CARGO_HOME $ENV{PROGRAMFILES}/Rust/cargo)
  endif()
else()
  set(CARGO_HOME $ENV{HOME}/.cargo)
endif()

if(DEFINED ENV{RUSTUP_HOME})
  set(RUSTUP_HOME $ENV{RUSTUP_HOME})
elseif(CMAKE_HOST_WIN32)
  set(RUSTUP_HOME $ENV{USERPROFILE}/.rustup)
  if(NOT EXISTS ${RUSTUP_HOME})
    set(RUSTUP_HOME $ENV{PROGRAMFILES}/Rust/rustup)
  endif()
else()
  set(RUSTUP_HOME $ENV{HOME}/.rustup)
endif()

find_command(
  VARIABLE
    CARGO_EXECUTABLE
  COMMAND
    cargo
  PATHS
    $ENV{HOME}/.cargo/bin
    ${CARGO_HOME}/bin
  REQUIRED
    OFF
)

if(EXISTS ${CARGO_EXECUTABLE})
  if(CARGO_EXECUTABLE MATCHES "^${CARGO_HOME}")
    setx(CARGO_HOME ${CARGO_HOME})
    setx(RUSTUP_HOME ${RUSTUP_HOME})
  endif()

  return()
endif()
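The same environment-or-default resolution, expressed in shell as a quick sanity check of what the build will pick up on a Unix host (the Windows fallbacks are omitted here):

```bash
CARGO_HOME="${CARGO_HOME:-$HOME/.cargo}"     # honor $CARGO_HOME, else ~/.cargo
RUSTUP_HOME="${RUSTUP_HOME:-$HOME/.rustup}"  # honor $RUSTUP_HOME, else ~/.rustup

echo "cargo home:  $CARGO_HOME"
echo "rustup home: $RUSTUP_HOME"
ls "$CARGO_HOME/bin/cargo" 2>/dev/null || echo "cargo not found under $CARGO_HOME"
```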
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")

if(NOT WEBKIT_VERSION)
  set(WEBKIT_VERSION 3bc4abf2d5875baf500b4687ef869987f6d19e00)
  set(WEBKIT_VERSION 8f9ae4f01a047c666ef548864294e01df731d4ea)
endif()

if(WEBKIT_LOCAL)
@@ -63,7 +63,7 @@ else()
  message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif()

if(ABI STREQUAL "musl")
if(LINUX AND ABI STREQUAL "musl")
  set(WEBKIT_SUFFIX "-musl")
endif()
@@ -671,7 +671,7 @@ _bun() {
cmd)
  local -a scripts_list
  IFS=$'\n' scripts_list=($(SHELL=zsh bun getcompletes i))
  scripts="scripts:scripts:(($scripts_list))"
  scripts="scripts:scripts:((${scripts_list//:/\\\\:}))"
  IFS=$'\n' files_list=($(SHELL=zsh bun getcompletes j))

  main_commands=(
@@ -871,8 +871,8 @@ _bun_run_param_script_completion() {
  IFS=$'\n' scripts_list=($(SHELL=zsh bun getcompletes s))
  IFS=$'\n' bins=($(SHELL=zsh bun getcompletes b))

  _alternative "scripts:scripts:(($scripts_list))"
  _alternative "bin:bin:(($bins))"
  _alternative "scripts:scripts:((${scripts_list//:/\\\\:}))"
  _alternative "bin:bin:((${bins//:/\\\\:}))"
  _alternative "files:file:_files -g '*.(js|ts|jsx|tsx|wasm)'"
}
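The completion change escapes colons before handing script names to _alternative, because zsh's completion syntax treats the colon in `value:description` as a separator — a package.json script like `test:watch` would otherwise be split in half. A small sketch of the parameter expansion involved, with made-up script names:

```bash
scripts_list=("test:watch" "build:prod")

# Unescaped: the colon inside each name would be read as zsh's value:description separator.
printf '%s\n' "${scripts_list[@]}"

# Escaped, as the completion function now does: every ":" becomes "\:".
printf '%s\n' "${scripts_list[@]//:/\\:}"
# test\:watch
# build\:prod
```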
@@ -75,7 +75,7 @@ To instead throw an error when a parameter is missing and allow binding without
|
||||
import { Database } from "bun:sqlite";
|
||||
|
||||
const strict = new Database(
|
||||
":memory:",
|
||||
":memory:",
|
||||
{ strict: true }
|
||||
);
|
||||
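For reference, a minimal sketch of what `strict: true` enables, assuming the behavior described above (binding named parameters without a prefix, and throwing when a parameter is missing):

```ts
import { Database } from "bun:sqlite";

const strict = new Database(":memory:", { strict: true });

// Named parameters can be bound without the "$" prefix...
const query = strict.query("SELECT $message;");
query.all({ message: "Hello" });

// ...and a missing parameter throws instead of silently binding NULL.
query.all({}); // throws
```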
|
||||
@@ -177,7 +177,7 @@ const query = db.prepare("SELECT * FROM foo WHERE bar = ?");

## WAL mode

SQLite supports [write-ahead log mode](https://www.sqlite.org/wal.html) (WAL) which dramatically improves performance, especially in situations with many concurrent writes. It's broadly recommended to enable WAL mode for most typical applications.
SQLite supports [write-ahead log mode](https://www.sqlite.org/wal.html) (WAL) which dramatically improves performance, especially in situations with many concurrent readers and a single writer. It's broadly recommended to enable WAL mode for most typical applications.

To enable WAL mode, run this pragma query at the beginning of your application:
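A minimal example of that pragma, using the `Database` API shown above:

```ts
import { Database } from "bun:sqlite";

const db = new Database("mydb.sqlite");
// Enable write-ahead logging; readers no longer block the single writer.
db.exec("PRAGMA journal_mode = WAL;");
```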
|
||||
|
||||
@@ -546,6 +546,113 @@ export type ImportKind =
|
||||
|
||||
By design, the manifest is a simple JSON object that can easily be serialized or written to disk. It is also compatible with esbuild's [`metafile`](https://esbuild.github.io/api/#metafile) format. -->
|
||||
|
||||
### `env`
|
||||
|
||||
Controls how environment variables are handled during bundling. Internally, this uses `define` to inject environment variables into the bundle, but makes it easier to specify the environment variables to inject.
|
||||
|
||||
#### `env: "inline"`
|
||||
|
||||
Injects environment variables into the bundled output by converting `process.env.FOO` references to string literals containing the actual environment variable values.
|
||||
|
||||
{% codetabs group="a" %}
|
||||
|
||||
```ts#JavaScript
|
||||
await Bun.build({
|
||||
entrypoints: ['./index.tsx'],
|
||||
outdir: './out',
|
||||
env: "inline",
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
$ FOO=bar BAZ=123 bun build ./index.tsx --outdir ./out --env inline
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
For the input below:
|
||||
|
||||
```js#input.js
|
||||
console.log(process.env.FOO);
|
||||
console.log(process.env.BAZ);
|
||||
```
|
||||
|
||||
The generated bundle will contain the following code:
|
||||
|
||||
```js#output.js
|
||||
console.log("bar");
|
||||
console.log("123");
|
||||
```
|
||||
|
||||
#### `env: "PUBLIC_*"` (prefix)
|
||||
|
||||
Inlines environment variables matching the given prefix (the part before the `*` character), replacing `process.env.FOO` with the actual environment variable value. This is useful for selectively inlining environment variables for things like public-facing URLs or client-side tokens, without worrying about injecting private credentials into output bundles.
|
||||
|
||||
{% codetabs group="a" %}
|
||||
|
||||
```ts#JavaScript
|
||||
await Bun.build({
|
||||
entrypoints: ['./index.tsx'],
|
||||
outdir: './out',
|
||||
|
||||
// Inline all env vars that start with "ACME_PUBLIC_"
|
||||
env: "ACME_PUBLIC_*",
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
$ FOO=bar BAZ=123 ACME_PUBLIC_URL=https://acme.com bun build ./index.tsx --outdir ./out --env 'ACME_PUBLIC_*'
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
For example, given the following environment variables:
|
||||
|
||||
```bash
|
||||
$ FOO=bar BAZ=123 ACME_PUBLIC_URL=https://acme.com
|
||||
```
|
||||
|
||||
And source code:
|
||||
|
||||
```ts#index.tsx
|
||||
console.log(process.env.FOO);
|
||||
console.log(process.env.ACME_PUBLIC_URL);
|
||||
console.log(process.env.BAZ);
|
||||
```
|
||||
|
||||
The generated bundle will contain the following code:
|
||||
|
||||
```js
|
||||
console.log(process.env.FOO);
|
||||
console.log("https://acme.com");
|
||||
console.log(process.env.BAZ);
|
||||
```
|
||||
|
||||
#### `env: "disable"`
|
||||
|
||||
Disables environment variable injection entirely.
|
||||
|
||||
For example, given the following environment variables:
|
||||
|
||||
```bash
|
||||
$ FOO=bar BAZ=123 ACME_PUBLIC_URL=https://acme.com
|
||||
```
|
||||
|
||||
And source code:
|
||||
|
||||
```ts#index.tsx
|
||||
console.log(process.env.FOO);
|
||||
console.log(process.env.ACME_PUBLIC_URL);
|
||||
console.log(process.env.BAZ);
|
||||
```
|
||||
|
||||
The generated bundle will contain the following code:
|
||||
|
||||
```js
|
||||
console.log(process.env.FOO);
|
||||
console.log(process.env.BAZ);
|
||||
```
|
||||
|
||||
### `sourcemap`
|
||||
|
||||
Specifies the type of sourcemap to generate.
|
||||
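As an illustrative sketch of how this option is passed (the value `"external"` here is an assumed example; check the full sourcemap documentation for the complete list of accepted values):

```ts
await Bun.build({
  entrypoints: ['./index.tsx'],
  outdir: './out',
  // assumed example value; "inline" and "none" are other common choices
  sourcemap: "external",
})
```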
|
||||
@@ -75,8 +75,10 @@ jobs:
|
||||
name: build-app
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Install bun
|
||||
uses: oven-sh/setup-bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
- name: Install dependencies # (assuming your project has dependencies)
|
||||
run: bun install # You can use npm/yarn/pnpm instead if you prefer
|
||||
- name: Run tests
|
||||
@@ -124,7 +126,7 @@ Use the `--bail` flag to abort the test run early after a pre-determined number
|
||||
$ bun test --bail
|
||||
|
||||
# bail after 10 failure
|
||||
$ bun test --bail 10
|
||||
$ bun test --bail=10
|
||||
```
|
||||
|
||||
## Watch mode
|
||||
|
||||
@@ -16,7 +16,7 @@ Set these variables in a `.env` file.

Bun reads the following files automatically (listed in order of increasing precedence).

- `.env`
- `.env.production` or `.env.development` (depending on value of `NODE_ENV`)
- `.env.production`, `.env.development`, `.env.test` (depending on value of `NODE_ENV`)
- `.env.local`
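A small illustrative sketch of how that precedence plays out (the file contents and variable name are hypothetical):

```sh
# .env:              API_URL=https://example.com
# .env.development:  API_URL=https://dev.example.com
# .env.local:        API_URL=http://localhost:3000
#
# With NODE_ENV=development, process.env.API_URL resolves to
# "http://localhost:3000", because .env.local is read last.
```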
|
||||
```txt#.env
|
||||
|
||||
@@ -14,7 +14,7 @@ To bail after a certain threshold of failures, optionally specify a number after
|
||||
|
||||
```sh
|
||||
# bail after 10 failures
|
||||
$ bun test --bail 10
|
||||
$ bun test --bail=10
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -30,7 +30,6 @@ Bun implements the vast majority of Jest's matchers, but compatibility isn't 100
|
||||
|
||||
Some notable missing features:
|
||||
|
||||
- `expect().toMatchInlineSnapshot()`
|
||||
- `expect().toHaveReturned()`
|
||||
|
||||
---
|
||||
@@ -57,7 +56,7 @@ Replace `bail` in your Jest config with the `--bail` CLI flag.
|
||||
``` -->
|
||||
|
||||
```sh
|
||||
$ bun test --bail 3
|
||||
$ bun test --bail=3
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -4,10 +4,6 @@ name: Use snapshot testing in `bun test`
|
||||
|
||||
Bun's test runner supports Jest-style snapshot testing via `.toMatchSnapshot()`.
|
||||
|
||||
{% callout %}
|
||||
The `.toMatchInlineSnapshot()` method is not yet supported.
|
||||
{% /callout %}
|
||||
|
||||
```ts#snap.test.ts
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
@@ -96,4 +92,4 @@ Ran 1 tests across 1 files. [102.00ms]
|
||||
|
||||
---
|
||||
|
||||
See [Docs > Test Runner > Snapshots](https://bun.sh/docs/test/mocks) for complete documentation on mocking with the Bun test runner.
|
||||
See [Docs > Test Runner > Snapshots](https://bun.sh/docs/test/snapshots) for complete documentation on snapshots with the Bun test runner.
|
||||
|
||||
@@ -49,7 +49,7 @@ Next, add these preload scripts to your `bunfig.toml` (you can also have everyth
|
||||
|
||||
```toml#bunfig.toml
|
||||
[test]
|
||||
preload = ["happydom.ts", "testing-library.ts"]
|
||||
preload = ["./happydom.ts", "./testing-library.ts"]
|
||||
```
|
||||
---
|
||||
|
||||
@@ -84,4 +84,4 @@ test('Can use Testing Library', () => {
|
||||
|
||||
---
|
||||
|
||||
Refer to the [Testing Library docs](https://testing-library.com/), [Happy DOM repo](https://github.com/capricorn86/happy-dom) and [Docs > Test runner > DOM](https://bun.sh/docs/test/dom) for complete documentation on writing browser tests with Bun.
|
||||
Refer to the [Testing Library docs](https://testing-library.com/), [Happy DOM repo](https://github.com/capricorn86/happy-dom) and [Docs > Test runner > DOM](https://bun.sh/docs/test/dom) for complete documentation on writing browser tests with Bun.
|
||||
|
||||
@@ -4,10 +4,6 @@ name: Update snapshots in `bun test`
|
||||
|
||||
Bun's test runner supports Jest-style snapshot testing via `.toMatchSnapshot()`.
|
||||
|
||||
{% callout %}
|
||||
The `.toMatchInlineSnapshot()` method is not yet supported.
|
||||
{% /callout %}
|
||||
|
||||
```ts#snap.test.ts
|
||||
import { test, expect } from "bun:test";
|
||||
|
||||
@@ -47,4 +43,4 @@ Ran 1 tests across 1 files. [102.00ms]
|
||||
|
||||
---
|
||||
|
||||
See [Docs > Test Runner > Snapshots](https://bun.sh/docs/test/mocks) for complete documentation on mocking with the Bun test runner.
|
||||
See [Docs > Test Runner > Snapshots](https://bun.sh/docs/test/snapshots) for complete documentation on snapshots with the Bun test runner.
|
||||
|
||||
@@ -73,8 +73,7 @@ There are also image variants for different operating systems.
$ docker pull oven/bun:debian
$ docker pull oven/bun:slim
$ docker pull oven/bun:distroless
# alpine not recommended until #918 is fixed
# $ docker pull oven/bun:alpine
$ docker pull oven/bun:alpine
```
|
||||
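As a quick way to check that one of these images works (the tag choice here is arbitrary, and the entrypoint is set explicitly so the command works regardless of the image's default):

```bash
$ docker run --rm --entrypoint bun oven/bun:alpine --version
```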
|
||||
## Checking installation
|
||||
@@ -190,14 +189,19 @@ For convenience, here are download links for the latest version:
|
||||
|
||||
- [`bun-linux-x64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip)
|
||||
- [`bun-linux-x64-baseline.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64-baseline.zip)
|
||||
- [`bun-linux-x64-musl.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64-musl.zip)
|
||||
- [`bun-linux-x64-musl-baseline.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64-musl-baseline.zip)
|
||||
- [`bun-windows-x64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-windows-x64.zip)
|
||||
- [`bun-windows-x64-baseline.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-windows-x64-baseline.zip)
|
||||
- [`bun-darwin-aarch64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-darwin-aarch64.zip)
|
||||
- [`bun-linux-aarch64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-aarch64.zip)
|
||||
- [`bun-linux-aarch64-musl.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-aarch64-musl.zip)
|
||||
- [`bun-darwin-x64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-darwin-x64.zip)
|
||||
- [`bun-darwin-x64-baseline.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-darwin-x64-baseline.zip)
|
||||
|
||||
The `baseline` binaries are built for older CPUs which may not support AVX2 instructions. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install scripts automatically choose the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
|
||||
The `musl` binaries are built for distributions that do not ship with the glibc libraries by default, instead relying on musl. The two most popular distros are Void Linux and Alpine Linux, with the latter used heavily in Docker containers. If you encounter an error like the following: `bun: /lib/x86_64-linux-gnu/libm.so.6: version GLIBC_2.29' not found (required by bun)`, try using the musl binary. Bun's install script automatically chooses the correct binary for your system.

The `baseline` binaries are built for older CPUs which may not support AVX2 instructions. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install script automatically chooses the correct binary for your system, which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
|
||||
|
||||
<!--
|
||||
## Native
|
||||
|
||||
@@ -402,6 +402,9 @@ export default {
|
||||
page("project/building-windows", "Building Windows", {
|
||||
description: "Learn how to setup a development environment for contributing to the Windows build of Bun.",
|
||||
}),
|
||||
page("project/bindgen", "Bindgen", {
|
||||
description: "About the bindgen code generator",
|
||||
}),
|
||||
page("project/licensing", "License", {
|
||||
description: `Bun is a MIT-licensed project with a large number of statically-linked dependencies with various licenses.`,
|
||||
}),
|
||||
|
||||
199
docs/project/bindgen.md
Normal file
@@ -0,0 +1,199 @@
|
||||
{% callout %}
|
||||
|
||||
This document is for maintainers and contributors to Bun, and describes internal implementation details.
|
||||
|
||||
{% /callout %}
|
||||
|
||||
The new bindings generator, introduced to the codebase in Dec 2024, scans for
|
||||
`*.bind.ts` to find function and class definitions, and generates glue code to
|
||||
interop between JavaScript and native code.
|
||||
|
||||
There are currently other code generators and systems that achieve similar
|
||||
purposes. The following will all eventually be completely phased out in favor of
|
||||
this one:
|
||||
|
||||
- "Classes generator", converting `*.classes.ts` for custom classes.
|
||||
- "JS2Native", allowing ad-hoc calls from `src/js` to native code.
|
||||
|
||||
## Creating JS Functions in Zig
|
||||
|
||||
Given a file implementing a simple function, such as `add`
|
||||
|
||||
```zig#src/bun.js/math.zig
|
||||
pub fn add(global: *JSC.JSGlobalObject, a: i32, b: i32) !i32 {
|
||||
return std.math.add(i32, a, b) catch {
|
||||
// Binding functions can return `error.OutOfMemory` and `error.JSError`.
|
||||
// Others like `error.Overflow` from `std.math.add` must be converted.
|
||||
// Remember to be descriptive.
|
||||
return global.throwPretty("Integer overflow while adding", .{});
|
||||
};
|
||||
}
|
||||
|
||||
const gen = bun.gen.math; // "math" being this file's basename
|
||||
|
||||
const std = @import("std");
|
||||
const bun = @import("root").bun;
|
||||
const JSC = bun.JSC;
|
||||
```
|
||||
|
||||
Then describe the API schema in a `.bind.ts` file. The binding file goes next to the Zig file.
|
||||
|
||||
```ts#src/bun.js/math.bind.ts
|
||||
import { t, fn } from 'bindgen';
|
||||
|
||||
export const add = fn({
|
||||
args: {
|
||||
global: t.globalObject,
|
||||
a: t.i32,
|
||||
b: t.i32.default(1),
|
||||
},
|
||||
ret: t.i32,
|
||||
});
|
||||
```
|
||||
|
||||
This function declaration is equivalent to:
|
||||
|
||||
```ts
|
||||
/**
|
||||
* Throws if zero arguments are provided.
|
||||
* Wraps out of range numbers using modulo.
|
||||
*/
|
||||
declare function add(a: number, b: number = 1): number;
|
||||
```
|
||||
|
||||
The code generator will provide `bun.gen.math.jsAdd`, which is the native function implementation. To pass the function to JavaScript, use `bun.gen.math.createAddCallback(global)`.
|
||||
|
||||
## Strings
|
||||
|
||||
The type for receiving strings is one of [`t.DOMString`](https://webidl.spec.whatwg.org/#idl-DOMString), [`t.ByteString`](https://webidl.spec.whatwg.org/#idl-ByteString), and [`t.USVString`](https://webidl.spec.whatwg.org/#idl-USVString). These map directly to their WebIDL counterparts, and have slightly different conversion logic. Bindgen will pass BunString to native code in all cases.
|
||||
|
||||
When in doubt, use DOMString.
|
||||
|
||||
`t.UTF8String` can be used in place of `t.DOMString`, but will call `bun.String.toUTF8`. The native callback receives `[]const u8` (WTF-8 data), which is freed after the function returns.
|
||||
|
||||
TLDRs from WebIDL spec:
|
||||
|
||||
- ByteString can only contain valid latin1 characters. It is not safe to assume bun.String is already in 8-bit format, but it is extremely likely.
|
||||
- USVString will not contain unpaired surrogates, i.e. text that can be represented correctly in UTF-8.
|
||||
- DOMString is the loosest but also most recommended strategy.
|
||||
|
||||
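An illustrative sketch combining these string types with the `fn` schema shown earlier (the function and argument names here are hypothetical):

```ts
import { t, fn } from 'bindgen';

export const greet = fn({
  args: {
    global: t.globalObject,
    // DOMString: the default, most permissive conversion
    name: t.DOMString,
    // UTF8String: native code receives []const u8 (WTF-8) instead of a BunString
    prefix: t.UTF8String,
  },
  ret: t.DOMString,
});
```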
## Function Variants
|
||||
|
||||
A function can specify multiple `variants` (also known as overloads).
|
||||
|
||||
```ts#src/bun.js/math.bind.ts
|
||||
import { t, fn } from 'bindgen';
|
||||
|
||||
export const action = fn({
|
||||
variants: [
|
||||
{
|
||||
args: {
|
||||
a: t.i32,
|
||||
},
|
||||
ret: t.i32,
|
||||
},
|
||||
{
|
||||
args: {
|
||||
a: t.DOMString,
|
||||
},
|
||||
ret: t.DOMString,
|
||||
},
|
||||
]
|
||||
});
|
||||
```
|
||||
|
||||
In Zig, each variant gets a number, based on the order in which the schema defines them.
|
||||
|
||||
```zig
|
||||
fn action1(a: i32) i32 {
|
||||
return a;
|
||||
}
|
||||
|
||||
fn action2(a: bun.String) bun.String {
|
||||
return a;
|
||||
}
|
||||
```
|
||||
|
||||
## `t.dictionary`
|
||||
|
||||
A `dictionary` is a definition for a JavaScript object, typically used as a function input. For function outputs, it is usually a smarter idea to declare a class type to add functions and destructuring.
|
||||
|
||||
## Enumerations
|
||||
|
||||
To use [WebIDL's enumeration](https://webidl.spec.whatwg.org/#idl-enums) type, use either:
|
||||
|
||||
- `t.stringEnum`: Create and codegen a new enum type.
|
||||
- `t.zigEnum`: Derive a bindgen type off of an existing enum in the codebase.
|
||||
|
||||
An example of `stringEnum` as used in `fmt.zig` / `bun:internal-for-testing`
|
||||
|
||||
```ts
|
||||
export const Formatter = t.stringEnum(
|
||||
"highlight-javascript",
|
||||
"escape-powershell",
|
||||
);
|
||||
|
||||
export const fmtString = fn({
|
||||
args: {
|
||||
global: t.globalObject,
|
||||
code: t.UTF8String,
|
||||
formatter: Formatter,
|
||||
},
|
||||
ret: t.DOMString,
|
||||
});
|
||||
```
|
||||
|
||||
WebIDL strongly encourages using kebab case for enumeration values, to be consistent with existing Web APIs.
|
||||
|
||||
### Deriving enums from Zig code
|
||||
|
||||
TODO: zigEnum
|
||||
|
||||
## `t.oneOf`
|
||||
|
||||
A `oneOf` is a union between two or more types. It is represented by `union(enum)` in Zig.
|
||||
|
||||
TODO:
|
||||
|
||||
## Attributes
|
||||
|
||||
There is a set of attributes that can be chained onto `t.*` types. Available on all types are:
|
||||
|
||||
- `.required`, in dictionary parameters only
|
||||
- `.optional`, in function arguments only
|
||||
- `.default(T)`
|
||||
|
||||
When a value is optional, it is lowered to a Zig optional.
|
||||
|
||||
Depending on the type, there are more attributes available. See the type definitions in auto-complete for more details. Note that only one of the above three can be applied, and it must be applied at the end.
|
||||
|
||||
### Integer Attributes
|
||||
|
||||
Integer types allow customizing the overflow behavior with `clamp` or `enforceRange`
|
||||
|
||||
```ts
|
||||
import { t, fn } from "bindgen";
|
||||
|
||||
export const add = fn({
|
||||
args: {
|
||||
global: t.globalObject,
|
||||
// enforce in i32 range
|
||||
a: t.i32.enforceRange(),
|
||||
// clamp to u16 range
|
||||
c: t.u16,
|
||||
// enforce in arbitrary range, with a default if not provided
|
||||
b: t.i32.enforceRange(0, 1000).default(5),
|
||||
// clamp to arbitrary range, or null
|
||||
d: t.u16.clamp(0, 10).optional,
|
||||
},
|
||||
ret: t.i32,
|
||||
});
|
||||
```
|
||||
|
||||
## Callbacks
|
||||
|
||||
TODO
|
||||
|
||||
## Classes
|
||||
|
||||
TODO
|
||||
@@ -355,7 +355,7 @@ Bun.build({
|
||||
|
||||
{% /callout %}
|
||||
|
||||
## Lifecycle callbacks
|
||||
## Lifecycle hooks
|
||||
|
||||
Plugins can register callbacks to be run at various points in the lifecycle of a bundle:
|
||||
|
||||
@@ -363,6 +363,8 @@ Plugins can register callbacks to be run at various points in the lifecycle of a
|
||||
- [`onResolve()`](#onresolve): Run before a module is resolved
|
||||
- [`onLoad()`](#onload): Run before a module is loaded.
|
||||
|
||||
### Reference
|
||||
|
||||
A rough overview of the types (please refer to Bun's `bun.d.ts` for the full type definitions):
|
||||
|
||||
```ts
|
||||
@@ -603,3 +605,98 @@ plugin({
|
||||
```
|
||||
|
||||
Note that the `.defer()` function currently has the limitation that it can only be called once per `onLoad` callback.
|
||||
|
||||
## Native plugins
|
||||
|
||||
{% callout %}
|
||||
**NOTE** — This is an advanced and experimental API recommended for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
|
||||
{% /callout %}
|
||||
|
||||
One of the reasons why Bun's bundler is so fast is that it is written in native code and leverages multi-threading to load and parse modules in parallel.
|
||||
|
||||
However, one limitation of plugins written in JavaScript is that JavaScript itself is single-threaded.
|
||||
|
||||
Native plugins are written as [NAPI](/docs/node-api) modules and can be run on multiple threads. This allows native plugins to run much faster than JavaScript plugins.
|
||||
|
||||
In addition, native plugins can skip unnecessary work such as the UTF-8 -> UTF-16 conversion needed to pass strings to JavaScript.
|
||||
|
||||
These are the following lifecycle hooks which are available to native plugins:
|
||||
|
||||
- [`onBeforeParse()`](#onbeforeparse): Called on any thread before a file is parsed by Bun's bundler.
|
||||
|
||||
### Creating a native plugin
|
||||
|
||||
Native plugins are NAPI modules which expose lifecycle hooks as C ABI functions.
|
||||
|
||||
To create a native plugin, you must export a C ABI function which matches the signature of the native lifecycle hook you want to implement.
|
||||
|
||||
#### Example: Rust with napi-rs
|
||||
|
||||
First initialize a napi project (see [here](https://napi.rs/docs/introduction/getting-started) for a more comprehensive guide).
|
||||
|
||||
Then install Bun's official safe plugin wrapper crate:
|
||||
|
||||
```bash
|
||||
cargo add bun-native-plugin
|
||||
```
|
||||
|
||||
Now you can export an `extern "C" fn` which is the implementation of your plugin:
|
||||
|
||||
```rust
|
||||
#[no_mangle]
|
||||
extern "C" fn on_before_parse_impl(
|
||||
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
|
||||
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
|
||||
) {
|
||||
let args = unsafe { &*args };
|
||||
let result = unsafe { &mut *result };
|
||||
|
||||
let mut handle = match bun_native_plugin::OnBeforeParse::from_raw(args, result) {
|
||||
Ok(handle) => handle,
|
||||
Err(_) => {
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let source_code = match handle.input_source_code() {
|
||||
Ok(source_code) => source_code,
|
||||
Err(_) => {
|
||||
handle.log_error("Fetching source code failed!");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let loader = handle.output_loader();
|
||||
  handle.set_output_source_code(source_code.replace("foo", "bar"), loader);
}
```
|
||||
|
||||
Use napi-rs to compile the plugin to a `.node` file, then you can `require()` it from JS and use it:
|
||||
|
||||
```js
|
||||
await Bun.build({
|
||||
entrypoints: ["index.ts"],
|
||||
setup(build) {
|
||||
const myNativePlugin = require("./path/to/plugin.node");
|
||||
|
||||
build.onBeforeParse(
|
||||
{ filter: /\.ts/ },
|
||||
{ napiModule: myNativePlugin, symbol: "on_before_parse_impl" },
|
||||
);
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### `onBeforeParse`
|
||||
|
||||
```ts
|
||||
onBeforeParse(
|
||||
args: { filter: RegExp; namespace?: string },
|
||||
callback: { napiModule: NapiModule; symbol: string; external?: unknown },
|
||||
): void;
|
||||
```
|
||||
|
||||
This lifecycle callback is run immediately before a file is parsed by Bun's bundler.
|
||||
|
||||
As input, it receives the file's contents and can optionally return new source code.
|
||||
|
||||
This callback can be called from any thread and so the napi module implementation must be thread-safe.
|
||||
|
||||
@@ -531,17 +531,17 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
|
||||
|
||||
---
|
||||
|
||||
- ❌
|
||||
- ✅
|
||||
- [`.toMatchInlineSnapshot()`](https://jestjs.io/docs/expect#tomatchinlinesnapshotpropertymatchers-inlinesnapshot)
|
||||
|
||||
---
|
||||
|
||||
- ❌
|
||||
- ✅
|
||||
- [`.toThrowErrorMatchingSnapshot()`](https://jestjs.io/docs/expect#tothrowerrormatchingsnapshothint)
|
||||
|
||||
---
|
||||
|
||||
- ❌
|
||||
- ✅
|
||||
- [`.toThrowErrorMatchingInlineSnapshot()`](https://jestjs.io/docs/expect#tothrowerrormatchinginlinesnapshotinlinesnapshot)
|
||||
|
||||
{% /table %}
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
## Troubleshooting
|
||||
|
||||
### Bun not running on an M1 (or Apple Silicon)

If you see a message like this

> [1] 28447 killed bun create next ./test

It most likely means you’re running Bun’s x64 version on Apple Silicon. This happens if Bun is running via Rosetta. Rosetta is unable to emulate AVX2 instructions, which Bun indirectly uses.

The fix is to ensure you installed a version of Bun built for Apple Silicon.
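One way to check which build you have (a rough sketch; exact output wording varies by system):

```sh
# Native Apple Silicon machines report "arm64" here
$ uname -m

# The installed binary should be an arm64 Mach-O executable, not x86_64
$ file $(which bun)
```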
|
||||
### error: Unexpected
|
||||
|
||||
If you see an error like this:
|
||||
|
||||

|
||||
|
||||
It usually means the max number of open file descriptors is being explicitly set to a low number. By default, Bun requests the max number of file descriptors available (which on macOS, is something like 32,000). But, if you previously ran into ulimit issues with, e.g., Chokidar, someone on The Internet may have advised you to run `ulimit -n 8192`.
|
||||
|
||||
That advice unfortunately **lowers** the hard limit to `8192`. This can be a problem in large repositories or projects with lots of dependencies. Chokidar (and other watchers) don’t seem to call `setrlimit`, which means they’re reliant on the (much lower) soft limit.
|
||||
|
||||
To fix this issue:
|
||||
|
||||
1. Remove any scripts that call `ulimit -n` and restart your shell.
|
||||
2. Try again, and if the error still occurs, try setting `ulimit -n` to an absurdly high number, such as `ulimit -n 2147483646`
|
||||
3. Try again, and if that still doesn’t fix it, open an issue
|
||||
|
||||
### Unzip is required
|
||||
|
||||
Unzip is required to install Bun on Linux. You can use one of the following commands to install `unzip`:
|
||||
|
||||
#### Debian / Ubuntu / Mint
|
||||
|
||||
```sh
|
||||
$ sudo apt install unzip
|
||||
```
|
||||
|
||||
#### RedHat / CentOS / Fedora
|
||||
|
||||
```sh
|
||||
$ sudo dnf install unzip
|
||||
```
|
||||
|
||||
#### Arch / Manjaro
|
||||
|
||||
```sh
|
||||
$ sudo pacman -S unzip
|
||||
```
|
||||
|
||||
#### OpenSUSE
|
||||
|
||||
```sh
|
||||
$ sudo zypper install unzip
|
||||
```
|
||||
|
||||
### bun install is stuck
|
||||
|
||||
Please run `bun install --verbose 2> logs.txt` and send them to me in Bun's discord. If you're on Linux, it would also be helpful if you run `sudo perf trace bun install --silent` and attach the logs.
|
||||
|
||||
### Uninstalling
|
||||
|
||||
Bun's binary and install cache is located in `~/.bun` by default. To uninstall bun, delete this directory and edit your shell config (`.bashrc`, `.zshrc`, or similar) to remove `~/.bun/bin` from the `$PATH` variable.
|
||||
|
||||
```sh
|
||||
$ rm -rf ~/.bun # make sure to remove ~/.bun/bin from $PATH
|
||||
```
|
||||
15
package.json
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "bun",
|
||||
"version": "1.1.36",
|
||||
"version": "1.1.39",
|
||||
"workspaces": [
|
||||
"./packages/bun-types"
|
||||
],
|
||||
@@ -21,7 +21,7 @@
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"source-map-js": "^1.2.0",
|
||||
"typescript": "^5.4.5",
|
||||
"typescript": "^5.7.2",
|
||||
"caniuse-lite": "^1.0.30001620",
|
||||
"autoprefixer": "^10.4.19",
|
||||
"@mdn/browser-compat-data": "~5.5.28"
|
||||
@@ -30,8 +30,8 @@
|
||||
"bun-types": "workspace:packages/bun-types"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
|
||||
"build:debug": "bun run build",
|
||||
"build": "bun run build:debug",
|
||||
"build:debug": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
|
||||
"build:valgrind": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_BASELINE=ON -ENABLE_VALGRIND=ON -B build/debug-valgrind",
|
||||
"build:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -B build/release",
|
||||
"build:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh",
|
||||
@@ -39,8 +39,8 @@
|
||||
"build:logs": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=ON -B build/release-logs",
|
||||
"build:safe": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=ReleaseSafe -B build/release-safe",
|
||||
"build:smol": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=MinSizeRel -B build/release-smol",
|
||||
"build:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DWEBKIT_LOCAL=ON -B build/debug",
|
||||
"build:release:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DWEBKIT_LOCAL=ON -B build/release",
|
||||
"build:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DWEBKIT_LOCAL=ON -B build/debug-local",
|
||||
"build:release:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DWEBKIT_LOCAL=ON -B build/release-local",
|
||||
"build:release:with_logs": "cmake . -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=true -GNinja -Bbuild-release && ninja -Cbuild-release",
|
||||
"build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
|
||||
"css-properties": "bun run src/css/properties/generate_properties.ts",
|
||||
@@ -73,6 +73,7 @@
|
||||
"prettier": "bun run analysis:no-llvm --target prettier",
|
||||
"prettier:check": "bun run analysis:no-llvm --target prettier-check",
|
||||
"prettier:extra": "bun run analysis:no-llvm --target prettier-extra",
|
||||
"prettier:diff": "bun run analysis:no-llvm --target prettier-diff"
|
||||
"prettier:diff": "bun run analysis:no-llvm --target prettier-diff",
|
||||
"node:test": "node ./scripts/runner.node.mjs --quiet --exec-path=$npm_execpath --node-tests "
|
||||
}
|
||||
}
|
||||
|
||||
5
packages/bun-build-mdx-rs/.cargo/config.toml
Normal file
@@ -0,0 +1,5 @@
|
||||
[target.aarch64-unknown-linux-musl]
|
||||
linker = "aarch64-linux-musl-gcc"
|
||||
rustflags = ["-C", "target-feature=-crt-static"]
|
||||
[target.x86_64-pc-windows-msvc]
|
||||
rustflags = ["-C", "target-feature=+crt-static"]
|
||||
202
packages/bun-build-mdx-rs/.gitignore
vendored
Normal file
@@ -0,0 +1,202 @@
|
||||
# Created by https://www.toptal.com/developers/gitignore/api/node
|
||||
# Edit at https://www.toptal.com/developers/gitignore?templates=node
|
||||
|
||||
### Node ###
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# TypeScript v1 declaration files
|
||||
typings/
|
||||
|
||||
# TypeScript cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
.eslintcache
|
||||
|
||||
# Microbundle cache
|
||||
.rpt2_cache/
|
||||
.rts2_cache_cjs/
|
||||
.rts2_cache_es/
|
||||
.rts2_cache_umd/
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variables file
|
||||
.env
|
||||
.env.test
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
.cache
|
||||
|
||||
# Next.js build output
|
||||
.next
|
||||
|
||||
# Nuxt.js build / generate output
|
||||
.nuxt
|
||||
dist
|
||||
|
||||
# Gatsby files
|
||||
.cache/
|
||||
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||
# public
|
||||
|
||||
# vuepress build output
|
||||
.vuepress/dist
|
||||
|
||||
# Serverless directories
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
.dynamodb/
|
||||
|
||||
# TernJS port file
|
||||
.tern-port
|
||||
|
||||
# Stores VSCode versions used for testing VSCode extensions
|
||||
.vscode-test
|
||||
|
||||
# End of https://www.toptal.com/developers/gitignore/api/node
|
||||
|
||||
# Created by https://www.toptal.com/developers/gitignore/api/macos
|
||||
# Edit at https://www.toptal.com/developers/gitignore?templates=macos
|
||||
|
||||
### macOS ###
|
||||
# General
|
||||
.DS_Store
|
||||
.AppleDouble
|
||||
.LSOverride
|
||||
|
||||
# Icon must end with two
|
||||
Icon
|
||||
|
||||
|
||||
# Thumbnails
|
||||
._*
|
||||
|
||||
# Files that might appear in the root of a volume
|
||||
.DocumentRevisions-V100
|
||||
.fseventsd
|
||||
.Spotlight-V100
|
||||
.TemporaryItems
|
||||
.Trashes
|
||||
.VolumeIcon.icns
|
||||
.com.apple.timemachine.donotpresent
|
||||
|
||||
# Directories potentially created on remote AFP share
|
||||
.AppleDB
|
||||
.AppleDesktop
|
||||
Network Trash Folder
|
||||
Temporary Items
|
||||
.apdisk
|
||||
|
||||
### macOS Patch ###
|
||||
# iCloud generated files
|
||||
*.icloud
|
||||
|
||||
# End of https://www.toptal.com/developers/gitignore/api/macos
|
||||
|
||||
# Created by https://www.toptal.com/developers/gitignore/api/windows
|
||||
# Edit at https://www.toptal.com/developers/gitignore?templates=windows
|
||||
|
||||
### Windows ###
|
||||
# Windows thumbnail cache files
|
||||
Thumbs.db
|
||||
Thumbs.db:encryptable
|
||||
ehthumbs.db
|
||||
ehthumbs_vista.db
|
||||
|
||||
# Dump file
|
||||
*.stackdump
|
||||
|
||||
# Folder config file
|
||||
[Dd]esktop.ini
|
||||
|
||||
# Recycle Bin used on file shares
|
||||
$RECYCLE.BIN/
|
||||
|
||||
# Windows Installer files
|
||||
*.cab
|
||||
*.msi
|
||||
*.msix
|
||||
*.msm
|
||||
*.msp
|
||||
|
||||
# Windows shortcuts
|
||||
*.lnk
|
||||
|
||||
# End of https://www.toptal.com/developers/gitignore/api/windows
|
||||
|
||||
#Added by cargo
|
||||
|
||||
/target
|
||||
Cargo.lock
|
||||
|
||||
.pnp.*
|
||||
.yarn/*
|
||||
!.yarn/patches
|
||||
!.yarn/plugins
|
||||
!.yarn/releases
|
||||
!.yarn/sdks
|
||||
!.yarn/versions
|
||||
|
||||
*.node
|
||||
|
||||
dist/
|
||||
|
||||
index.js
|
||||
index.d.ts
|
||||
13
packages/bun-build-mdx-rs/.npmignore
Normal file
@@ -0,0 +1,13 @@
|
||||
target
|
||||
Cargo.lock
|
||||
.cargo
|
||||
.github
|
||||
npm
|
||||
.eslintrc
|
||||
.prettierignore
|
||||
rustfmt.toml
|
||||
yarn.lock
|
||||
*.node
|
||||
.yarn
|
||||
__test__
|
||||
renovate.json
|
||||
21
packages/bun-build-mdx-rs/Cargo.toml
Normal file
@@ -0,0 +1,21 @@
|
||||
[package]
|
||||
edition = "2021"
|
||||
name = "bun-mdx-rs"
|
||||
version = "0.0.0"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib"]
|
||||
|
||||
[dependencies]
|
||||
# Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
|
||||
napi = { version = "2.12.2", default-features = false, features = ["napi4"] }
|
||||
napi-derive = "2.12.2"
|
||||
mdxjs = "0.2.11"
|
||||
bun-native-plugin = { path = "../bun-native-plugin-rs" }
|
||||
|
||||
[build-dependencies]
|
||||
napi-build = "2.0.1"
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
strip = "symbols"
|
||||
34
packages/bun-build-mdx-rs/README.md
Normal file
@@ -0,0 +1,34 @@
|
||||
# bun-build-mdx-rs
|
||||
|
||||
This is a proof of concept for using a third-party native addon in `Bun.build()`.
|
||||
|
||||
This uses `mdxjs-rs` to convert MDX to JSX.
|
||||
|
||||
TODO: **This needs to be built & published to npm.**
|
||||
|
||||
## Building locally:
|
||||
|
||||
```sh
|
||||
cargo build --release
|
||||
```
|
||||
|
||||
```js
|
||||
import { build } from "bun";
|
||||
import mdx from "./index.js";
|
||||
|
||||
// TODO: This needs to be prebuilt for the current platform
|
||||
// Probably use a napi-rs template for this
|
||||
import addon from "./target/release/libmdx_bun.dylib" with { type: "file" };
|
||||
|
||||
const results = await build({
|
||||
entrypoints: ["./hello.jsx"],
|
||||
plugins: [mdx({ addon })],
|
||||
minify: true,
|
||||
outdir: "./dist",
|
||||
define: {
|
||||
"process.env.NODE_ENV": JSON.stringify("production"),
|
||||
},
|
||||
});
|
||||
|
||||
console.log(results);
|
||||
```
|
||||
7
packages/bun-build-mdx-rs/__test__/index.spec.mjs
Normal file
@@ -0,0 +1,7 @@
|
||||
import test from 'ava'
|
||||
|
||||
import { sum } from '../index.js'
|
||||
|
||||
test('sum from native', (t) => {
|
||||
t.is(sum(1, 2), 3)
|
||||
})
|
||||
5
packages/bun-build-mdx-rs/build.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
extern crate napi_build;
|
||||
|
||||
fn main() {
|
||||
napi_build::setup();
|
||||
}
|
||||
6
packages/bun-build-mdx-rs/input/index.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import page1 from "./page1.mdx";
|
||||
import page2 from "./page2.mdx";
|
||||
import page3 from "./page3.mdx";
|
||||
import page4 from "./page4.mdx";
|
||||
|
||||
console.log(page1, page2, page3, page4);
|
||||
11
packages/bun-build-mdx-rs/input/page1.mdx
Normal file
@@ -0,0 +1,11 @@
|
||||
# Hello World
|
||||
|
||||
This is a sample MDX file that demonstrates various MDX features.
|
||||
|
||||
## Components
|
||||
|
||||
You can use JSX components directly in MDX:
|
||||
|
||||
<Button onClick={() => alert("Hello!")}>Click me</Button>
|
||||
|
||||
## Code Blocks
|
||||
11
packages/bun-build-mdx-rs/input/page2.mdx
Normal file
@@ -0,0 +1,11 @@
|
||||
# Hello World
|
||||
|
||||
This is a sample MDX file that demonstrates various MDX features.
|
||||
|
||||
## Components
|
||||
|
||||
You can use JSX components directly in MDX:
|
||||
|
||||
<Button onClick={() => alert("Hello!")}>Click me</Button>
|
||||
|
||||
## Code Blocks
|
||||
11
packages/bun-build-mdx-rs/input/page3.mdx
Normal file
@@ -0,0 +1,11 @@
|
||||
# Hello World
|
||||
|
||||
This is a sample MDX file that demonstrates various MDX features.
|
||||
|
||||
## Components
|
||||
|
||||
You can use JSX components directly in MDX:
|
||||
|
||||
<Button onClick={() => alert("Hello!")}>Click me</Button>
|
||||
|
||||
## Code Blocks
|
||||
11
packages/bun-build-mdx-rs/input/page4.mdx
Normal file
@@ -0,0 +1,11 @@
|
||||
# Hello World
|
||||
|
||||
This is a sample MDX file that demonstrates various MDX features.
|
||||
|
||||
## Components
|
||||
|
||||
You can use JSX components directly in MDX:
|
||||
|
||||
<Button onClick={() => alert("Hello!")}>Click me</Button>
|
||||
|
||||
## Code Blocks
|
||||
3
packages/bun-build-mdx-rs/npm/darwin-arm64/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# `bun-mdx-rs-darwin-arm64`
|
||||
|
||||
This is the **aarch64-apple-darwin** binary for `bun-mdx-rs`
|
||||
18
packages/bun-build-mdx-rs/npm/darwin-arm64/package.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"name": "bun-mdx-rs-darwin-arm64",
|
||||
"version": "0.0.0",
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"main": "bun-mdx-rs.darwin-arm64.node",
|
||||
"files": [
|
||||
"bun-mdx-rs.darwin-arm64.node"
|
||||
],
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
}
|
||||
3
packages/bun-build-mdx-rs/npm/darwin-x64/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# `bun-mdx-rs-darwin-x64`
|
||||
|
||||
This is the **x86_64-apple-darwin** binary for `bun-mdx-rs`
|
||||
18
packages/bun-build-mdx-rs/npm/darwin-x64/package.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"name": "bun-mdx-rs-darwin-x64",
|
||||
"version": "0.0.0",
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"main": "bun-mdx-rs.darwin-x64.node",
|
||||
"files": [
|
||||
"bun-mdx-rs.darwin-x64.node"
|
||||
],
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
}
|
||||
3
packages/bun-build-mdx-rs/npm/linux-arm64-gnu/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# `bun-mdx-rs-linux-arm64-gnu`
|
||||
|
||||
This is the **aarch64-unknown-linux-gnu** binary for `bun-mdx-rs`
|
||||
21
packages/bun-build-mdx-rs/npm/linux-arm64-gnu/package.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"name": "bun-mdx-rs-linux-arm64-gnu",
|
||||
"version": "0.0.0",
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"main": "bun-mdx-rs.linux-arm64-gnu.node",
|
||||
"files": [
|
||||
"bun-mdx-rs.linux-arm64-gnu.node"
|
||||
],
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
},
|
||||
"libc": [
|
||||
"glibc"
|
||||
]
|
||||
}
|
||||
3
packages/bun-build-mdx-rs/npm/linux-arm64-musl/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# `bun-mdx-rs-linux-arm64-musl`
|
||||
|
||||
This is the **aarch64-unknown-linux-musl** binary for `bun-mdx-rs`
|
||||
21
packages/bun-build-mdx-rs/npm/linux-arm64-musl/package.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"name": "bun-mdx-rs-linux-arm64-musl",
|
||||
"version": "0.0.0",
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"main": "bun-mdx-rs.linux-arm64-musl.node",
|
||||
"files": [
|
||||
"bun-mdx-rs.linux-arm64-musl.node"
|
||||
],
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
},
|
||||
"libc": [
|
||||
"musl"
|
||||
]
|
||||
}
|
||||
3
packages/bun-build-mdx-rs/npm/linux-x64-gnu/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# `bun-mdx-rs-linux-x64-gnu`
|
||||
|
||||
This is the **x86_64-unknown-linux-gnu** binary for `bun-mdx-rs`
|
||||
21
packages/bun-build-mdx-rs/npm/linux-x64-gnu/package.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"name": "bun-mdx-rs-linux-x64-gnu",
|
||||
"version": "0.0.0",
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"main": "bun-mdx-rs.linux-x64-gnu.node",
|
||||
"files": [
|
||||
"bun-mdx-rs.linux-x64-gnu.node"
|
||||
],
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
},
|
||||
"libc": [
|
||||
"glibc"
|
||||
]
|
||||
}
|
||||
3
packages/bun-build-mdx-rs/npm/linux-x64-musl/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# `bun-mdx-rs-linux-x64-musl`
|
||||
|
||||
This is the **x86_64-unknown-linux-musl** binary for `bun-mdx-rs`
|
||||
21
packages/bun-build-mdx-rs/npm/linux-x64-musl/package.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"name": "bun-mdx-rs-linux-x64-musl",
|
||||
"version": "0.0.0",
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"main": "bun-mdx-rs.linux-x64-musl.node",
|
||||
"files": [
|
||||
"bun-mdx-rs.linux-x64-musl.node"
|
||||
],
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
},
|
||||
"libc": [
|
||||
"musl"
|
||||
]
|
||||
}
|
||||
3
packages/bun-build-mdx-rs/npm/win32-x64-msvc/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# `bun-mdx-rs-win32-x64-msvc`
|
||||
|
||||
This is the **x86_64-pc-windows-msvc** binary for `bun-mdx-rs`
|
||||
18
packages/bun-build-mdx-rs/npm/win32-x64-msvc/package.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"name": "bun-mdx-rs-win32-x64-msvc",
|
||||
"version": "0.0.0",
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"main": "bun-mdx-rs.win32-x64-msvc.node",
|
||||
"files": [
|
||||
"bun-mdx-rs.win32-x64-msvc.node"
|
||||
],
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
}
|
||||
37
packages/bun-build-mdx-rs/package.json
Normal file
@@ -0,0 +1,37 @@
|
||||
{
|
||||
"name": "bun-mdx-rs",
|
||||
"version": "0.0.0",
|
||||
"main": "index.js",
|
||||
"types": "index.d.ts",
|
||||
"napi": {
|
||||
"name": "bun-mdx-rs",
|
||||
"triples": {
|
||||
"additional": [
|
||||
"aarch64-apple-darwin",
|
||||
"aarch64-unknown-linux-gnu",
|
||||
"aarch64-unknown-linux-musl",
|
||||
"x86_64-unknown-linux-musl"
|
||||
]
|
||||
}
|
||||
},
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@napi-rs/cli": "^2.18.4",
|
||||
"ava": "^6.0.1"
|
||||
},
|
||||
"ava": {
|
||||
"timeout": "3m"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
},
|
||||
"scripts": {
|
||||
"artifacts": "napi artifacts",
|
||||
"build": "napi build --platform --release",
|
||||
"build:debug": "napi build --platform",
|
||||
"prepublishOnly": "napi prepublish -t npm",
|
||||
"test": "ava",
|
||||
"universal": "napi universal",
|
||||
"version": "napi version"
|
||||
}
|
||||
}
|
||||
2
packages/bun-build-mdx-rs/rustfmt.toml
Normal file
@@ -0,0 +1,2 @@
|
||||
tab_spaces = 2
|
||||
edition = "2021"
|
||||
55
packages/bun-build-mdx-rs/src/lib.rs
Normal file
@@ -0,0 +1,55 @@
|
||||
use bun_native_plugin::{define_bun_plugin, BunLoader, OnBeforeParse};
|
||||
use mdxjs::{compile, Options as CompileOptions};
|
||||
use napi_derive::napi;
|
||||
|
||||
#[macro_use]
|
||||
extern crate napi;
|
||||
|
||||
define_bun_plugin!("bun-mdx-rs");
|
||||
|
||||
#[no_mangle]
|
||||
pub extern "C" fn bun_mdx_rs(
|
||||
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
|
||||
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
|
||||
) {
|
||||
let args = unsafe { &*args };
|
||||
|
||||
let mut handle = match OnBeforeParse::from_raw(args, result) {
|
||||
Ok(handle) => handle,
|
||||
Err(_) => {
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let source_str = match handle.input_source_code() {
|
||||
Ok(source_str) => source_str,
|
||||
Err(_) => {
|
||||
handle.log_error("Failed to fetch source code");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let mut options = CompileOptions::gfm();
|
||||
|
||||
// Leave it as JSX for Bun to handle
|
||||
options.jsx = true;
|
||||
|
||||
let path = match handle.path() {
|
||||
Ok(path) => path,
|
||||
Err(e) => {
|
||||
handle.log_error(&format!("Failed to get path: {:?}", e));
|
||||
return;
|
||||
}
|
||||
};
|
||||
options.filepath = Some(path.to_string());
|
||||
|
||||
match compile(&source_str, &options) {
|
||||
Ok(compiled) => {
|
||||
handle.set_output_source_code(compiled, BunLoader::BUN_LOADER_JSX);
|
||||
}
|
||||
Err(_) => {
|
||||
handle.log_error("Failed to compile MDX");
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"name": "bun-debug-adapter-protocol",
|
||||
"version": "0.0.1",
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"semver": "^7.5.4",
|
||||
"source-map-js": "^1.0.2"
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
import type { InspectorEventMap } from "../../../bun-inspector-protocol/src/inspector";
|
||||
import type { JSC } from "../../../bun-inspector-protocol/src/protocol";
|
||||
import type { DAP } from "../protocol";
|
||||
// @ts-ignore
|
||||
import { ChildProcess, spawn } from "node:child_process";
|
||||
import { EventEmitter } from "node:events";
|
||||
import { AddressInfo, createServer } from "node:net";
|
||||
import { AddressInfo, createServer, Socket } from "node:net";
|
||||
import * as path from "node:path";
|
||||
import { remoteObjectToString, WebSocketInspector } from "../../../bun-inspector-protocol/index";
|
||||
import { randomUnixPath, TCPSocketSignal, UnixSignal } from "./signal";
|
||||
import { Location, SourceMap } from "./sourcemap";
|
||||
import { remoteObjectToString, WebSocketInspector } from "../../../bun-inspector-protocol/index.ts";
|
||||
import type { Inspector, InspectorEventMap } from "../../../bun-inspector-protocol/src/inspector/index.d.ts";
|
||||
import { NodeSocketInspector } from "../../../bun-inspector-protocol/src/inspector/node-socket.ts";
|
||||
import type { JSC } from "../../../bun-inspector-protocol/src/protocol/index.d.ts";
|
||||
import type { DAP } from "../protocol/index.d.ts";
|
||||
import { randomUnixPath, TCPSocketSignal, UnixSignal } from "./signal.ts";
|
||||
import { Location, SourceMap } from "./sourcemap.ts";
|
||||
|
||||
export async function getAvailablePort(): Promise<number> {
|
||||
const server = createServer();
|
||||
server.listen(0);
|
||||
return new Promise((resolve, reject) => {
|
||||
return new Promise(resolve => {
|
||||
server.on("listening", () => {
|
||||
const { port } = server.address() as AddressInfo;
|
||||
server.close(() => {
|
||||
@@ -105,7 +105,18 @@ const capabilities: DAP.Capabilities = {
|
||||
|
||||
type InitializeRequest = DAP.InitializeRequest & {
|
||||
supportsConfigurationDoneRequest?: boolean;
|
||||
};
|
||||
enableControlFlowProfiler?: boolean;
|
||||
enableDebugger?: boolean;
|
||||
} & (
|
||||
| {
|
||||
enableLifecycleAgentReporter?: false;
|
||||
sendImmediatePreventExit?: false;
|
||||
}
|
||||
| {
|
||||
enableLifecycleAgentReporter: true;
|
||||
sendImmediatePreventExit?: boolean;
|
||||
}
|
||||
);
|
||||
|
||||
type LaunchRequest = DAP.LaunchRequest & {
|
||||
runtime?: string;
|
||||
@@ -231,10 +242,14 @@ function normalizeSourcePath(sourcePath: string, untitledDocPath?: string, bunEv
|
||||
return path.normalize(sourcePath);
|
||||
}
|
||||
|
||||
export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements IDebugAdapter {
|
||||
export abstract class BaseDebugAdapter<T extends Inspector = Inspector>
|
||||
extends EventEmitter<DebugAdapterEventMap>
|
||||
implements IDebugAdapter
|
||||
{
|
||||
protected readonly inspector: T;
|
||||
protected options?: DebuggerOptions;
|
||||
|
||||
#threadId: number;
|
||||
#inspector: WebSocketInspector;
|
||||
#process?: ChildProcess;
|
||||
#sourceId: number;
|
||||
#pendingSources: Map<string, ((source: Source) => void)[]>;
|
||||
#sources: Map<string | number, Source>;
|
||||
@@ -247,20 +262,21 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
#targets: Map<number, Target>;
|
||||
#variableId: number;
|
||||
#variables: Map<number, Variable>;
|
||||
#initialized?: InitializeRequest;
|
||||
#options?: DebuggerOptions;
|
||||
#untitledDocPath?: string;
|
||||
#bunEvalPath?: string;
|
||||
#initialized?: InitializeRequest;
|
||||
|
||||
constructor(url?: string | URL, untitledDocPath?: string, bunEvalPath?: string) {
|
||||
protected constructor(inspector: T, untitledDocPath?: string, bunEvalPath?: string) {
|
||||
super();
|
||||
this.#untitledDocPath = untitledDocPath;
|
||||
this.#bunEvalPath = bunEvalPath;
|
||||
this.#threadId = threadId++;
|
||||
this.#inspector = new WebSocketInspector(url);
|
||||
const emit = this.#inspector.emit.bind(this.#inspector);
|
||||
this.#inspector.emit = (event, ...args) => {
|
||||
this.inspector = inspector;
|
||||
const emit = this.inspector.emit.bind(this.inspector);
|
||||
this.inspector.emit = (event, ...args) => {
|
||||
let sent = false;
|
||||
sent ||= emit(event, ...args);
|
||||
sent ||= this.emit(event, ...(args as any));
|
||||
sent ||= this.emit(event as keyof JSC.EventMap, ...(args as any));
|
||||
return sent;
|
||||
};
|
||||
this.#sourceId = 1;
|
||||
@@ -274,26 +290,27 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
this.#targets = new Map();
|
||||
this.#variableId = 1;
|
||||
this.#variables = new Map();
|
||||
this.#untitledDocPath = untitledDocPath;
|
||||
this.#bunEvalPath = bunEvalPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the inspector url.
|
||||
* Gets the inspector url. This is deprecated and exists for compat.
|
||||
* @deprecated You should get the inspector directly (with .getInspector()), and if it's a WebSocketInspector you can access `.url` directly.
|
||||
*/
|
||||
get url(): string {
|
||||
return this.#inspector.url;
|
||||
// This code has been migrated from a time when the inspector was always a WebSocketInspector.
|
||||
if (this.inspector instanceof WebSocketInspector) {
|
||||
return this.inspector.url;
|
||||
}
|
||||
|
||||
throw new Error("Inspector does not offer a URL");
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts the inspector.
|
||||
* @param url the inspector url
|
||||
* @returns if the inspector was able to connect
|
||||
*/
|
||||
start(url?: string): Promise<boolean> {
|
||||
return this.#attach({ url });
|
||||
public getInspector() {
|
||||
return this.inspector;
|
||||
}
|
||||
|
||||
abstract start(...args: unknown[]): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Sends a request to the JavaScript inspector.
|
||||
* @param method the method name
|
||||
@@ -306,7 +323,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
* console.log(result.value); // 2
|
||||
*/
|
||||
async send<M extends keyof JSC.ResponseMap>(method: M, params?: JSC.RequestMap[M]): Promise<JSC.ResponseMap[M]> {
|
||||
return this.#inspector.send(method, params);
|
||||
return this.inspector.send(method, params);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -347,7 +364,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
return sent;
|
||||
}
|
||||
|
||||
#emit<E extends keyof DAP.EventMap>(event: E, body?: DAP.EventMap[E]): void {
|
||||
protected emitAdapterEvent<E extends keyof DAP.EventMap>(event: E, body?: DAP.EventMap[E]): void {
|
||||
this.emit("Adapter.event", {
|
||||
type: "event",
|
||||
seq: 0,
|
||||
@@ -359,7 +376,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
#emitAfterResponse<E extends keyof DAP.EventMap>(event: E, body?: DAP.EventMap[E]): void {
|
||||
this.once("Adapter.response", () => {
|
||||
process.nextTick(() => {
|
||||
this.#emit(event, body);
|
||||
this.emitAdapterEvent(event, body);
|
||||
});
|
||||
});
|
||||
}
|
||||
@@ -437,19 +454,37 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
this.emit(`Adapter.${name}` as keyof DebugAdapterEventMap, body);
|
||||
}
|
||||
|
||||
initialize(request: InitializeRequest): DAP.InitializeResponse {
|
||||
public initialize(request: InitializeRequest): DAP.InitializeResponse {
|
||||
this.#initialized = request;
|
||||
|
||||
this.send("Inspector.enable");
|
||||
this.send("Runtime.enable");
|
||||
this.send("Console.enable");
|
||||
this.send("Debugger.enable").catch(error => {
|
||||
const { message } = unknownToError(error);
|
||||
if (message !== "Debugger domain already enabled") {
|
||||
throw error;
|
||||
|
||||
if (request.enableControlFlowProfiler) {
|
||||
this.send("Runtime.enableControlFlowProfiler");
|
||||
}
|
||||
|
||||
if (request.enableLifecycleAgentReporter) {
|
||||
this.send("LifecycleReporter.enable");
|
||||
|
||||
if (request.sendImmediatePreventExit) {
|
||||
this.send("LifecycleReporter.preventExit");
|
||||
}
|
||||
});
|
||||
this.send("Debugger.setAsyncStackTraceDepth", { depth: 200 });
|
||||
}
|
||||
|
||||
// use !== false because by default if unspecified we want to enable the debugger
|
||||
// and this option didn't exist beforehand, so we can't make it non-optional
|
||||
if (request.enableDebugger !== false) {
|
||||
this.send("Debugger.enable").catch(error => {
|
||||
const { message } = unknownToError(error);
|
||||
if (message !== "Debugger domain already enabled") {
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
|
||||
this.send("Debugger.setAsyncStackTraceDepth", { depth: 200 });
|
||||
}
|
||||
|
||||
const { clientID, supportsConfigurationDoneRequest } = request;
|
||||
if (!supportsConfigurationDoneRequest && clientID !== "vscode") {
|
||||
@@ -463,248 +498,20 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
configurationDone(): void {
|
||||
// If the client requested that `noDebug` mode be enabled,
|
||||
// then we need to disable all breakpoints and pause on statements.
|
||||
const active = !this.#options?.noDebug;
|
||||
const active = !this.options?.noDebug;
|
||||
this.send("Debugger.setBreakpointsActive", { active });
|
||||
|
||||
// Tell the debugger that it's ready to start execution.
|
||||
this.send("Inspector.initialized");
|
||||
}
|
||||
|
||||
async launch(request: DAP.LaunchRequest): Promise<void> {
|
||||
this.#options = { ...request, type: "launch" };
|
||||
|
||||
try {
|
||||
await this.#launch(request);
|
||||
} catch (error) {
|
||||
// Some clients, like VSCode, will show a system-level popup when a `launch` request fails.
|
||||
// Instead, we want to show the error as a sidebar notification.
|
||||
const { message } = unknownToError(error);
|
||||
this.#emit("output", {
|
||||
category: "stderr",
|
||||
output: `Failed to start debugger.\n${message}`,
|
||||
});
|
||||
this.terminate();
|
||||
}
|
||||
}
|
||||
|
||||
async #launch(request: LaunchRequest): Promise<void> {
|
||||
const {
|
||||
runtime = "bun",
|
||||
runtimeArgs = [],
|
||||
program,
|
||||
args = [],
|
||||
cwd,
|
||||
env = {},
|
||||
strictEnv = false,
|
||||
watchMode = false,
|
||||
stopOnEntry = false,
|
||||
__skipValidation = false,
|
||||
stdin,
|
||||
} = request;
|
||||
|
||||
if (!__skipValidation && !program) {
|
||||
throw new Error("No program specified");
|
||||
}
|
||||
|
||||
const processArgs = [...runtimeArgs];
|
||||
|
||||
if (program === "-" && stdin) {
|
||||
processArgs.push("--eval", stdin);
|
||||
} else if (program) {
|
||||
processArgs.push(program);
|
||||
}
|
||||
|
||||
processArgs.push(...args);
|
||||
|
||||
if (program && isTestJavaScript(program) && !runtimeArgs.includes("test")) {
|
||||
processArgs.unshift("test");
|
||||
}
|
||||
|
||||
if (watchMode && !runtimeArgs.includes("--watch") && !runtimeArgs.includes("--hot")) {
|
||||
processArgs.unshift(watchMode === "hot" ? "--hot" : "--watch");
|
||||
}
|
||||
|
||||
const processEnv = strictEnv
|
||||
? {
|
||||
...env,
|
||||
}
|
||||
: {
|
||||
...process.env,
|
||||
...env,
|
||||
};
|
||||
|
||||
if (process.platform !== "win32") {
|
||||
// we're on unix
|
||||
const url = `ws+unix://${randomUnixPath()}`;
|
||||
const signal = new UnixSignal();
|
||||
|
||||
signal.on("Signal.received", () => {
|
||||
this.#attach({ url });
|
||||
});
|
||||
|
||||
this.once("Adapter.terminated", () => {
|
||||
signal.close();
|
||||
});
|
||||
|
||||
const query = stopOnEntry ? "break=1" : "wait=1";
|
||||
processEnv["BUN_INSPECT"] = `${url}?${query}`;
|
||||
processEnv["BUN_INSPECT_NOTIFY"] = signal.url;
|
||||
|
||||
// This is probably not correct, but it's the best we can do for now.
|
||||
processEnv["FORCE_COLOR"] = "1";
|
||||
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
|
||||
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
|
||||
|
||||
const started = await this.#spawn({
|
||||
command: runtime,
|
||||
args: processArgs,
|
||||
env: processEnv,
|
||||
cwd,
|
||||
isDebugee: true,
|
||||
});
|
||||
|
||||
if (!started) {
|
||||
throw new Error("Program could not be started.");
|
||||
}
|
||||
} else {
|
||||
// we're on windows
|
||||
// Create TCPSocketSignal
|
||||
const url = `ws://127.0.0.1:${await getAvailablePort()}/${getRandomId()}`; // 127.0.0.1 so it resolves correctly on windows
|
||||
const signal = new TCPSocketSignal(await getAvailablePort());
|
||||
|
||||
signal.on("Signal.received", async () => {
|
||||
this.#attach({ url });
|
||||
});
|
||||
|
||||
this.once("Adapter.terminated", () => {
|
||||
signal.close();
|
||||
});
|
||||
|
||||
const query = stopOnEntry ? "break=1" : "wait=1";
|
||||
processEnv["BUN_INSPECT"] = `${url}?${query}`;
|
||||
processEnv["BUN_INSPECT_NOTIFY"] = signal.url; // 127.0.0.1 so it resolves correctly on windows
|
||||
|
||||
// This is probably not correct, but it's the best we can do for now.
|
||||
processEnv["FORCE_COLOR"] = "1";
|
||||
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
|
||||
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
|
||||
|
||||
const started = await this.#spawn({
|
||||
command: runtime,
|
||||
args: processArgs,
|
||||
env: processEnv,
|
||||
cwd,
|
||||
isDebugee: true,
|
||||
});
|
||||
|
||||
if (!started) {
|
||||
throw new Error("Program could not be started.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async #spawn(options: {
|
||||
command: string;
|
||||
args?: string[];
|
||||
cwd?: string;
|
||||
env?: Record<string, string | undefined>;
|
||||
isDebugee?: boolean;
|
||||
}): Promise<boolean> {
|
||||
const { command, args = [], cwd, env, isDebugee } = options;
|
||||
const request = { command, args, cwd, env };
|
||||
this.emit("Process.requested", request);
|
||||
|
||||
let subprocess: ChildProcess;
|
||||
try {
|
||||
subprocess = spawn(command, args, {
|
||||
...request,
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
});
|
||||
} catch (cause) {
|
||||
this.emit("Process.exited", new Error("Failed to spawn process", { cause }), null);
|
||||
return false;
|
||||
}
|
||||
|
||||
subprocess.on("spawn", () => {
|
||||
this.emit("Process.spawned", subprocess);
|
||||
|
||||
if (isDebugee) {
|
||||
this.#process = subprocess;
|
||||
this.#emit("process", {
|
||||
name: `${command} ${args.join(" ")}`,
|
||||
systemProcessId: subprocess.pid,
|
||||
isLocalProcess: true,
|
||||
startMethod: "launch",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
subprocess.on("exit", (code, signal) => {
|
||||
this.emit("Process.exited", code, signal);
|
||||
|
||||
if (isDebugee) {
|
||||
this.#process = undefined;
|
||||
this.#emit("exited", {
|
||||
exitCode: code ?? -1,
|
||||
});
|
||||
this.#emit("terminated");
|
||||
}
|
||||
});
|
||||
|
||||
subprocess.stdout?.on("data", data => {
|
||||
this.emit("Process.stdout", data.toString());
|
||||
});
|
||||
|
||||
subprocess.stderr?.on("data", data => {
|
||||
this.emit("Process.stderr", data.toString());
|
||||
});
|
||||
|
||||
return new Promise(resolve => {
|
||||
subprocess.on("spawn", () => resolve(true));
|
||||
subprocess.on("exit", () => resolve(false));
|
||||
subprocess.on("error", () => resolve(false));
|
||||
});
|
||||
}
|
||||
|
||||
async attach(request: AttachRequest): Promise<void> {
|
||||
this.#options = { ...request, type: "attach" };
|
||||
|
||||
try {
|
||||
await this.#attach(request);
|
||||
} catch (error) {
|
||||
// Some clients, like VSCode, will show a system-level popup when a `launch` request fails.
|
||||
// Instead, we want to show the error as a sidebar notification.
|
||||
const { message } = unknownToError(error);
|
||||
this.#emit("output", {
|
||||
category: "stderr",
|
||||
output: `Failed to start debugger.\n${message}`,
|
||||
});
|
||||
this.terminate();
|
||||
}
|
||||
}
|
||||
|
||||
async #attach(request: AttachRequest): Promise<boolean> {
|
||||
const { url } = request;
|
||||
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const ok = await this.#inspector.start(url);
|
||||
if (ok) {
|
||||
return true;
|
||||
}
|
||||
await new Promise(resolve => setTimeout(resolve, 100 * i));
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
// Required so all implementations have a method that .terminate() always calls.
|
||||
// This is useful because we don't want any implementors to forget
|
||||
protected abstract exitJSProcess(): void;
|
||||
|
||||
terminate(): void {
|
||||
if (!this.#process?.kill()) {
|
||||
this.#evaluate({
|
||||
expression: "process.exit(0)",
|
||||
});
|
||||
}
|
||||
|
||||
this.#emit("terminated");
|
||||
this.exitJSProcess();
|
||||
this.emitAdapterEvent("terminated");
|
||||
}
|
||||
|
||||
disconnect(request: DAP.DisconnectRequest): void {
|
||||
@@ -1077,7 +884,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
}
|
||||
|
||||
for (const breakpoint of breakpoints) {
|
||||
this.#emit("breakpoint", {
|
||||
this.emitAdapterEvent("breakpoint", {
|
||||
reason: "removed",
|
||||
breakpoint,
|
||||
});
|
||||
@@ -1316,7 +1123,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
const callFrameId = this.#getCallFrameId(frameId);
|
||||
const objectGroup = callFrameId ? "debugger" : context;
|
||||
|
||||
const { result, wasThrown } = await this.#evaluate({
|
||||
const { result, wasThrown } = await this.evaluateInternal({
|
||||
expression,
|
||||
objectGroup,
|
||||
callFrameId,
|
||||
@@ -1337,7 +1144,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
};
|
||||
}
|
||||
|
||||
async #evaluate(options: {
|
||||
protected async evaluateInternal(options: {
|
||||
expression: string;
|
||||
objectGroup?: string;
|
||||
callFrameId?: string;
|
||||
@@ -1361,7 +1168,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
const callFrameId = this.#getCallFrameId(frameId);
|
||||
|
||||
const { expression, hint } = completionToExpression(text);
|
||||
const { result, wasThrown } = await this.#evaluate({
|
||||
const { result, wasThrown } = await this.evaluateInternal({
|
||||
expression: expression || "this",
|
||||
callFrameId,
|
||||
objectGroup: "repl",
|
||||
@@ -1393,33 +1200,29 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
}
|
||||
|
||||
["Inspector.connected"](): void {
|
||||
this.#emit("output", {
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "debug console",
|
||||
output: "Debugger attached.\n",
|
||||
});
|
||||
|
||||
this.#emit("initialized");
|
||||
this.emitAdapterEvent("initialized");
|
||||
}
|
||||
|
||||
async ["Inspector.disconnected"](error?: Error): Promise<void> {
|
||||
this.#emit("output", {
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "debug console",
|
||||
output: "Debugger detached.\n",
|
||||
});
|
||||
|
||||
if (error) {
|
||||
const { message } = error;
|
||||
this.#emit("output", {
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "stderr",
|
||||
output: `${message}\n`,
|
||||
});
|
||||
}
|
||||
|
||||
this.#reset();
|
||||
|
||||
if (this.#process?.exitCode !== null) {
|
||||
this.#emit("terminated");
|
||||
}
|
||||
this.resetInternal();
|
||||
}
|
||||
|
||||
async ["Debugger.scriptParsed"](event: JSC.Debugger.ScriptParsedEvent): Promise<void> {
|
||||
@@ -1470,7 +1273,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
return;
|
||||
}
|
||||
|
||||
this.#emit("output", {
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "stderr",
|
||||
output: errorMessage,
|
||||
line: this.#lineFrom0BasedLine(errorLine),
|
||||
@@ -1498,7 +1301,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
const breakpoint = breakpoints[i];
|
||||
const oldBreakpoint = oldBreakpoints[i];
|
||||
|
||||
this.#emit("breakpoint", {
|
||||
this.emitAdapterEvent("breakpoint", {
|
||||
reason: "changed",
|
||||
breakpoint: {
|
||||
...breakpoint,
|
||||
@@ -1581,7 +1384,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
}
|
||||
}
|
||||
|
||||
this.#emit("stopped", {
|
||||
this.emitAdapterEvent("stopped", {
|
||||
threadId: this.#threadId,
|
||||
reason: this.#stopped,
|
||||
hitBreakpointIds,
|
||||
@@ -1598,20 +1401,20 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
}
|
||||
}
|
||||
|
||||
this.#emit("continued", {
|
||||
this.emitAdapterEvent("continued", {
|
||||
threadId: this.#threadId,
|
||||
});
|
||||
}
|
||||
|
||||
["Process.stdout"](output: string): void {
|
||||
this.#emit("output", {
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "debug console",
|
||||
output,
|
||||
});
|
||||
}
|
||||
|
||||
["Process.stderr"](output: string): void {
|
||||
this.#emit("output", {
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "debug console",
|
||||
output,
|
||||
});
|
||||
@@ -1695,8 +1498,8 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
|
||||
// If the path changed or the source has a source reference,
|
||||
// the old source should be marked as removed.
|
||||
if (path !== oldPath || sourceReference) {
|
||||
this.#emit("loadedSource", {
|
||||
if (path !== oldPath /*|| sourceReference*/) {
|
||||
this.emitAdapterEvent("loadedSource", {
|
||||
reason: "removed",
|
||||
source: oldSource,
|
||||
});
|
||||
@@ -1706,7 +1509,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
this.#sources.set(sourceId, source);
|
||||
this.#sources.set(scriptId, source);
|
||||
|
||||
this.#emit("loadedSource", {
|
||||
this.emitAdapterEvent("loadedSource", {
|
||||
// If the reason is "changed", the source will be retrieved using
|
||||
// the `source` command, which is why it cannot be set when `path` is present.
|
||||
reason: oldSource && !path ? "changed" : "new",
|
||||
@@ -1762,9 +1565,9 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
}
|
||||
|
||||
// If the source is not present, it may not have been loaded yet.
|
||||
let resolves = this.#pendingSources.get(sourceId);
|
||||
let resolves = this.#pendingSources.get(sourceId.toString());
|
||||
if (!resolves) {
|
||||
this.#pendingSources.set(sourceId, (resolves = []));
|
||||
this.#pendingSources.set(sourceId.toString(), (resolves = []));
|
||||
}
|
||||
|
||||
return new Promise(resolve => {
|
||||
@@ -2016,7 +1819,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
const callFrameId = this.#getCallFrameId(frameId);
|
||||
const objectGroup = callFrameId ? "debugger" : "repl";
|
||||
|
||||
const { result, wasThrown } = await this.#evaluate({
|
||||
const { result, wasThrown } = await this.evaluateInternal({
|
||||
expression: `${expression} = (${value});`,
|
||||
objectGroup: "repl",
|
||||
callFrameId,
|
||||
@@ -2216,12 +2019,11 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
}
|
||||
|
||||
close(): void {
|
||||
this.#process?.kill();
|
||||
this.#inspector.close();
|
||||
this.#reset();
|
||||
this.inspector.close();
|
||||
this.resetInternal();
|
||||
}
|
||||
|
||||
#reset(): void {
|
||||
protected resetInternal(): void {
|
||||
this.#pendingSources.clear();
|
||||
this.#sources.clear();
|
||||
this.#stackFrames.length = 0;
|
||||
@@ -2232,10 +2034,309 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
this.#functionBreakpoints.clear();
|
||||
this.#targets.clear();
|
||||
this.#variables.clear();
|
||||
this.#options = undefined;
|
||||
this.options = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a debug adapter that connects over a unix/tcp socket, usually
|
||||
* in the case of a reverse connection. This is used by the vscode extension.
|
||||
*
|
||||
* @warning This will gracefully handle socket closure; you don't need to add extra handling.
|
||||
*/
|
||||
export class NodeSocketDebugAdapter extends BaseDebugAdapter<NodeSocketInspector> {
|
||||
public constructor(socket: Socket, untitledDocPath?: string, bunEvalPath?: string) {
|
||||
super(new NodeSocketInspector(socket), untitledDocPath, bunEvalPath);
|
||||
|
||||
socket.once("close", () => {
|
||||
this.resetInternal();
|
||||
});
|
||||
}
|
||||
|
||||
protected exitJSProcess(): void {
|
||||
this.evaluateInternal({
|
||||
expression: "process.exit(0)",
|
||||
});
|
||||
}
|
||||
|
||||
public async start() {
|
||||
const ok = await this.inspector.start();
|
||||
return ok;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The default debug adapter. Connects via WebSocket.
|
||||
*/
|
||||
export class WebSocketDebugAdapter extends BaseDebugAdapter<WebSocketInspector> {
|
||||
#process?: ChildProcess;
|
||||
|
||||
public constructor(url?: string | URL, untitledDocPath?: string, bunEvalPath?: string) {
|
||||
super(new WebSocketInspector(url), untitledDocPath, bunEvalPath);
|
||||
}
|
||||
|
||||
async ["Inspector.disconnected"](error?: Error): Promise<void> {
|
||||
await super["Inspector.disconnected"](error);
|
||||
|
||||
if (this.#process?.exitCode !== null) {
|
||||
this.emitAdapterEvent("terminated");
|
||||
}
|
||||
}
|
||||
|
||||
protected exitJSProcess() {
|
||||
if (!this.#process?.kill()) {
|
||||
this.evaluateInternal({
|
||||
expression: "process.exit(0)",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts the inspector.
|
||||
* @param url the inspector url; defaults to the one provided in the constructor (if any)
|
||||
* @returns whether the inspector was able to connect
|
||||
*/
|
||||
start(url?: string): Promise<boolean> {
|
||||
return this.#attach({ url });
|
||||
}
|
||||
|
||||
close() {
|
||||
this.#process?.kill();
|
||||
super.close();
|
||||
}
|
||||
|
||||
async launch(request: DAP.LaunchRequest): Promise<void> {
|
||||
this.options = { ...request, type: "launch" };
|
||||
|
||||
try {
|
||||
await this.#launch(request);
|
||||
} catch (error) {
|
||||
// Some clients, like VSCode, will show a system-level popup when a `launch` request fails.
|
||||
// Instead, we want to show the error as a sidebar notification.
|
||||
const { message } = unknownToError(error);
|
||||
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "stderr",
|
||||
output: `Failed to start debugger.\n${message}`,
|
||||
});
|
||||
|
||||
this.terminate();
|
||||
}
|
||||
}
|
||||
|
||||
async #launch(request: LaunchRequest): Promise<void> {
|
||||
const {
|
||||
runtime = "bun",
|
||||
runtimeArgs = [],
|
||||
program,
|
||||
args = [],
|
||||
cwd,
|
||||
env = {},
|
||||
strictEnv = false,
|
||||
watchMode = false,
|
||||
stopOnEntry = false,
|
||||
__skipValidation = false,
|
||||
stdin,
|
||||
} = request;
|
||||
|
||||
if (!__skipValidation && !program) {
|
||||
throw new Error("No program specified");
|
||||
}
|
||||
|
||||
const processArgs = [...runtimeArgs];
|
||||
|
||||
if (program === "-" && stdin) {
|
||||
processArgs.push("--eval", stdin);
|
||||
} else if (program) {
|
||||
processArgs.push(program);
|
||||
}
|
||||
|
||||
processArgs.push(...args);
|
||||
|
||||
if (program && isTestJavaScript(program) && !runtimeArgs.includes("test")) {
|
||||
processArgs.unshift("test");
|
||||
}
|
||||
|
||||
if (watchMode && !runtimeArgs.includes("--watch") && !runtimeArgs.includes("--hot")) {
|
||||
processArgs.unshift(watchMode === "hot" ? "--hot" : "--watch");
|
||||
}
|
||||
|
||||
const processEnv = strictEnv
|
||||
? {
|
||||
...env,
|
||||
}
|
||||
: {
|
||||
...process.env,
|
||||
...env,
|
||||
};
|
||||
|
||||
if (process.platform !== "win32") {
|
||||
// we're on unix
|
||||
const url = `ws+unix://${randomUnixPath()}`;
|
||||
const signal = new UnixSignal();
|
||||
|
||||
signal.on("Signal.received", () => {
|
||||
this.#attach({ url });
|
||||
});
|
||||
|
||||
this.once("Adapter.terminated", () => {
|
||||
signal.close();
|
||||
});
|
||||
|
||||
const query = stopOnEntry ? "break=1" : "wait=1";
|
||||
processEnv["BUN_INSPECT"] = `${url}?${query}`;
|
||||
processEnv["BUN_INSPECT_NOTIFY"] = signal.url;
|
||||
|
||||
// This is probably not correct, but it's the best we can do for now.
|
||||
processEnv["FORCE_COLOR"] = "1";
|
||||
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
|
||||
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
|
||||
|
||||
const started = await this.#spawn({
|
||||
command: runtime,
|
||||
args: processArgs,
|
||||
env: processEnv,
|
||||
cwd,
|
||||
isDebugee: true,
|
||||
});
|
||||
|
||||
if (!started) {
|
||||
throw new Error("Program could not be started.");
|
||||
}
|
||||
} else {
|
||||
// we're on windows
|
||||
// Create TCPSocketSignal
|
||||
const url = `ws://127.0.0.1:${await getAvailablePort()}/${getRandomId()}`; // 127.0.0.1 so it resolves correctly on windows
|
||||
const signal = new TCPSocketSignal(await getAvailablePort());
|
||||
|
||||
signal.on("Signal.received", async () => {
|
||||
this.#attach({ url });
|
||||
});
|
||||
|
||||
this.once("Adapter.terminated", () => {
|
||||
signal.close();
|
||||
});
|
||||
|
||||
const query = stopOnEntry ? "break=1" : "wait=1";
|
||||
processEnv["BUN_INSPECT"] = `${url}?${query}`;
|
||||
processEnv["BUN_INSPECT_NOTIFY"] = signal.url; // 127.0.0.1 so it resolves correctly on windows
|
||||
|
||||
// This is probably not correct, but it's the best we can do for now.
|
||||
processEnv["FORCE_COLOR"] = "1";
|
||||
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
|
||||
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
|
||||
|
||||
const started = await this.#spawn({
|
||||
command: runtime,
|
||||
args: processArgs,
|
||||
env: processEnv,
|
||||
cwd,
|
||||
isDebugee: true,
|
||||
});
|
||||
|
||||
if (!started) {
|
||||
throw new Error("Program could not be started.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async #spawn(options: {
|
||||
command: string;
|
||||
args?: string[];
|
||||
cwd?: string;
|
||||
env?: Record<string, string | undefined>;
|
||||
isDebugee?: boolean;
|
||||
}): Promise<boolean> {
|
||||
const { command, args = [], cwd, env, isDebugee } = options;
|
||||
const request = { command, args, cwd, env };
|
||||
this.emit("Process.requested", request);
|
||||
|
||||
let subprocess: ChildProcess;
|
||||
try {
|
||||
subprocess = spawn(command, args, {
|
||||
...request,
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
});
|
||||
} catch (cause) {
|
||||
this.emit("Process.exited", new Error("Failed to spawn process", { cause }), null);
|
||||
return false;
|
||||
}
|
||||
|
||||
subprocess.on("spawn", () => {
|
||||
this.emit("Process.spawned", subprocess);
|
||||
|
||||
if (isDebugee) {
|
||||
this.#process = subprocess;
|
||||
this.emitAdapterEvent("process", {
|
||||
name: `${command} ${args.join(" ")}`,
|
||||
systemProcessId: subprocess.pid,
|
||||
isLocalProcess: true,
|
||||
startMethod: "launch",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
subprocess.on("exit", (code, signal) => {
|
||||
this.emit("Process.exited", code, signal);
|
||||
|
||||
if (isDebugee) {
|
||||
this.#process = undefined;
|
||||
this.emitAdapterEvent("exited", {
|
||||
exitCode: code ?? -1,
|
||||
});
|
||||
this.emitAdapterEvent("terminated");
|
||||
}
|
||||
});
|
||||
|
||||
subprocess.stdout?.on("data", data => {
|
||||
this.emit("Process.stdout", data.toString());
|
||||
});
|
||||
|
||||
subprocess.stderr?.on("data", data => {
|
||||
this.emit("Process.stderr", data.toString());
|
||||
});
|
||||
|
||||
return new Promise(resolve => {
|
||||
subprocess.on("spawn", () => resolve(true));
|
||||
subprocess.on("exit", () => resolve(false));
|
||||
subprocess.on("error", () => resolve(false));
|
||||
});
|
||||
}
|
||||
|
||||
async attach(request: AttachRequest): Promise<void> {
|
||||
this.options = { ...request, type: "attach" };
|
||||
|
||||
try {
|
||||
await this.#attach(request);
|
||||
} catch (error) {
|
||||
// Some clients, like VSCode, will show a system-level popup when a `launch` request fails.
|
||||
// Instead, we want to show the error as a sidebar notification.
|
||||
const { message } = unknownToError(error);
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "stderr",
|
||||
output: `Failed to start debugger.\n${message}`,
|
||||
});
|
||||
this.terminate();
|
||||
}
|
||||
}
|
||||
|
||||
async #attach(request: AttachRequest): Promise<boolean> {
|
||||
const { url } = request;
|
||||
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const ok = await this.inspector.start(url);
|
||||
if (ok) {
|
||||
return true;
|
||||
}
|
||||
await new Promise(resolve => setTimeout(resolve, 100 * i));
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export const DebugAdapter = WebSocketDebugAdapter;
|
||||
|
||||
function stoppedReason(reason: JSC.Debugger.PausedEvent["reason"]): DAP.StoppedEvent["reason"] {
|
||||
switch (reason) {
|
||||
case "Breakpoint":
|
||||
|
||||
@@ -0,0 +1,117 @@
|
||||
import type { Socket } from "node:net";
|
||||
const enum FramerState {
|
||||
WaitingForLength,
|
||||
WaitingForMessage,
|
||||
}
|
||||
|
||||
let socketFramerMessageLengthBuffer: Buffer;
|
||||
export class SocketFramer {
|
||||
state: FramerState = FramerState.WaitingForLength;
|
||||
pendingLength: number = 0;
|
||||
sizeBuffer: Buffer = Buffer.alloc(4);
|
||||
sizeBufferIndex: number = 0;
|
||||
bufferedData: Buffer = Buffer.alloc(0);
|
||||
socket: Socket;
|
||||
private onMessage: (message: string | string[]) => void;
|
||||
|
||||
constructor(socket: Socket, onMessage: (message: string | string[]) => void) {
|
||||
this.socket = socket;
|
||||
this.onMessage = onMessage;
|
||||
|
||||
if (!socketFramerMessageLengthBuffer) {
|
||||
socketFramerMessageLengthBuffer = Buffer.alloc(4);
|
||||
}
|
||||
|
||||
this.reset();
|
||||
}
|
||||
|
||||
reset(): void {
|
||||
this.state = FramerState.WaitingForLength;
|
||||
this.bufferedData = Buffer.alloc(0);
|
||||
this.sizeBufferIndex = 0;
|
||||
this.sizeBuffer = Buffer.alloc(4);
|
||||
}
|
||||
|
||||
send(data: string): void {
|
||||
socketFramerMessageLengthBuffer.writeUInt32BE(data.length, 0);
|
||||
this.socket.write(socketFramerMessageLengthBuffer);
|
||||
this.socket.write(data);
|
||||
}
|
||||
|
||||
onData(data: Buffer): void {
|
||||
this.bufferedData = this.bufferedData.length > 0 ? Buffer.concat([this.bufferedData, data]) : data;
|
||||
|
||||
let messagesToDeliver: string[] = [];
|
||||
let position = 0;
|
||||
|
||||
while (position < this.bufferedData.length) {
|
||||
// Need 4 bytes for the length
|
||||
if (this.bufferedData.length - position < 4) {
|
||||
break;
|
||||
}
|
||||
|
||||
// Read the length prefix
|
||||
const messageLength = this.bufferedData.readUInt32BE(position);
|
||||
|
||||
// Validate message length
|
||||
if (messageLength <= 0 || messageLength > 1024 * 1024) {
|
||||
// 1MB max
|
||||
// Try to resync by looking for the next valid message
|
||||
let newPosition = position + 1;
|
||||
let found = false;
|
||||
|
||||
while (newPosition < this.bufferedData.length - 4) {
|
||||
const testLength = this.bufferedData.readUInt32BE(newPosition);
|
||||
|
||||
if (testLength > 0 && testLength <= 1024 * 1024) {
|
||||
// Verify we can read the full message
|
||||
if (this.bufferedData.length - newPosition - 4 >= testLength) {
|
||||
const testMessage = this.bufferedData.toString("utf-8", newPosition + 4, newPosition + 4 + testLength);
|
||||
|
||||
if (testMessage.startsWith('{"')) {
|
||||
position = newPosition;
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
newPosition++;
|
||||
}
|
||||
|
||||
if (!found) {
|
||||
// Couldn't find a valid message, discard buffer up to this point
|
||||
this.bufferedData = this.bufferedData.slice(position + 4);
|
||||
return;
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if we have the complete message
|
||||
if (this.bufferedData.length - position - 4 < messageLength) {
|
||||
break;
|
||||
}
|
||||
|
||||
const message = this.bufferedData.toString("utf-8", position + 4, position + 4 + messageLength);
|
||||
if (message.startsWith('{"')) {
|
||||
messagesToDeliver.push(message);
|
||||
}
|
||||
|
||||
position += 4 + messageLength;
|
||||
}
|
||||
|
||||
if (position > 0) {
|
||||
this.bufferedData =
|
||||
position < this.bufferedData.length ? this.bufferedData.slice(position) : SocketFramer.emptyBuffer;
|
||||
}
|
||||
|
||||
if (messagesToDeliver.length === 1) {
|
||||
this.onMessage(messagesToDeliver[0]);
|
||||
} else if (messagesToDeliver.length > 1) {
|
||||
this.onMessage(messagesToDeliver);
|
||||
}
|
||||
}
|
||||
|
||||
private static emptyBuffer = Buffer.from([]);
|
||||
}
|
||||
@@ -11,6 +11,8 @@ export type UnixSignalEventMap = {
|
||||
"Signal.error": [Error];
|
||||
"Signal.received": [string];
|
||||
"Signal.closed": [];
|
||||
"Signal.Socket.closed": [socket: Socket];
|
||||
"Signal.Socket.connect": [socket: Socket];
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -21,7 +23,7 @@ export class UnixSignal extends EventEmitter<UnixSignalEventMap> {
|
||||
#server: Server;
|
||||
#ready: Promise<void>;
|
||||
|
||||
constructor(path?: string | URL) {
|
||||
constructor(path?: string | URL | undefined) {
|
||||
super();
|
||||
this.#path = path ? parseUnixPath(path) : randomUnixPath();
|
||||
this.#server = createServer();
|
||||
@@ -29,9 +31,13 @@ export class UnixSignal extends EventEmitter<UnixSignalEventMap> {
|
||||
this.#server.on("error", error => this.emit("Signal.error", error));
|
||||
this.#server.on("close", () => this.emit("Signal.closed"));
|
||||
this.#server.on("connection", socket => {
|
||||
this.emit("Signal.Socket.connect", socket);
|
||||
socket.on("data", data => {
|
||||
this.emit("Signal.received", data.toString());
|
||||
});
|
||||
socket.on("close", () => {
|
||||
this.emit("Signal.Socket.closed", socket);
|
||||
});
|
||||
});
|
||||
this.#ready = new Promise((resolve, reject) => {
|
||||
this.#server.on("listening", resolve);
|
||||
@@ -45,7 +51,7 @@ export class UnixSignal extends EventEmitter<UnixSignalEventMap> {
|
||||
console.log(event, ...args);
|
||||
}
|
||||
|
||||
return super.emit(event, ...args);
|
||||
return super.emit(event, ...(args as never));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -91,6 +97,8 @@ export type TCPSocketSignalEventMap = {
|
||||
"Signal.error": [Error];
|
||||
"Signal.closed": [];
|
||||
"Signal.received": [string];
|
||||
"Signal.Socket.closed": [socket: Socket];
|
||||
"Signal.Socket.connect": [socket: Socket];
|
||||
};
|
||||
|
||||
export class TCPSocketSignal extends EventEmitter {
|
||||
@@ -103,6 +111,8 @@ export class TCPSocketSignal extends EventEmitter {
|
||||
this.#port = port;
|
||||
|
||||
this.#server = createServer((socket: Socket) => {
|
||||
this.emit("Signal.Socket.connect", socket);
|
||||
|
||||
socket.on("data", data => {
|
||||
this.emit("Signal.received", data.toString());
|
||||
});
|
||||
@@ -112,10 +122,14 @@ export class TCPSocketSignal extends EventEmitter {
|
||||
});
|
||||
|
||||
socket.on("close", () => {
|
||||
this.emit("Signal.closed");
|
||||
this.emit("Signal.Socket.closed", socket);
|
||||
});
|
||||
});
|
||||
|
||||
this.#server.on("close", () => {
|
||||
this.emit("Signal.closed");
|
||||
});
|
||||
|
||||
this.#ready = new Promise((resolve, reject) => {
|
||||
this.#server.listen(this.#port, () => {
|
||||
this.emit("Signal.listening");
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { expect, test } from "bun:test";
|
||||
import { readFileSync } from "node:fs";
|
||||
import { SourceMap } from "./sourcemap";
|
||||
import { SourceMap } from "./sourcemap.js";
|
||||
|
||||
test("works without source map", () => {
|
||||
const sourceMap = getSourceMap("without-sourcemap.js");
|
||||
|
||||
@@ -21,7 +21,15 @@ export type Location = {
|
||||
);
|
||||
|
||||
export interface SourceMap {
|
||||
/**
|
||||
* Converts a location in the original source to a location in the generated source.
|
||||
* @param request A request
|
||||
*/
|
||||
generatedLocation(request: LocationRequest): Location;
|
||||
/**
|
||||
* Converts a location in the generated source to a location in the original source.
|
||||
* @param request A request
|
||||
*/
|
||||
originalLocation(request: LocationRequest): Location;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"lib": ["ESNext"],
|
||||
"module": "esnext",
|
||||
"module": "NodeNext",
|
||||
"target": "esnext",
|
||||
"moduleResolution": "nodenext",
|
||||
"moduleDetection": "force",
|
||||
"allowImportingTsExtensions": true,
|
||||
"noEmit": true,
|
||||
"composite": true,
|
||||
// "composite": true,
|
||||
"strict": true,
|
||||
"downlevelIteration": true,
|
||||
"skipLibCheck": true,
|
||||
@@ -15,7 +15,7 @@
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"inlineSourceMap": true,
|
||||
"allowJs": true,
|
||||
"outDir": "dist",
|
||||
"outDir": "dist"
|
||||
},
|
||||
"include": ["src", "scripts", "../bun-types/index.d.ts", "../bun-inspector-protocol/src"]
|
||||
"include": ["src", "scripts", "../bun-types/index.d.ts", "../bun-inspector-protocol/**/*.ts"]
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
export type * from "./src/inspector";
|
||||
export * from "./src/inspector/websocket";
|
||||
export type * from "./src/protocol";
|
||||
export * from "./src/util/preview";
|
||||
export type * from "./src/inspector/index.js";
|
||||
export * from "./src/inspector/websocket.js";
|
||||
export type * from "./src/protocol/index.js";
|
||||
export * from "./src/util/preview.js";
|
||||
|
||||
@@ -1,26 +1,7 @@
|
||||
import { spawnSync } from "node:child_process";
|
||||
import { readFileSync, writeFileSync } from "node:fs";
|
||||
import { readFileSync, writeFileSync, realpathSync } from "node:fs";
|
||||
import type { Domain, Property, Protocol } from "../src/protocol/schema";
|
||||
|
||||
run().catch(console.error);
|
||||
|
||||
async function run() {
|
||||
const cwd = new URL("../src/protocol/", import.meta.url);
|
||||
const runner = "Bun" in globalThis ? "bunx" : "npx";
|
||||
const write = (name: string, data: string) => {
|
||||
const path = new URL(name, cwd);
|
||||
writeFileSync(path, data);
|
||||
spawnSync(runner, ["prettier", "--write", path.pathname], { cwd, stdio: "ignore" });
|
||||
};
|
||||
const base = readFileSync(new URL("protocol.d.ts", cwd), "utf-8");
|
||||
const baseNoComments = base.replace(/\/\/.*/g, "");
|
||||
const jsc = await downloadJsc();
|
||||
write("jsc/protocol.json", JSON.stringify(jsc));
|
||||
write("jsc/index.d.ts", "// GENERATED - DO NOT EDIT\n" + formatProtocol(jsc, baseNoComments));
|
||||
const v8 = await downloadV8();
|
||||
write("v8/protocol.json", JSON.stringify(v8));
|
||||
write("v8/index.d.ts", "// GENERATED - DO NOT EDIT\n" + formatProtocol(v8, baseNoComments));
|
||||
}
|
||||
import path from "node:path";
|
||||
|
||||
function formatProtocol(protocol: Protocol, extraTs?: string): string {
|
||||
const { name, domains } = protocol;
|
||||
@@ -29,6 +10,7 @@ function formatProtocol(protocol: Protocol, extraTs?: string): string {
|
||||
let body = `export namespace ${name} {`;
|
||||
for (const { domain, types = [], events = [], commands = [] } of domains) {
|
||||
body += `export namespace ${domain} {`;
|
||||
|
||||
for (const type of types) {
|
||||
body += formatProperty(type);
|
||||
}
|
||||
@@ -153,32 +135,12 @@ async function downloadV8(): Promise<Protocol> {
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* @link https://github.com/WebKit/WebKit/tree/main/Source/JavaScriptCore/inspector/protocol
|
||||
*/
|
||||
async function downloadJsc(): Promise<Protocol> {
|
||||
const baseUrl = "https://raw.githubusercontent.com/WebKit/WebKit/main/Source/JavaScriptCore/inspector/protocol";
|
||||
const domains = [
|
||||
"Runtime",
|
||||
"Console",
|
||||
"Debugger",
|
||||
"Heap",
|
||||
"ScriptProfiler",
|
||||
"CPUProfiler",
|
||||
"GenericTypes",
|
||||
"Network",
|
||||
"Inspector",
|
||||
];
|
||||
return {
|
||||
name: "JSC",
|
||||
version: {
|
||||
major: 1,
|
||||
minor: 3,
|
||||
},
|
||||
domains: await Promise.all(domains.map(domain => download<Domain>(`${baseUrl}/${domain}.json`))).then(domains =>
|
||||
domains.sort((a, b) => a.domain.localeCompare(b.domain)),
|
||||
),
|
||||
};
|
||||
async function getJSC(): Promise<Protocol> {
|
||||
let bunExecutable = Bun.which("bun-debug") || process.execPath;
|
||||
if (!bunExecutable) {
|
||||
throw new Error("bun-debug not found");
|
||||
}
|
||||
bunExecutable = realpathSync(bunExecutable);
|
||||
}
|
||||
|
||||
async function download<V>(url: string): Promise<V> {
|
||||
@@ -200,3 +162,39 @@ function toComment(description?: string): string {
|
||||
const lines = ["/**", ...description.split("\n").map(line => ` * ${line.trim()}`), "*/"];
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
const cwd = new URL("../src/protocol/", import.meta.url);
|
||||
const runner = "Bun" in globalThis ? "bunx" : "npx";
|
||||
const write = (name: string, data: string) => {
|
||||
const filePath = path.resolve(__dirname, "..", "src", "protocol", name);
|
||||
writeFileSync(filePath, data);
|
||||
spawnSync(runner, ["prettier", "--write", filePath], { cwd, stdio: "ignore" });
|
||||
};
|
||||
const base = readFileSync(new URL("protocol.d.ts", cwd), "utf-8");
|
||||
const baseNoComments = base.replace(/\/\/.*/g, "");
|
||||
|
||||
const jscJsonFile = path.resolve(__dirname, process.argv.at(-1) ?? "");
|
||||
let jscJSONFile;
|
||||
try {
|
||||
jscJSONFile = await Bun.file(jscJsonFile).json();
|
||||
} catch (error) {
|
||||
console.warn("Failed to read CombinedDomains.json from WebKit build. Is this a WebKit build from Bun?");
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const jsc = {
|
||||
name: "JSC",
|
||||
version: {
|
||||
major: 1,
|
||||
minor: 4,
|
||||
},
|
||||
domains: jscJSONFile.domains
|
||||
.filter(a => a.debuggableTypes?.includes?.("javascript"))
|
||||
.sort((a, b) => a.domain.localeCompare(b.domain)),
|
||||
};
|
||||
write("jsc/protocol.json", JSON.stringify(jsc, null, 2));
|
||||
write("jsc/index.d.ts", "// GENERATED - DO NOT EDIT\n" + formatProtocol(jsc, baseNoComments));
|
||||
const v8 = await downloadV8();
|
||||
write("v8/protocol.json", JSON.stringify(v8));
|
||||
write("v8/index.d.ts", "// GENERATED - DO NOT EDIT\n" + formatProtocol(v8, baseNoComments));
|
||||
|
||||
packages/bun-inspector-protocol/src/inspector/node-socket.ts (new file, 235 lines)
@@ -0,0 +1,235 @@
|
||||
import { EventEmitter } from "node:events";
|
||||
import { Socket } from "node:net";
|
||||
import { SocketFramer } from "../../../bun-debug-adapter-protocol/src/debugger/node-socket-framer.js";
|
||||
import type { JSC } from "../protocol";
|
||||
import type { Inspector, InspectorEventMap } from "./index";
|
||||
|
||||
/**
|
||||
* An inspector that communicates with a debugger over a (unix) socket.
|
||||
* This is used in the extension as follows:
|
||||
*
|
||||
* 1. Extension sets environment variable `BUN_INSPECT_NOTIFY` inside of all vscode terminals.
|
||||
* This is a path to a unix socket that the extension will listen on.
|
||||
* 2. Bun reads it and connects to the socket, setting up a reverse connection for sending DAP
|
||||
* messages.
|
||||
*/
|
||||
export class NodeSocketInspector extends EventEmitter<InspectorEventMap> implements Inspector {
|
||||
#ready: Promise<boolean> | undefined;
|
||||
#socket: Socket;
|
||||
#requestId: number;
|
||||
#pendingRequests: JSC.Request[];
|
||||
#pendingResponses: Map<
|
||||
number,
|
||||
{
|
||||
request: JSC.Request;
|
||||
done: (result: unknown) => void;
|
||||
}
|
||||
>;
|
||||
#framer: SocketFramer;
|
||||
|
||||
constructor(socket: Socket) {
|
||||
super();
|
||||
this.#socket = socket;
|
||||
this.#requestId = 1;
|
||||
this.#pendingRequests = [];
|
||||
this.#pendingResponses = new Map();
|
||||
|
||||
this.#framer = new SocketFramer(socket, message => {
|
||||
if (Array.isArray(message)) {
|
||||
for (const m of message) {
|
||||
this.#accept(m);
|
||||
}
|
||||
} else {
|
||||
this.#accept(message);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private onConnectOrImmediately(cb: () => void) {
|
||||
const isAlreadyConnected = this.#socket.connecting === false;
|
||||
|
||||
if (isAlreadyConnected) {
|
||||
cb();
|
||||
} else {
|
||||
this.#socket.once("connect", cb);
|
||||
}
|
||||
}
|
||||
|
||||
async start(): Promise<boolean> {
|
||||
if (this.#ready) {
|
||||
return this.#ready;
|
||||
}
|
||||
|
||||
if (this.closed) {
|
||||
this.close();
|
||||
const addressWithPort = this.#socket.remoteAddress + ":" + this.#socket.remotePort;
|
||||
this.emit("Inspector.connecting", addressWithPort);
|
||||
}
|
||||
|
||||
const socket = this.#socket;
|
||||
|
||||
this.onConnectOrImmediately(() => {
|
||||
this.emit("Inspector.connected");
|
||||
|
||||
for (let i = 0; i < this.#pendingRequests.length; i++) {
|
||||
const request = this.#pendingRequests[i];
|
||||
|
||||
if (this.#send(request)) {
|
||||
this.emit("Inspector.request", request);
|
||||
} else {
|
||||
this.#pendingRequests = this.#pendingRequests.slice(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
socket.on("data", data => this.#framer.onData(data));
|
||||
|
||||
socket.on("error", error => {
|
||||
this.#close(unknownToError(error));
|
||||
});
|
||||
|
||||
socket.on("close", hadError => {
|
||||
if (hadError) {
|
||||
this.#close(new Error("Socket closed due to a transmission error"));
|
||||
} else {
|
||||
this.#close();
|
||||
}
|
||||
});
|
||||
|
||||
const ready = new Promise<boolean>(resolve => {
|
||||
if (socket.connecting) {
|
||||
socket.on("connect", () => resolve(true));
|
||||
} else {
|
||||
resolve(true);
|
||||
}
|
||||
socket.on("close", () => resolve(false));
|
||||
socket.on("error", () => resolve(false));
|
||||
}).finally(() => {
|
||||
this.#ready = undefined;
|
||||
});
|
||||
|
||||
this.#ready = ready;
|
||||
|
||||
return ready;
|
||||
}
|
||||
|
||||
send<M extends keyof JSC.RequestMap & keyof JSC.ResponseMap>(
|
||||
method: M,
|
||||
params?: JSC.RequestMap[M] | undefined,
|
||||
): Promise<JSC.ResponseMap[M]> {
|
||||
const id = this.#requestId++;
|
||||
const request = {
|
||||
id,
|
||||
method,
|
||||
params: params ?? {},
|
||||
};
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let timerId: number | undefined;
|
||||
const done = (result: any) => {
|
||||
this.#pendingResponses.delete(id);
|
||||
if (timerId) {
|
||||
clearTimeout(timerId);
|
||||
}
|
||||
if (result instanceof Error) {
|
||||
reject(result);
|
||||
} else {
|
||||
resolve(result);
|
||||
}
|
||||
};
|
||||
|
||||
this.#pendingResponses.set(id, {
|
||||
request: request,
|
||||
done: done,
|
||||
});
|
||||
|
||||
if (this.#send(request)) {
|
||||
timerId = +setTimeout(() => done(new Error(`Timed out: ${method}`)), 10_000);
|
||||
this.emit("Inspector.request", request);
|
||||
} else {
|
||||
this.emit("Inspector.pendingRequest", request);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#send(request: JSC.Request): boolean {
|
||||
this.#framer.send(JSON.stringify(request));
|
||||
|
||||
if (!this.#pendingRequests.includes(request)) {
|
||||
this.#pendingRequests.push(request);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
#accept(message: string): void {
|
||||
let data: JSC.Event | JSC.Response;
|
||||
try {
|
||||
data = JSON.parse(message);
|
||||
} catch (cause) {
|
||||
this.emit("Inspector.error", new Error(`Failed to parse message: ${message}`, { cause }));
|
||||
return;
|
||||
}
|
||||
|
||||
if (!("id" in data)) {
|
||||
this.emit("Inspector.event", data);
|
||||
const { method, params } = data;
|
||||
this.emit(method, params);
|
||||
return;
|
||||
}
|
||||
|
||||
this.emit("Inspector.response", data);
|
||||
|
||||
const { id } = data;
|
||||
const handle = this.#pendingResponses.get(id);
|
||||
if (!handle) {
|
||||
this.emit("Inspector.error", new Error(`Failed to find matching request for ID: ${id}`));
|
||||
return;
|
||||
}
|
||||
|
||||
if ("error" in data) {
|
||||
const { error } = data;
|
||||
const { message } = error;
|
||||
handle.done(new Error(message));
|
||||
} else {
|
||||
const { result } = data;
|
||||
handle.done(result);
|
||||
}
|
||||
}
|
||||
|
||||
get closed(): boolean {
|
||||
return !this.#socket.writable;
|
||||
}
|
||||
|
||||
close(): void {
|
||||
this.#socket?.end();
|
||||
}
|
||||
|
||||
#close(error?: Error): void {
|
||||
for (const handle of this.#pendingResponses.values()) {
|
||||
handle.done(error ?? new Error("Socket closed while waiting for: " + handle.request.method));
|
||||
}
|
||||
|
||||
this.#pendingResponses.clear();
|
||||
|
||||
if (error) {
|
||||
this.emit("Inspector.error", error);
|
||||
}
|
||||
|
||||
this.emit("Inspector.disconnected", error);
|
||||
}
|
||||
}
|
||||
|
||||
function unknownToError(input: unknown): Error {
|
||||
if (input instanceof Error) {
|
||||
return input;
|
||||
}
|
||||
|
||||
if (typeof input === "object" && input !== null && "message" in input) {
|
||||
const { message } = input;
|
||||
return new Error(`${message}`);
|
||||
}
|
||||
|
||||
return new Error(`${input}`);
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
import { EventEmitter } from "node:events";
|
||||
import { WebSocket } from "ws";
|
||||
import type { Inspector, InspectorEventMap } from ".";
|
||||
import type { JSC } from "../protocol";
|
||||
import type { Inspector, InspectorEventMap } from "./index";
|
||||
|
||||
/**
|
||||
* An inspector that communicates with a debugger over a WebSocket.
|
||||
@@ -170,6 +170,7 @@ export class WebSocketInspector extends EventEmitter<InspectorEventMap> implemen
|
||||
|
||||
#accept(message: string): void {
|
||||
let data: JSC.Event | JSC.Response;
|
||||
|
||||
try {
|
||||
data = JSON.parse(message);
|
||||
} catch (cause) {
|
||||
|
||||
File diff suppressed because it is too large
@@ -3,7 +3,7 @@
|
||||
"lib": ["ESNext"],
|
||||
"module": "ESNext",
|
||||
"target": "ESNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"moduleResolution": "Bundler",
|
||||
"moduleDetection": "force",
|
||||
"strict": true,
|
||||
"downlevelIteration": true,
|
||||
@@ -12,7 +12,7 @@
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"inlineSourceMap": true,
|
||||
"allowJs": true,
|
||||
"outDir": "dist",
|
||||
"outDir": "dist"
|
||||
},
|
||||
"include": [".", "../bun-types/index.d.ts"]
|
||||
}
|
||||
|
||||
packages/bun-native-bundler-plugin-api/bundler_plugin.h (new file, 73 lines)
@@ -0,0 +1,73 @@
|
||||
#ifndef BUN_NATIVE_BUNDLER_PLUGIN_API_H
|
||||
#define BUN_NATIVE_BUNDLER_PLUGIN_API_H
|
||||
|
||||
#include <stddef.h>
|
||||
#include <stdint.h>
|
||||
|
||||
typedef enum {
|
||||
BUN_LOADER_JSX = 0,
|
||||
BUN_LOADER_JS = 1,
|
||||
BUN_LOADER_TS = 2,
|
||||
BUN_LOADER_TSX = 3,
|
||||
BUN_LOADER_CSS = 4,
|
||||
BUN_LOADER_FILE = 5,
|
||||
BUN_LOADER_JSON = 6,
|
||||
BUN_LOADER_TOML = 7,
|
||||
BUN_LOADER_WASM = 8,
|
||||
BUN_LOADER_NAPI = 9,
|
||||
BUN_LOADER_BASE64 = 10,
|
||||
BUN_LOADER_DATAURL = 11,
|
||||
BUN_LOADER_TEXT = 12,
|
||||
} BunLoader;
|
||||
|
||||
const BunLoader BUN_LOADER_MAX = BUN_LOADER_TEXT;
|
||||
|
||||
typedef struct BunLogOptions {
|
||||
size_t __struct_size;
|
||||
const uint8_t *message_ptr;
|
||||
size_t message_len;
|
||||
const uint8_t *path_ptr;
|
||||
size_t path_len;
|
||||
const uint8_t *source_line_text_ptr;
|
||||
size_t source_line_text_len;
|
||||
int8_t level;
|
||||
int line;
|
||||
int lineEnd;
|
||||
int column;
|
||||
int columnEnd;
|
||||
} BunLogOptions;
|
||||
|
||||
typedef struct {
|
||||
size_t __struct_size;
|
||||
void *bun;
|
||||
const uint8_t *path_ptr;
|
||||
size_t path_len;
|
||||
const uint8_t *namespace_ptr;
|
||||
size_t namespace_len;
|
||||
uint8_t default_loader;
|
||||
void *external;
|
||||
} OnBeforeParseArguments;
|
||||
|
||||
typedef struct OnBeforeParseResult {
|
||||
size_t __struct_size;
|
||||
uint8_t *source_ptr;
|
||||
size_t source_len;
|
||||
uint8_t loader;
|
||||
int (*fetchSourceCode)(const OnBeforeParseArguments *args,
|
||||
struct OnBeforeParseResult *result);
|
||||
void *plugin_source_code_context;
|
||||
void (*free_plugin_source_code_context)(void *ctx);
|
||||
void (*log)(const OnBeforeParseArguments *args, BunLogOptions *options);
|
||||
} OnBeforeParseResult;
|
||||
|
||||
typedef enum {
|
||||
BUN_LOG_LEVEL_VERBOSE = 0,
|
||||
BUN_LOG_LEVEL_DEBUG = 1,
|
||||
BUN_LOG_LEVEL_INFO = 2,
|
||||
BUN_LOG_LEVEL_WARN = 3,
|
||||
BUN_LOG_LEVEL_ERROR = 4,
|
||||
} BunLogLevel;
|
||||
|
||||
const BunLogLevel BUN_LOG_MAX = BUN_LOG_LEVEL_ERROR;
|
||||
|
||||
#endif // BUN_NATIVE_BUNDLER_PLUGIN_API_H
|
||||
packages/bun-native-plugin-rs/.gitignore (new file, 1 line)
@@ -0,0 +1 @@
|
||||
target/
|
||||
packages/bun-native-plugin-rs/Cargo.lock (generated, new file, 286 lines)
@@ -0,0 +1,286 @@
|
||||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "1.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bindgen"
|
||||
version = "0.70.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f49d8fed880d473ea71efb9bf597651e77201bdd4893efe54c9e5d65ae04ce6f"
|
||||
dependencies = [
|
||||
"bitflags",
|
||||
"cexpr",
|
||||
"clang-sys",
|
||||
"itertools",
|
||||
"log",
|
||||
"prettyplease",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"regex",
|
||||
"rustc-hash",
|
||||
"shlex",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "2.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
|
||||
|
||||
[[package]]
|
||||
name = "bun-native-plugin"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cexpr"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
|
||||
dependencies = [
|
||||
"nom",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
||||
|
||||
[[package]]
|
||||
name = "clang-sys"
|
||||
version = "1.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4"
|
||||
dependencies = [
|
||||
"glob",
|
||||
"libc",
|
||||
"libloading",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "either"
|
||||
version = "1.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
|
||||
|
||||
[[package]]
|
||||
name = "glob"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
|
||||
|
||||
[[package]]
|
||||
name = "itertools"
|
||||
version = "0.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
|
||||
dependencies = [
|
||||
"either",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.166"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c2ccc108bbc0b1331bd061864e7cd823c0cab660bbe6970e66e2c0614decde36"
|
||||
|
||||
[[package]]
|
||||
name = "libloading"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
|
||||
|
||||
[[package]]
|
||||
name = "minimal-lexical"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
|
||||
|
||||
[[package]]
|
||||
name = "nom"
|
||||
version = "7.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"minimal-lexical",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "prettyplease"
|
||||
version = "0.2.25"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.92"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.37"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-automata",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.4.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
|
||||
|
||||
[[package]]
|
||||
name = "rustc-hash"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
|
||||
|
||||
[[package]]
|
||||
name = "shlex"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.89"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm",
|
||||
"windows_aarch64_msvc",
|
||||
"windows_i686_gnu",
|
||||
"windows_i686_gnullvm",
|
||||
"windows_i686_msvc",
|
||||
"windows_x86_64_gnu",
|
||||
"windows_x86_64_gnullvm",
|
||||
"windows_x86_64_msvc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
|
||||
7
packages/bun-native-plugin-rs/Cargo.toml
Normal file
@@ -0,0 +1,7 @@
|
||||
[package]
|
||||
name = "bun-native-plugin"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
[build-dependencies]
|
||||
bindgen = "0.70.1"
|
||||
248
packages/bun-native-plugin-rs/README.md
Normal file
@@ -0,0 +1,248 @@
|
||||
> ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
|
||||
|
||||
# Bun Native Plugins
|
||||
|
||||
This crate provides a Rustified wrapper over Bun's native bundler plugin C API.
|
||||
|
||||
Some advantages of _native_ bundler plugins over regular ones implemented in JS:
|
||||
|
||||
- Native plugins take full advantage of Bun's parallelized bundler pipeline and run on multiple threads at the same time
|
||||
- Unlike JS, native plugins don't need to do the UTF-8 <-> UTF-16 source code string conversions
|
||||
|
||||
What exactly are native bundler plugins? They are NAPI modules that expose a C ABI function implementing a plugin lifecycle hook.
|
||||
|
||||
The currently supported lifecycle hooks are:
|
||||
|
||||
- `onBeforeParse` (called immediately before a file is parsed, allows you to modify the source code of the file)
|
||||
|
||||
## Getting started
|
||||
|
||||
Since native bundler plugins are NAPI modules, the easiest way to get started is to create a new [napi-rs](https://github.com/napi-rs/napi-rs) project:
|
||||
|
||||
```bash
|
||||
bun add -g @napi-rs/cli
|
||||
napi new
|
||||
```
|
||||
|
||||
Then install this crate:
|
||||
|
||||
```bash
|
||||
cargo add bun-native-plugin
|
||||
```
|
||||
|
||||
Now, inside the `lib.rs` file, expose a C ABI function which has the same function signature as the plugin lifecycle hook that you want to implement.
|
||||
|
||||
For example, implementing `onBeforeParse`:
|
||||
|
||||
```rs
|
||||
use bun_native_plugin::{define_bun_plugin, OnBeforeParse};
|
||||
use napi_derive::napi;
|
||||
|
||||
/// Define the plugin and its name
|
||||
define_bun_plugin!("replace-foo-with-bar");
|
||||
|
||||
/// This is necessary for napi-rs to compile this into a proper NAPI module
|
||||
#[napi]
|
||||
pub fn register_bun_plugin() {}
|
||||
|
||||
/// Use `no_mangle` so that we can reference this symbol by name later
|
||||
/// when registering this native plugin in JS.
|
||||
///
|
||||
/// Here we'll create a dummy plugin which replaces all occurrences of
|
||||
/// `foo` with `bar`
|
||||
#[no_mangle]
|
||||
pub extern "C" fn on_before_parse_plugin_impl(
|
||||
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
|
||||
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
|
||||
) {
|
||||
let args = unsafe { &*args };
|
||||
|
||||
// This returns a handle which is a safe wrapper over the raw
|
||||
// C API.
|
||||
let mut handle = match OnBeforeParse::from_raw(args, result) {
|
||||
Ok(handle) => handle,
|
||||
Err(_) => {
|
||||
// `OnBeforeParse::from_raw` handles error logging
|
||||
// so it is fine to return here.
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let input_source_code = match handle.input_source_code() {
|
||||
Ok(source_str) => source_str,
|
||||
Err(_) => {
|
||||
// If we encounter an error, we must log it so that
|
||||
// Bun knows this plugin failed.
|
||||
handle.log_error("Failed to fetch source code!");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let loader = handle.output_loader();
|
||||
let output_source_code = input_source_code.replace("foo", "bar");
|
||||
handle.set_output_source_code(output_source_code, loader);
|
||||
}
|
||||
```
|
||||
|
||||
Then compile this NAPI module. If you are using napi-rs, the `package.json` should have a `build` script you can run:
|
||||
|
||||
```bash
|
||||
bun run build
|
||||
```
|
||||
|
||||
This will produce a `.node` file in the project directory.
|
||||
|
||||
With the compiled NAPI module, you can now register the plugin from JS:
|
||||
|
||||
```js
|
||||
const result = await Bun.build({
|
||||
entrypoints: ["index.ts"],
|
||||
plugins: [
|
||||
{
|
||||
name: "replace-foo-with-bar",
|
||||
setup(build) {
|
||||
const napiModule = require("path/to/napi_module.node");
|
||||
|
||||
// Register the `onBeforeParse` hook to run on all `.ts` files.
|
||||
// We tell it to use the function we implemented inside our `lib.rs` code.
|
||||
build.onBeforeParse(
|
||||
{ filter: /\.ts/ },
|
||||
{ napiModule, symbol: "on_before_parse_plugin_impl" },
|
||||
);
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
```
|
||||
|
||||
## Very important information
|
||||
|
||||
### Error handling and panics
|
||||
|
||||
It is highly recommended to avoid panicking as this will crash the runtime. Instead, you must handle errors and log them:
|
||||
|
||||
```rs
|
||||
let input_source_code = match handle.input_source_code() {
|
||||
Ok(source_str) => source_str,
|
||||
Err(_) => {
|
||||
// If we encounter an error, we must log it so that
|
||||
// Bun knows this plugin failed.
|
||||
handle.log_error("Failed to fetch source code!");
|
||||
return;
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
### Passing state to and from JS: `External`
|
||||
|
||||
One way to communicate data between your plugin and JS is through NAPI's [External](https://napi.rs/docs/concepts/external) type.
|
||||
|
||||
An External in NAPI is like an opaque pointer to data that can be passed to and from JS. Inside your NAPI module, you can retrieve
|
||||
the pointer and modify the data.
|
||||
|
||||
As an example that extends our getting started example above, let's say you wanted to count the number of `foo`s that the native plugin encounters.
|
||||
|
||||
You would expose a NAPI module function which creates this state. Recall that state in native plugins must be threadsafe. This usually means
|
||||
that your state must be `Sync`:
|
||||
|
||||
```rs
|
||||
struct PluginState {
|
||||
foo_count: std::sync::atomic::AtomicU32,
|
||||
}
|
||||
|
||||
#[napi]
|
||||
pub fn create_plugin_state() -> External<PluginState> {
|
||||
let external = External::new(PluginState {
|
||||
foo_count: std::sync::atomic::AtomicU32::new(0),
|
||||
});
|
||||
|
||||
external
|
||||
}
|
||||
|
||||
|
||||
#[napi]
|
||||
pub fn get_foo_count(plugin_state: External<PluginState>) -> u32 {
|
||||
let plugin_state: &PluginState = &plugin_state;
|
||||
plugin_state.foo_count.load(std::sync::atomic::Ordering::Relaxed)
|
||||
}
|
||||
```
|
||||
|
||||
When you register your plugin from JavaScript, call the NAPI module function to create the external and then pass it in:
|
||||
|
||||
```js
|
||||
const napiModule = require("path/to/napi_module.node");
|
||||
const pluginState = napiModule.createPluginState();
|
||||
|
||||
const result = await Bun.build({
|
||||
entrypoints: ["index.ts"],
|
||||
plugins: [
|
||||
{
|
||||
name: "replace-foo-with-bar",
|
||||
setup(build) {
|
||||
build.onBeforeParse(
|
||||
{ filter: /\.ts/ },
|
||||
{
|
||||
napiModule,
|
||||
symbol: "on_before_parse_plugin_impl",
|
||||
// pass our NAPI external which contains our plugin state here
|
||||
external: pluginState,
|
||||
},
|
||||
);
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
console.log("Total `foo`s encountered: ", pluginState.getFooCount());
|
||||
```
|
||||
|
||||
Finally, from the native implementation of your plugin, you can extract the external:
|
||||
|
||||
```rs
|
||||
pub extern "C" fn on_before_parse_plugin_impl(
|
||||
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
|
||||
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
|
||||
) {
|
||||
let args = unsafe { &*args };
|
||||
|
||||
let mut handle = match OnBeforeParse::from_raw(args, result) {
|
||||
Ok(handle) => handle,
|
||||
Err(_) => {
|
||||
// `OnBeforeParse::from_raw` handles error logging
|
||||
// so it is fine to return here.
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let plugin_state: &PluginState =
|
||||
// This operation is only safe if you pass in an external when registering the plugin.
|
||||
// If you don't, this could lead to a segfault or access of undefined memory.
|
||||
match unsafe { handle.external().and_then(|state| state.ok_or(Error::Unknown)) } {
|
||||
Ok(state) => state,
|
||||
Err(_) => {
|
||||
handle.log_error("Failed to get external!");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
// Fetch our source code again
|
||||
let input_source_code = match handle.input_source_code() {
|
||||
Ok(source_str) => source_str,
|
||||
Err(_) => {
|
||||
handle.log_error("Failed to fetch source code!");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
// Count the number of `foo`s and add it to our state
|
||||
let foo_count = input_source_code.matches("foo").count() as u32;
|
||||
plugin_state.foo_count.fetch_add(foo_count, std::sync::atomic::Ordering::Relaxed);
|
||||
}
|
||||
```
|
||||
|
||||
### Concurrency
|
||||
|
||||
Your `extern "C"` plugin function can be called _on any thread_ at _any time_ and _multiple times at once_.
|
||||
|
||||
Therefore, you must design any state management to be threadsafe.
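
For example, here is a minimal sketch (the type and field names are illustrative, not part of this crate) of protecting non-atomic shared state with a `Mutex` so that concurrent hook invocations stay safe:

```rs
use std::sync::Mutex;

struct SharedPluginState {
    // Non-atomic data must sit behind a lock, because the hook may run
    // on several bundler threads at the same time.
    seen_paths: Mutex<Vec<String>>,
}

fn record_path(state: &SharedPluginState, path: &str) {
    // Each invocation takes the lock briefly, records its data, and releases it.
    let mut seen = state.seen_paths.lock().unwrap();
    seen.push(path.to_string());
}
```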
|
||||
20
packages/bun-native-plugin-rs/build.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
fn main() {
|
||||
println!("cargo:rustc-link-search=./headers");
|
||||
|
||||
let bindings = bindgen::Builder::default()
|
||||
.header("wrapper.h")
|
||||
// Add the headers directory to the include path
|
||||
.clang_arg("-I./headers")
|
||||
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
|
||||
.rustified_enum("BunLogLevel")
|
||||
.rustified_enum("BunLoader")
|
||||
.generate()
|
||||
.expect("Unable to generate bindings");
|
||||
|
||||
let out_path = PathBuf::from(std::env::var("OUT_DIR").unwrap());
|
||||
bindings
|
||||
.write_to_file(out_path.join("bindings.rs"))
|
||||
.expect("Couldn't write bindings!");
|
||||
}
|
||||
6
packages/bun-native-plugin-rs/copy_headers.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import { join } from "node:path";
|
||||
|
||||
const dirname = join(import.meta.dir, "../", "bun-native-bundler-plugin-api");
|
||||
await Bun.$`rm -rf headers`;
|
||||
await Bun.$`mkdir -p headers`;
|
||||
await Bun.$`cp -R ${dirname} headers/bun-native-bundler-plugin-api`;
|
||||
@@ -0,0 +1,79 @@
|
||||
#ifndef BUN_NATIVE_BUNDLER_PLUGIN_API_H
|
||||
#define BUN_NATIVE_BUNDLER_PLUGIN_API_H
|
||||
|
||||
#include <stddef.h>
|
||||
#include <stdint.h>
|
||||
|
||||
typedef enum {
|
||||
BUN_LOADER_JSX = 0,
|
||||
BUN_LOADER_JS = 1,
|
||||
BUN_LOADER_TS = 2,
|
||||
BUN_LOADER_TSX = 3,
|
||||
BUN_LOADER_CSS = 4,
|
||||
BUN_LOADER_FILE = 5,
|
||||
BUN_LOADER_JSON = 6,
|
||||
BUN_LOADER_TOML = 7,
|
||||
BUN_LOADER_WASM = 8,
|
||||
BUN_LOADER_NAPI = 9,
|
||||
BUN_LOADER_BASE64 = 10,
|
||||
BUN_LOADER_DATAURL = 11,
|
||||
BUN_LOADER_TEXT = 12,
|
||||
BUN_LOADER_BUNSH = 13,
|
||||
BUN_LOADER_SQLITE = 14,
|
||||
BUN_LOADER_SQLITE_EMBEDDED = 15
|
||||
} BunLoader;
|
||||
|
||||
const BunLoader BUN_LOADER_MAX = BUN_LOADER_SQLITE_EMBEDDED;
|
||||
|
||||
typedef struct BunLogOptions {
|
||||
size_t __struct_size;
|
||||
const uint8_t* message_ptr;
|
||||
size_t message_len;
|
||||
const uint8_t* path_ptr;
|
||||
size_t path_len;
|
||||
const uint8_t* source_line_text_ptr;
|
||||
size_t source_line_text_len;
|
||||
int8_t level;
|
||||
int line;
|
||||
int lineEnd;
|
||||
int column;
|
||||
int columnEnd;
|
||||
} BunLogOptions;
|
||||
|
||||
typedef struct {
|
||||
size_t __struct_size;
|
||||
void* bun;
|
||||
const uint8_t* path_ptr;
|
||||
size_t path_len;
|
||||
const uint8_t* namespace_ptr;
|
||||
size_t namespace_len;
|
||||
uint8_t default_loader;
|
||||
void *external;
|
||||
} OnBeforeParseArguments;
|
||||
|
||||
typedef struct OnBeforeParseResult {
|
||||
size_t __struct_size;
|
||||
uint8_t* source_ptr;
|
||||
size_t source_len;
|
||||
uint8_t loader;
|
||||
int (*fetchSourceCode)(
|
||||
const OnBeforeParseArguments* args,
|
||||
struct OnBeforeParseResult* result
|
||||
);
|
||||
void* plugin_source_code_context;
|
||||
void (*free_plugin_source_code_context)(void* ctx);
|
||||
void (*log)(const OnBeforeParseArguments* args, BunLogOptions* options);
|
||||
} OnBeforeParseResult;
|
||||
|
||||
|
||||
typedef enum {
|
||||
BUN_LOG_LEVEL_VERBOSE = 0,
|
||||
BUN_LOG_LEVEL_DEBUG = 1,
|
||||
BUN_LOG_LEVEL_INFO = 2,
|
||||
BUN_LOG_LEVEL_WARN = 3,
|
||||
BUN_LOG_LEVEL_ERROR = 4,
|
||||
} BunLogLevel;
|
||||
|
||||
const BunLogLevel BUN_LOG_MAX = BUN_LOG_LEVEL_ERROR;
|
||||
|
||||
#endif // BUN_NATIVE_BUNDLER_PLUGIN_API_H
|
||||
627
packages/bun-native-plugin-rs/src/lib.rs
Normal file
@@ -0,0 +1,627 @@
|
||||
//! > ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
|
||||
//!
|
||||
//! # Bun Native Plugins
|
||||
//!
|
||||
//! This crate provides a Rustified wrapper over Bun's native bundler plugin C API.
|
||||
//!
|
||||
//! Some advantages of _native_ bundler plugins over regular ones implemented in JS:
|
||||
//!
|
||||
//! - Native plugins take full advantage of Bun's parallelized bundler pipeline and run on multiple threads at the same time
|
||||
//! - Unlike JS, native plugins don't need to do the UTF-8 <-> UTF-16 source code string conversions
|
||||
//!
|
||||
//! What exactly are native bundler plugins? They are NAPI modules that expose a C ABI function implementing a plugin lifecycle hook.
|
||||
//!
|
||||
//! The currently supported lifecycle hooks are:
|
||||
//!
|
||||
//! - `onBeforeParse` (called immediately before a file is parsed, allows you to modify the source code of the file)
|
||||
//!
|
||||
//! ## Getting started
|
||||
//!
|
||||
//! Since native bundler plugins are NAPI modules, the easiest way to get started is to create a new [napi-rs](https://github.com/napi-rs/napi-rs) project:
|
||||
//!
|
||||
//! ```bash
|
||||
//! bun add -g @napi-rs/cli
|
||||
//! napi new
|
||||
//! ```
|
||||
//!
|
||||
//! Then install this crate:
|
||||
//!
|
||||
//! ```bash
|
||||
//! cargo add bun-native-plugin
|
||||
//! ```
|
||||
//!
|
||||
//! Now, inside the `lib.rs` file, expose a C ABI function which has the same function signature as the plugin lifecycle hook that you want to implement.
|
||||
//!
|
||||
//! For example, implementing `onBeforeParse`:
|
||||
//!
|
||||
//! ```rust
|
||||
//! use bun_native_plugin::{OnBeforeParse};
|
||||
//!
|
||||
//! /// This is necessary for napi-rs to compile this into a proper NAPI module
|
||||
//! #[napi]
|
||||
//! pub fn register_bun_plugin() {}
|
||||
//!
|
||||
//! /// Use `no_mangle` so that we can reference this symbol by name later
|
||||
//! /// when registering this native plugin in JS.
|
||||
//! ///
|
||||
//! /// Here we'll create a dummy plugin which replaces all occurrences of
|
||||
//! /// `foo` with `bar`
|
||||
//! #[no_mangle]
|
||||
//! pub extern "C" fn on_before_parse_plugin_impl(
|
||||
//! args: *const bun_native_plugin::sys::OnBeforeParseArguments,
|
||||
//! result: *mut bun_native_plugin::sys::OnBeforeParseResult,
|
||||
//! ) {
|
||||
//! let args = unsafe { &*args };
|
||||
//! let result = unsafe { &mut *result };
|
||||
//!
|
||||
//! // This returns a handle which is a safe wrapper over the raw
|
||||
//! // C API.
|
||||
//! let mut handle = match OnBeforeParse::from_raw(args, result) {
|
||||
//! Ok(handle) => handle,
|
||||
//! Err(_) => {
|
||||
//! // `OnBeforeParse::from_raw` handles error logging
|
||||
//! // so it is fine to return here.
|
||||
//! return;
|
||||
//! }
|
||||
//! };
|
||||
//!
|
||||
//! let input_source_code = match handle.input_source_code() {
|
||||
//! Ok(source_str) => source_str,
|
||||
//! Err(_) => {
|
||||
//! // If we encounter an error, we must log it so that
|
||||
//! // Bun knows this plugin failed.
|
||||
//! handle.log_error("Failed to fetch source code!");
|
||||
//! return;
|
||||
//! }
|
||||
//! };
|
||||
//!
|
||||
//! let loader = handle.output_loader();
|
||||
//! let output_source_code = input_source_code.replace("foo", "bar");
|
||||
//! handle.set_output_source_code(output_source_code, loader);
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! Then compile this NAPI module. If you are using napi-rs, the `package.json` should have a `build` script you can run:
|
||||
//!
|
||||
//! ```bash
|
||||
//! bun run build
|
||||
//! ```
|
||||
//!
|
||||
//! This will produce a `.node` file in the project directory.
|
||||
//!
|
||||
//! With the compiled NAPI module, you can now register the plugin from JS:
|
||||
//!
|
||||
//! ```js
|
||||
//! const result = await Bun.build({
|
||||
//! entrypoints: ["index.ts"],
|
||||
//! plugins: [
|
||||
//! {
|
||||
//! name: "replace-foo-with-bar",
|
||||
//! setup(build) {
|
||||
//! const napiModule = require("path/to/napi_module.node");
|
||||
//!
|
||||
//! // Register the `onBeforeParse` hook to run on all `.ts` files.
|
||||
//! // We tell it to use the function we implemented inside our `lib.rs` code.
|
||||
//! build.onBeforeParse(
|
||||
//! { filter: /\.ts/ },
|
||||
//! { napiModule, symbol: "on_before_parse_plugin_impl" },
|
||||
//! );
|
||||
//! },
|
||||
//! },
|
||||
//! ],
|
||||
//! });
|
||||
//! ```
|
||||
//!
|
||||
//! ## Very important information
|
||||
//!
|
||||
//! ### Error handling and panics
|
||||
//!
|
||||
//! It is highly recommended to avoid panicking as this will crash the runtime. Instead, you must handle errors and log them:
|
||||
//!
|
||||
//! ```rust
|
||||
//! let input_source_code = match handle.input_source_code() {
|
||||
//! Ok(source_str) => source_str,
|
||||
//! Err(_) => {
|
||||
//! // If we encounter an error, we must log it so that
|
||||
//! // Bun knows this plugin failed.
|
||||
//! handle.log_error("Failed to fetch source code!");
|
||||
//! return;
|
||||
//! }
|
||||
//! };
|
||||
//! ```
|
||||
//!
|
||||
//! ### Passing state to and from JS: `External`
|
||||
//!
|
||||
//! One way to communicate data between your plugin and JS is through NAPI's [External](https://napi.rs/docs/concepts/external) type.
|
||||
//!
|
||||
//! An External in NAPI is like an opaque pointer to data that can be passed to and from JS. Inside your NAPI module, you can retrieve
|
||||
//! the pointer and modify the data.
|
||||
//!
|
||||
//! As an example that extends our getting started example above, let's say you wanted to count the number of `foo`s that the native plugin encounters.
|
||||
//!
|
||||
//! You would expose a NAPI module function which creates this state. Recall that state in native plugins must be threadsafe. This usually means
|
||||
//! that your state must be `Sync`:
|
||||
//!
|
||||
//! ```rust
|
||||
//! struct PluginState {
|
||||
//! foo_count: std::sync::atomic::AtomicU32,
|
||||
//! }
|
||||
//!
|
||||
//! #[napi]
|
||||
//! pub fn create_plugin_state() -> External<PluginState> {
|
||||
//! let external = External::new(PluginState {
|
||||
//! foo_count: std::sync::atomic::AtomicU32::new(0),
|
||||
//! });
|
||||
//!
|
||||
//! external
|
||||
//! }
|
||||
//!
|
||||
//!
|
||||
//! #[napi]
|
||||
//! pub fn get_foo_count(plugin_state: External<PluginState>) -> u32 {
|
||||
//! let plugin_state: &PluginState = &plugin_state;
|
||||
//! plugin_state.foo_count.load(std::sync::atomic::Ordering::Relaxed)
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! When you register your plugin from JavaScript, call the NAPI module function to create the external and then pass it in:
|
||||
//!
|
||||
//! ```js
|
||||
//! const napiModule = require("path/to/napi_module.node");
|
||||
//! const pluginState = napiModule.createPluginState();
|
||||
//!
|
||||
//! const result = await Bun.build({
|
||||
//! entrypoints: ["index.ts"],
|
||||
//! plugins: [
|
||||
//! {
|
||||
//! name: "replace-foo-with-bar",
|
||||
//! setup(build) {
|
||||
//! build.onBeforeParse(
|
||||
//! { filter: /\.ts/ },
|
||||
//! {
|
||||
//! napiModule,
|
||||
//! symbol: "on_before_parse_plugin_impl",
|
||||
//! // pass our NAPI external which contains our plugin state here
|
||||
//! external: pluginState,
|
||||
//! },
|
||||
//! );
|
||||
//! },
|
||||
//! },
|
||||
//! ],
|
||||
//! });
|
||||
//!
|
||||
//! console.log("Total `foo`s encountered: ", napiModule.getFooCount(pluginState));
|
||||
//! ```
|
||||
//!
|
||||
//! Finally, from the native implementation of your plugin, you can extract the external:
|
||||
//!
|
||||
//! ```rust
|
||||
//! pub extern "C" fn on_before_parse_plugin_impl(
|
||||
//! args: *const bun_native_plugin::sys::OnBeforeParseArguments,
|
||||
//! result: *mut bun_native_plugin::sys::OnBeforeParseResult,
|
||||
//! ) {
|
||||
//! let args = unsafe { &*args };
|
||||
//! let result = unsafe { &mut *result };
|
||||
//!
|
||||
//! let mut handle = match OnBeforeParse::from_raw(args, result) {
|
||||
//! Ok(handle) => handle,
|
||||
//! Err(_) => {
|
||||
//! // `OnBeforeParse::from_raw` handles error logging
|
||||
//! // so it is fine to return here.
|
||||
//! return;
|
||||
//! }
|
||||
//! };
|
||||
//!
|
||||
//! let plugin_state: &PluginState =
|
||||
//! // This operation is only safe if you pass in an external when registering the plugin.
|
||||
//! // If you don't, this could lead to a segfault or access of undefined memory.
|
||||
//! match unsafe { handle.external().and_then(|state| state.ok_or(Error::Unknown)) } {
|
||||
//! Ok(state) => state,
|
||||
//! Err(_) => {
|
||||
//! handle.log_error("Failed to get external!");
|
||||
//! return;
|
||||
//! }
|
||||
//! };
|
||||
//!
|
||||
//!
|
||||
//! // Fetch our source code again
|
||||
//! let input_source_code = match handle.input_source_code() {
|
||||
//! Ok(source_str) => source_str,
|
||||
//! Err(_) => {
|
||||
//! handle.log_error("Failed to fetch source code!");
|
||||
//! return;
|
||||
//! }
|
||||
//! };
|
||||
//!
|
||||
//! // Count the number of `foo`s and add it to our state
|
||||
//! let foo_count = input_source_code.matches("foo").count() as u32;
|
||||
//! plugin_state.foo_count.fetch_add(foo_count, std::sync::atomic::Ordering::Relaxed);
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! ### Concurrency
|
||||
//!
|
||||
//! Your `extern "C"` plugin function can be called _on any thread_ at _any time_ and _multiple times at once_.
|
||||
//!
|
||||
//! Therefore, you must design any state management to be threadsafe.
|
||||
|
||||
#![allow(non_upper_case_globals)]
|
||||
#![allow(non_camel_case_types)]
|
||||
#![allow(non_snake_case)]
|
||||
|
||||
#[repr(transparent)]
|
||||
pub struct BunPluginName(*const c_char);
|
||||
|
||||
impl BunPluginName {
|
||||
pub const fn new(ptr: *const c_char) -> Self {
|
||||
Self(ptr)
|
||||
}
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! define_bun_plugin {
|
||||
($name:expr) => {
|
||||
pub static BUN_PLUGIN_NAME_STRING: &str = $name;
|
||||
|
||||
#[no_mangle]
|
||||
pub static BUN_PLUGIN_NAME: bun_native_plugin::BunPluginName =
|
||||
bun_native_plugin::BunPluginName::new(BUN_PLUGIN_NAME_STRING.as_ptr() as *const _);
|
||||
|
||||
#[napi]
|
||||
fn bun_plugin_register() {}
|
||||
};
|
||||
}
|
||||
|
||||
unsafe impl Sync for BunPluginName {}
|
||||
|
||||
use std::{
|
||||
any::TypeId,
|
||||
borrow::Cow,
|
||||
cell::UnsafeCell,
|
||||
ffi::{c_char, c_void},
|
||||
str::Utf8Error,
|
||||
};
|
||||
|
||||
pub mod sys {
|
||||
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
|
||||
}
|
||||
|
||||
#[repr(C)]
|
||||
pub struct TaggedObject<T> {
|
||||
type_id: TypeId,
|
||||
pub(crate) object: Option<T>,
|
||||
}
|
||||
|
||||
struct SourceCodeContext {
|
||||
source_ptr: *mut u8,
|
||||
source_len: usize,
|
||||
source_cap: usize,
|
||||
}
|
||||
|
||||
extern "C" fn free_plugin_source_code_context(ctx: *mut c_void) {
|
||||
// SAFETY: The ctx pointer is a pointer to the `SourceCodeContext` struct we allocated.
|
||||
unsafe {
|
||||
drop(Box::from_raw(ctx as *mut SourceCodeContext));
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for SourceCodeContext {
|
||||
fn drop(&mut self) {
|
||||
if !self.source_ptr.is_null() {
|
||||
// SAFETY: These fields come from a `String` that we allocated.
|
||||
unsafe {
|
||||
drop(String::from_raw_parts(
|
||||
self.source_ptr,
|
||||
self.source_len,
|
||||
self.source_cap,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub type BunLogLevel = sys::BunLogLevel;
|
||||
pub type BunLoader = sys::BunLoader;
|
||||
|
||||
fn get_from_raw_str<'a>(ptr: *const u8, len: usize) -> Result<Cow<'a, str>> {
|
||||
let slice: &'a [u8] = unsafe { std::slice::from_raw_parts(ptr, len) };
|
||||
|
||||
// Windows allows invalid UTF-16 strings in the filesystem. These get converted to WTF-8 in Zig.
|
||||
// Meaning the string may contain invalid UTF-8, we'll have to use the safe checked version.
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
std::str::from_utf8(slice)
|
||||
.map(Into::into)
|
||||
.or_else(|_| Ok(String::from_utf8_lossy(slice)))
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
{
|
||||
// The source code comes from Zig, which uses UTF-8, so this should normally succeed; fall back to a lossy conversion just in case.
|
||||
|
||||
std::str::from_utf8(slice)
|
||||
.map(Into::into)
|
||||
.or_else(|_| Ok(String::from_utf8_lossy(slice)))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum Error {
|
||||
Utf8(Utf8Error),
|
||||
IncompatiblePluginVersion,
|
||||
ExternalTypeMismatch,
|
||||
Unknown,
|
||||
}
|
||||
|
||||
pub type Result<T> = std::result::Result<T, Error>;
|
||||
|
||||
impl From<Utf8Error> for Error {
|
||||
fn from(value: Utf8Error) -> Self {
|
||||
Self::Utf8(value)
|
||||
}
|
||||
}
|
||||
|
||||
/// A safe handle for the arguments + result struct for the
|
||||
/// `OnBeforeParse` bundler lifecycle hook.
|
||||
///
|
||||
/// This struct acts as a safe wrapper around the raw C API structs
|
||||
/// (`sys::OnBeforeParseArguments`/`sys::OnBeforeParseResult`) needed to
|
||||
/// implement the `OnBeforeParse` bundler lifecycle hook.
|
||||
///
|
||||
/// To initialize this struct, see the `from_raw` method.
|
||||
pub struct OnBeforeParse<'a> {
|
||||
args_raw: &'a sys::OnBeforeParseArguments,
|
||||
result_raw: *mut sys::OnBeforeParseResult,
|
||||
compilation_context: *mut SourceCodeContext,
|
||||
}
|
||||
|
||||
impl<'a> OnBeforeParse<'a> {
|
||||
/// Initialize this struct from references to their raw counterparts.
|
||||
///
|
||||
/// This function will do a versioning check to ensure that the plugin
|
||||
/// is compatible with the current version of Bun. If the plugin is not
|
||||
/// compatible, it will log an error and return an error result.
|
||||
///
|
||||
/// # Example
|
||||
/// ```rust
|
||||
/// extern "C" fn on_before_parse_impl(args: *const sys::OnBeforeParseArguments, result: *mut sys::OnBeforeParseResult) {
|
||||
/// let args = unsafe { &*args };
|
||||
/// let result = unsafe { &mut *result };
|
||||
/// let handle = match OnBeforeParse::from_raw(args, result) {
|
||||
/// Ok(handle) => handle,
|
||||
/// Err(_) => return,
|
||||
/// };
|
||||
/// }
|
||||
/// ```
|
||||
pub fn from_raw(
|
||||
args: &'a sys::OnBeforeParseArguments,
|
||||
result: *mut sys::OnBeforeParseResult,
|
||||
) -> Result<Self> {
|
||||
if args.__struct_size < std::mem::size_of::<sys::OnBeforeParseArguments>()
|
||||
|| unsafe { (*result).__struct_size } < std::mem::size_of::<sys::OnBeforeParseResult>()
|
||||
{
|
||||
let message = "This plugin is not compatible with the current version of Bun.";
|
||||
let mut log_options = sys::BunLogOptions {
|
||||
__struct_size: std::mem::size_of::<sys::BunLogOptions>(),
|
||||
message_ptr: message.as_ptr(),
|
||||
message_len: message.len(),
|
||||
path_ptr: args.path_ptr,
|
||||
path_len: args.path_len,
|
||||
source_line_text_ptr: std::ptr::null(),
|
||||
source_line_text_len: 0,
|
||||
level: BunLogLevel::BUN_LOG_LEVEL_ERROR as i8,
|
||||
line: 0,
|
||||
lineEnd: 0,
|
||||
column: 0,
|
||||
columnEnd: 0,
|
||||
};
|
||||
// SAFETY: The `log` function pointer is guaranteed to be valid by the Bun runtime.
|
||||
unsafe {
|
||||
((*result).log.unwrap())(args, &mut log_options);
|
||||
}
|
||||
return Err(Error::IncompatiblePluginVersion);
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
args_raw: args,
|
||||
result_raw: result,
|
||||
compilation_context: std::ptr::null_mut() as *mut _,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn path(&self) -> Result<Cow<'_, str>> {
|
||||
get_from_raw_str(self.args_raw.path_ptr, self.args_raw.path_len)
|
||||
}
|
||||
|
||||
pub fn namespace(&self) -> Result<Cow<'_, str>> {
|
||||
get_from_raw_str(self.args_raw.namespace_ptr, self.args_raw.namespace_len)
|
||||
}
|
||||
|
||||
/// Get the external object from the `OnBeforeParse` arguments.
|
||||
///
|
||||
/// The external object is set by the plugin definition inside of JS:
|
||||
/// ```js
|
||||
/// await Bun.build({
|
||||
/// plugins: [
|
||||
/// {
|
||||
/// name: "my-plugin",
|
||||
/// setup(builder) {
|
||||
/// const native_plugin = require("./native_plugin.node");
|
||||
/// const external = native_plugin.createExternal();
|
||||
/// builder.external({ napiModule: native_plugin, symbol: 'onBeforeParse', external });
|
||||
/// },
|
||||
/// },
|
||||
/// ],
|
||||
/// });
|
||||
/// ```
|
||||
///
|
||||
/// The external object must be created from NAPI for this function to be safe!
|
||||
///
|
||||
/// This function will return an error if the external object is not a
|
||||
/// valid tagged object for the given type.
|
||||
///
|
||||
/// This function will return `Ok(None)` if there is no external object
|
||||
/// set.
|
||||
///
|
||||
/// # Example
|
||||
/// The code to create the external from napi-rs:
|
||||
/// ```rs
|
||||
/// #[no_mangle]
|
||||
/// #[napi]
|
||||
/// pub fn create_my_external() -> External<MyStruct> {
|
||||
/// let external = External::new(MyStruct::new());
|
||||
///
|
||||
/// external
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// The code to extract the external:
|
||||
/// ```rust
|
||||
/// let external = match unsafe { handle.external::<MyStruct>() } {
|
||||
/// Ok(Some(external)) => external,
|
||||
/// _ => {
|
||||
/// handle.log_error("Could not get external object.");
|
||||
/// return;
|
||||
/// },
|
||||
/// };
|
||||
/// ```
|
||||
pub unsafe fn external<T: 'static + Sync>(&self) -> Result<Option<&'static T>> {
|
||||
if self.args_raw.external.is_null() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let external: *mut TaggedObject<T> = self.args_raw.external as *mut TaggedObject<T>;
|
||||
|
||||
unsafe {
|
||||
if (*external).type_id != TypeId::of::<T>() {
|
||||
return Err(Error::ExternalTypeMismatch);
|
||||
}
|
||||
|
||||
Ok((*external).object.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
/// The same as [`OnBeforeParse::external`], but returns a mutable reference.
|
||||
///
|
||||
/// This is unsafe as you must ensure that no other invocation of the plugin
|
||||
/// simultaneously holds a mutable reference to the external.
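///
/// # Example
/// A minimal sketch mirroring the [`OnBeforeParse::external`] example above
/// (`MyStruct` is illustrative and must have been created from NAPI):
/// ```rust
/// // SAFETY: no other invocation of the plugin may hold a reference to this
/// // external at the same time.
/// let external = match unsafe { handle.external_mut::<MyStruct>() } {
///     Ok(Some(external)) => external,
///     _ => {
///         handle.log_error("Could not get external object.");
///         return;
///     },
/// };
/// ```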
|
||||
pub unsafe fn external_mut<T: 'static + Sync>(&mut self) -> Result<Option<&mut T>> {
|
||||
if self.args_raw.external.is_null() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let external: *mut TaggedObject<T> = self.args_raw.external as *mut TaggedObject<T>;
|
||||
|
||||
unsafe {
|
||||
if (*external).type_id != TypeId::of::<T>() {
|
||||
return Err(Error::ExternalTypeMismatch);
|
||||
}
|
||||
|
||||
Ok((*external).object.as_mut())
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the input source code for the current file.
|
||||
///
|
||||
/// On Windows, this function may return an `Err(Error::Utf8(...))` if the
|
||||
/// source code contains invalid UTF-8.
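///
/// # Example
/// A minimal sketch, mirroring the crate-level docs:
/// ```rust
/// let input_source_code = match handle.input_source_code() {
///     Ok(source_str) => source_str,
///     Err(_) => {
///         handle.log_error("Failed to fetch source code!");
///         return;
///     }
/// };
/// ```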
|
||||
pub fn input_source_code(&self) -> Result<Cow<'_, str>> {
|
||||
let fetch_result = unsafe {
|
||||
((*self.result_raw).fetchSourceCode.unwrap())(self.args_raw, self.result_raw)
|
||||
};
|
||||
|
||||
if fetch_result != 0 {
|
||||
Err(Error::Unknown)
|
||||
} else {
|
||||
// SAFETY: We don't hand out mutable references to `result_raw` so dereferencing here is safe.
|
||||
unsafe {
|
||||
get_from_raw_str((*self.result_raw).source_ptr, (*self.result_raw).source_len)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the output source code for the current file.
|
||||
pub fn set_output_source_code(&mut self, source: String, loader: BunLoader) {
|
||||
let source_cap = source.capacity();
|
||||
let source = source.leak();
|
||||
let source_ptr = source.as_mut_ptr();
|
||||
let source_len = source.len();
|
||||
|
||||
if self.compilation_context.is_null() {
|
||||
self.compilation_context = Box::into_raw(Box::new(SourceCodeContext {
|
||||
source_ptr,
|
||||
source_len,
|
||||
source_cap,
|
||||
}));
|
||||
|
||||
// SAFETY: We don't hand out mutable references to `result_raw` so dereferencing it is safe.
|
||||
unsafe {
|
||||
(*self.result_raw).plugin_source_code_context =
|
||||
self.compilation_context as *mut c_void;
|
||||
(*self.result_raw).free_plugin_source_code_context =
|
||||
Some(free_plugin_source_code_context);
|
||||
}
|
||||
} else {
|
||||
unsafe {
|
||||
// SAFETY: If we're here we know that `compilation_context` is not null.
|
||||
let context = &mut *self.compilation_context;
|
||||
|
||||
drop(String::from_raw_parts(
|
||||
context.source_ptr,
|
||||
context.source_len,
|
||||
context.source_cap,
|
||||
));
|
||||
|
||||
context.source_ptr = source_ptr;
|
||||
context.source_len = source_len;
|
||||
context.source_cap = source_cap;
|
||||
}
|
||||
}
|
||||
|
||||
// SAFETY: We don't hand out mutable references to `result_raw` so dereferencing it is safe.
|
||||
unsafe {
|
||||
(*self.result_raw).loader = loader as u8;
|
||||
(*self.result_raw).source_ptr = source_ptr;
|
||||
(*self.result_raw).source_len = source_len;
|
||||
}
|
||||
}
|
||||
|
||||
/// Set the output loader for the current file.
|
||||
pub fn set_output_loader(&self, loader: BunLoader) {
|
||||
// SAFETY: We don't hand out mutable references to `result_raw` so dereferencing it is safe.
|
||||
unsafe {
|
||||
(*self.result_raw).loader = loader as u8;
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the output loader for the current file.
|
||||
pub fn output_loader(&self) -> BunLoader {
|
||||
unsafe { std::mem::transmute((*self.result_raw).loader as u32) }
|
||||
}
|
||||
|
||||
/// Log an error message.
|
||||
pub fn log_error(&self, message: &str) {
|
||||
self.log(message, BunLogLevel::BUN_LOG_LEVEL_ERROR)
|
||||
}
|
||||
|
||||
/// Log a message with the given level.
|
||||
pub fn log(&self, message: &str, level: BunLogLevel) {
|
||||
let mut log_options = sys::BunLogOptions {
|
||||
__struct_size: std::mem::size_of::<sys::BunLogOptions>(),
|
||||
message_ptr: message.as_ptr(),
|
||||
message_len: message.len(),
|
||||
path_ptr: self.args_raw.path_ptr,
|
||||
path_len: self.args_raw.path_len,
|
||||
source_line_text_ptr: std::ptr::null(),
|
||||
source_line_text_len: 0,
|
||||
level: level as i8,
|
||||
line: 0,
|
||||
lineEnd: 0,
|
||||
column: 0,
|
||||
columnEnd: 0,
|
||||
};
|
||||
unsafe {
|
||||
((*self.result_raw).log.unwrap())(self.args_raw, &mut log_options);
|
||||
}
|
||||
}
|
||||
}
|
||||
1
packages/bun-native-plugin-rs/wrapper.h
Normal file
@@ -0,0 +1 @@
|
||||
#include <bun-native-bundler-plugin-api/bundler_plugin.h>
|
||||
Binary file not shown.
@@ -97,6 +97,8 @@ export async function getBuild(): Promise<number> {
|
||||
}
|
||||
|
||||
export async function getSemver(tag?: string, build?: number): Promise<string> {
|
||||
const { tag_name: latest_tag_name } = await getRelease();
|
||||
const version = latest_tag_name.replace("bun-v", "");
|
||||
const { tag_name } = await getRelease(tag);
|
||||
if (tag_name !== "canary") {
|
||||
return tag_name.replace("bun-v", "");
|
||||
@@ -106,7 +108,7 @@ export async function getSemver(tag?: string, build?: number): Promise<string> {
|
||||
}
|
||||
const sha = await getSha(tag_name, "short");
|
||||
const date = new Date().toISOString().split("T")[0].replace(/-/g, "");
|
||||
return `${Bun.version}-canary.${date}.${build}+${sha}`;
|
||||
return `${version}-canary.${date}.${build}+${sha}`;
|
||||
}
|
||||
|
||||
export function formatTag(tag: string): string {
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.