mirror of
https://github.com/oven-sh/bun
synced 2026-02-07 17:38:46 +00:00
Compare commits
381 Commits
ciro/postg
...
kai/fix-no
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4b2351a12c | ||
|
|
7ee91a9912 | ||
|
|
56ad4cc4a6 | ||
|
|
d2acb2eac0 | ||
|
|
de7eafbdd1 | ||
|
|
4114986c3e | ||
|
|
8aa451c2dc | ||
|
|
497cef9759 | ||
|
|
dd57b95546 | ||
|
|
ea7c4986d7 | ||
|
|
6c7edf2dbe | ||
|
|
bf2f153f5c | ||
|
|
f64a4c4ace | ||
|
|
0216431c98 | ||
|
|
ae289c4858 | ||
|
|
5d1609fe5c | ||
|
|
471fe7b886 | ||
|
|
08222eda71 | ||
|
|
6f8c5959d0 | ||
|
|
40d5e745c9 | ||
|
|
225bfd54fa | ||
|
|
a6ca8c40d4 | ||
|
|
b52ad226a5 | ||
|
|
5f8f805db9 | ||
|
|
37c98bebd6 | ||
|
|
bd01df19c1 | ||
|
|
7fd16ebffa | ||
|
|
1bb211df56 | ||
|
|
bdd0b89f16 | ||
|
|
841f593b12 | ||
|
|
3afd19c73c | ||
|
|
b6a231add3 | ||
|
|
ca86bae5d5 | ||
|
|
215fdb4697 | ||
|
|
578bdf1cd6 | ||
|
|
cf2fa30639 | ||
|
|
5b3c58bdf5 | ||
|
|
0d6d4faa51 | ||
|
|
5e4642295a | ||
|
|
68f026b3cd | ||
|
|
5e9563833d | ||
|
|
6dd44cbeda | ||
|
|
a9ce4d40c2 | ||
|
|
663f00b62b | ||
|
|
769c6de751 | ||
|
|
f21fffd1bf | ||
|
|
d92d8dc886 | ||
|
|
c1a25d0948 | ||
|
|
6d127ba3f4 | ||
|
|
c3d9e8c7af | ||
|
|
c25e744837 | ||
|
|
dc01a5d6a8 | ||
|
|
c434b2c191 | ||
|
|
8ca0eb831d | ||
|
|
b19f13f5c4 | ||
|
|
bb3d570ad0 | ||
|
|
1baa1b6975 | ||
|
|
1789364215 | ||
|
|
a6f37b398c | ||
|
|
bb33176924 | ||
|
|
39af2a0a56 | ||
|
|
4a10bf22f7 | ||
|
|
33d3732d44 | ||
|
|
7f6bb30877 | ||
|
|
812288eb72 | ||
|
|
9cbe1ec300 | ||
|
|
4f8c1c9124 | ||
|
|
468a392fd5 | ||
|
|
f61f03fae3 | ||
|
|
a468d09064 | ||
|
|
898feb886f | ||
|
|
c5cd0e4575 | ||
|
|
f4a0fe40aa | ||
|
|
2d2e329ee3 | ||
|
|
618d2cb3ac | ||
|
|
6c915fc1d0 | ||
|
|
aa60ab3b65 | ||
|
|
f855ae8618 | ||
|
|
514a47cb54 | ||
|
|
1a1cf0a4d7 | ||
|
|
9fbe64619b | ||
|
|
78861829c9 | ||
|
|
642e0ba73c | ||
|
|
19d7a5fe53 | ||
|
|
c04a2d1dfc | ||
|
|
82cb82d828 | ||
|
|
4ae982be4e | ||
|
|
2d65063571 | ||
|
|
e817928981 | ||
|
|
746cf2cf01 | ||
|
|
9c1fde0132 | ||
|
|
f8f76a6fe0 | ||
|
|
4117af6e46 | ||
|
|
5bcaf32ba3 | ||
|
|
d01bfb5aa2 | ||
|
|
78b495aff5 | ||
|
|
6adb3954fe | ||
|
|
b152fbefcd | ||
|
|
8c0c97a273 | ||
|
|
95fcee8b76 | ||
|
|
c3f63bcdc4 | ||
|
|
3fc6ad4982 | ||
|
|
7a623fe3e8 | ||
|
|
f78ac6344b | ||
|
|
2283ed098f | ||
|
|
43dcb8fce1 | ||
|
|
0eb6a4c55e | ||
|
|
94260398b0 | ||
|
|
81690617c0 | ||
|
|
30eda1eca9 | ||
|
|
144db9ca52 | ||
|
|
a6a4ca1e49 | ||
|
|
e0414d0890 | ||
|
|
b191968681 | ||
|
|
314b4d9b44 | ||
|
|
2406936f33 | ||
|
|
24a3f96359 | ||
|
|
0e3e33072b | ||
|
|
3681aa9f0a | ||
|
|
c9d0fd51a9 | ||
|
|
4fe8b71437 | ||
|
|
1efab7f42d | ||
|
|
61a3f08595 | ||
|
|
363595fd31 | ||
|
|
173f67d81e | ||
|
|
05d5ab7489 | ||
|
|
b7bd5a4cf5 | ||
|
|
ab4da13785 | ||
|
|
ab3cb68f66 | ||
|
|
795f14c1d1 | ||
|
|
708ed00705 | ||
|
|
7a73f14da7 | ||
|
|
6ba0563d2d | ||
|
|
dec572eb4b | ||
|
|
01bbe3070a | ||
|
|
ff4eccc3b4 | ||
|
|
ededc168cf | ||
|
|
46c750fc12 | ||
|
|
b0a30ca422 | ||
|
|
1e649b4976 | ||
|
|
fc94db1efb | ||
|
|
958e531cc5 | ||
|
|
206d2edf12 | ||
|
|
ecb0098b89 | ||
|
|
ba767aa5ba | ||
|
|
46515d4865 | ||
|
|
3ef35d746a | ||
|
|
daece6a0ed | ||
|
|
adaee07138 | ||
|
|
8a0666acd1 | ||
|
|
fd1d6b10d4 | ||
|
|
d19c18580b | ||
|
|
f8e9adeb64 | ||
|
|
3c95d5d011 | ||
|
|
9ad3471fb0 | ||
|
|
cba3bda8ec | ||
|
|
5b1808b90b | ||
|
|
b023bb805b | ||
|
|
98bb5999a3 | ||
|
|
5949777ec3 | ||
|
|
7f9935a560 | ||
|
|
437d333978 | ||
|
|
c38eca222e | ||
|
|
d93122a656 | ||
|
|
b02fb2463f | ||
|
|
ba5490dafc | ||
|
|
3e085b5540 | ||
|
|
73e98663bb | ||
|
|
d09050127f | ||
|
|
d5a118e25f | ||
|
|
1911fa1e75 | ||
|
|
6dbf1bff4f | ||
|
|
a5a0539f26 | ||
|
|
3393b0e1d3 | ||
|
|
910efec0b7 | ||
|
|
dafd8156b0 | ||
|
|
befb269b2d | ||
|
|
39d8ade27c | ||
|
|
4fedc41545 | ||
|
|
eef79ce772 | ||
|
|
cf960b5c17 | ||
|
|
15f2bbb33a | ||
|
|
4ddb63e7e2 | ||
|
|
6603871617 | ||
|
|
e5c5033790 | ||
|
|
3791146476 | ||
|
|
07252d1755 | ||
|
|
910e479d29 | ||
|
|
d9c8f27bf9 | ||
|
|
28830f0294 | ||
|
|
266e033d6f | ||
|
|
9a6f033206 | ||
|
|
2aee62382f | ||
|
|
4103b738ff | ||
|
|
2810f39802 | ||
|
|
f73ef54edd | ||
|
|
f9718af6a5 | ||
|
|
f50114332f | ||
|
|
90852a37d5 | ||
|
|
2afb5e635d | ||
|
|
3170b88058 | ||
|
|
134f66c24d | ||
|
|
f37df906b4 | ||
|
|
357581c61a | ||
|
|
d8987ccdb8 | ||
|
|
fdd8d35845 | ||
|
|
ed1f25e5cc | ||
|
|
2646ea0956 | ||
|
|
9fa480ce9b | ||
|
|
83a2c245f3 | ||
|
|
32ddf343ee | ||
|
|
e11a68315b | ||
|
|
bceb0a2327 | ||
|
|
f439dacf21 | ||
|
|
9b0cdf01f9 | ||
|
|
544dd2497c | ||
|
|
0e7ed996d3 | ||
|
|
35513a9d6d | ||
|
|
bb8b46507e | ||
|
|
06d37bf644 | ||
|
|
f8979b05b1 | ||
|
|
6dd369e66b | ||
|
|
8358f4dc73 | ||
|
|
9dbe40ddba | ||
|
|
f54f4e6ebf | ||
|
|
adc00e0566 | ||
|
|
ec91e91fda | ||
|
|
d3b509e80a | ||
|
|
b11d631e41 | ||
|
|
440111f924 | ||
|
|
ccd72755dc | ||
|
|
59700068d3 | ||
|
|
ac8c6f093b | ||
|
|
956853f036 | ||
|
|
a8fa566101 | ||
|
|
7b25ce15eb | ||
|
|
c20c0dea92 | ||
|
|
86d4dbe143 | ||
|
|
c5df329772 | ||
|
|
e945146fde | ||
|
|
83f536f4da | ||
|
|
873b0a7540 | ||
|
|
d11a48398d | ||
|
|
563b3c0339 | ||
|
|
c785ab921b | ||
|
|
6e7240b6e7 | ||
|
|
2335e35a86 | ||
|
|
bd45a65f2b | ||
|
|
7993f4fa11 | ||
|
|
09a6a11a14 | ||
|
|
c17e05c191 | ||
|
|
9ea9925e9c | ||
|
|
469be87987 | ||
|
|
9490c30d47 | ||
|
|
1d8423ea57 | ||
|
|
0bee1c9b5d | ||
|
|
797958082c | ||
|
|
3a71be377e | ||
|
|
1de2319526 | ||
|
|
8b5fb349dd | ||
|
|
2b9abc20da | ||
|
|
d713001e35 | ||
|
|
b49f6d143e | ||
|
|
4cf9851747 | ||
|
|
657f5b9f6a | ||
|
|
86e421ad80 | ||
|
|
56f7c8887b | ||
|
|
62cabe9003 | ||
|
|
e34673ca45 | ||
|
|
7c13e637b8 | ||
|
|
9f3b0f754b | ||
|
|
07a391368f | ||
|
|
c30ef2ccc8 | ||
|
|
855b7101e6 | ||
|
|
fceeb228a8 | ||
|
|
85dcebedd7 | ||
|
|
0f29267a3e | ||
|
|
a152557096 | ||
|
|
80b742665e | ||
|
|
7978505b94 | ||
|
|
0dce7366e2 | ||
|
|
71f3089f4d | ||
|
|
07a217f773 | ||
|
|
d4b710287f | ||
|
|
3296a6edc9 | ||
|
|
ab92fc5fab | ||
|
|
f5dc0498f4 | ||
|
|
1c06dbd3ef | ||
|
|
a6d707a74e | ||
|
|
ce469474d8 | ||
|
|
c659b3b7d3 | ||
|
|
a60ae54751 | ||
|
|
3303a5de1f | ||
|
|
d8557ea982 | ||
|
|
e3b5927e73 | ||
|
|
066b1dacc4 | ||
|
|
7a20f515f7 | ||
|
|
afd023ac95 | ||
|
|
1d5da9ef77 | ||
|
|
7110c073ca | ||
|
|
03d945ee05 | ||
|
|
08116e43f4 | ||
|
|
7fab6701e5 | ||
|
|
30fe8d5258 | ||
|
|
a8a2403568 | ||
|
|
664c080d02 | ||
|
|
b5ed0f028f | ||
|
|
1293039002 | ||
|
|
8bb8193a39 | ||
|
|
dffc718b6a | ||
|
|
f15059face | ||
|
|
2d96ec0e21 | ||
|
|
b6dfd89928 | ||
|
|
936ae5a796 | ||
|
|
fe18b871f8 | ||
|
|
a1c4240940 | ||
|
|
dc4177f113 | ||
|
|
d2c4a9a57e | ||
|
|
fb6a48a35f | ||
|
|
5176ab58bc | ||
|
|
a669ff1243 | ||
|
|
91a52311de | ||
|
|
197a26fc16 | ||
|
|
059185f4ba | ||
|
|
838ca008cd | ||
|
|
8a0a88cd42 | ||
|
|
774bb8923d | ||
|
|
8b19e08882 | ||
|
|
84c4f96b3f | ||
|
|
73579e1254 | ||
|
|
19b0fed84f | ||
|
|
699997826f | ||
|
|
528d9a64e8 | ||
|
|
39b442b664 | ||
|
|
249227d4d6 | ||
|
|
71101e1fa3 | ||
|
|
c28d419b25 | ||
|
|
3587391920 | ||
|
|
43d7cfcb23 | ||
|
|
3ba398f482 | ||
|
|
600bc1cbd2 | ||
|
|
f71b440c4d | ||
|
|
d29e72f89c | ||
|
|
e04f461508 | ||
|
|
bdcca417ef | ||
|
|
b8aba83da6 | ||
|
|
b753e4b38b | ||
|
|
a7bc53b92c | ||
|
|
b2080c88f4 | ||
|
|
710f7790cf | ||
|
|
c44eb732ee | ||
|
|
e5e643d8bc | ||
|
|
d612cfff12 | ||
|
|
020c32bc73 | ||
|
|
a240093a97 | ||
|
|
e5ffd66649 | ||
|
|
5bae294c14 | ||
|
|
ea1ddb2740 | ||
|
|
6d1db2c8e9 | ||
|
|
8c571d8949 | ||
|
|
7be1bf3026 | ||
|
|
2d0e0c9195 | ||
|
|
b773e66d67 | ||
|
|
2fee09fc4d | ||
|
|
216e5b3f96 | ||
|
|
ed4175b80e | ||
|
|
a0c2a73730 | ||
|
|
1649c03824 | ||
|
|
49b2de93d0 | ||
|
|
ef4728c267 | ||
|
|
6a440aa946 | ||
|
|
6169f1053a | ||
|
|
85f617f97e | ||
|
|
23dc0fed71 | ||
|
|
afcf7b1eb6 | ||
|
|
2583f33a33 | ||
|
|
dfa2a6b60b | ||
|
|
e66ec2a10b | ||
|
|
eb8d465c50 | ||
|
|
418139358b | ||
|
|
1f5359705e |
832
.buildkite/ci.mjs
Normal file → Executable file
832
.buildkite/ci.mjs
Normal file → Executable file
File diff suppressed because it is too large
Load Diff
@@ -164,7 +164,9 @@ function upload_s3_file() {
|
||||
|
||||
function send_bench_webhook() {
|
||||
if [ -z "$BENCHMARK_URL" ]; then
|
||||
return 1
|
||||
echo "error: \$BENCHMARK_URL is not set"
|
||||
# exit 1 # TODO: this isn't live yet
|
||||
return
|
||||
fi
|
||||
|
||||
local tag="$1"
|
||||
@@ -200,6 +202,12 @@ function create_release() {
|
||||
bun-linux-x64-profile.zip
|
||||
bun-linux-x64-baseline.zip
|
||||
bun-linux-x64-baseline-profile.zip
|
||||
bun-linux-aarch64-musl.zip
|
||||
bun-linux-aarch64-musl-profile.zip
|
||||
bun-linux-x64-musl.zip
|
||||
bun-linux-x64-musl-profile.zip
|
||||
bun-linux-x64-musl-baseline.zip
|
||||
bun-linux-x64-musl-baseline-profile.zip
|
||||
bun-windows-x64.zip
|
||||
bun-windows-x64-profile.zip
|
||||
bun-windows-x64-baseline.zip
|
||||
|
||||
92
.github/workflows/update-cares.yml
vendored
Normal file
92
.github/workflows/update-cares.yml
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
name: Update c-ares
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check c-ares version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildCares.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildCares.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildCares.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/c-ares/c-ares/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildCares.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildCares.cmake
|
||||
commit-message: "deps: update c-ares to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update c-ares to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-cares-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates c-ares to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
|
||||
92
.github/workflows/update-libarchive.yml
vendored
Normal file
92
.github/workflows/update-libarchive.yml
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
name: Update libarchive
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 3 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check libarchive version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibArchive.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildLibArchive.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildLibArchive.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/libarchive/libarchive/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibArchive.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildLibArchive.cmake
|
||||
commit-message: "deps: update libarchive to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update libarchive to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-libarchive-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates libarchive to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libarchive.yml)
|
||||
92
.github/workflows/update-libdeflate.yml
vendored
Normal file
92
.github/workflows/update-libdeflate.yml
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
name: Update libdeflate
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 2 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check libdeflate version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibDeflate.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildLibDeflate.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildLibDeflate.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/ebiggers/libdeflate/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibDeflate.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildLibDeflate.cmake
|
||||
commit-message: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-libdeflate-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates libdeflate to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libdeflate.yml)
|
||||
92
.github/workflows/update-lolhtml.yml
vendored
Normal file
92
.github/workflows/update-lolhtml.yml
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
name: Update lolhtml
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 1 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check lolhtml version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLolHtml.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildLolHtml.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildLolHtml.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/cloudflare/lol-html/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLolHtml.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildLolHtml.cmake
|
||||
commit-message: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-lolhtml-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates lolhtml to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lolhtml.yml)
|
||||
92
.github/workflows/update-lshpack.yml
vendored
Normal file
92
.github/workflows/update-lshpack.yml
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
name: Update lshpack
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 5 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check lshpack version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLshpack.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildLshpack.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildLshpack.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/litespeedtech/ls-hpack/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLshpack.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildLshpack.cmake
|
||||
commit-message: "deps: update lshpack to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update lshpack to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-lshpack-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates lshpack to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lshpack.yml)
|
||||
109
.github/workflows/update-sqlite3.yml
vendored
Normal file
109
.github/workflows/update-sqlite3.yml
vendored
Normal file
@@ -0,0 +1,109 @@
|
||||
name: Update SQLite3
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 6 * * 0" # Run weekly
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check SQLite version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Get current version from the header file using SQLITE_VERSION_NUMBER
|
||||
CURRENT_VERSION_NUM=$(grep -o '#define SQLITE_VERSION_NUMBER [0-9]\+' src/bun.js/bindings/sqlite/sqlite3_local.h | awk '{print $3}' | tr -d '\n\r')
|
||||
if [ -z "$CURRENT_VERSION_NUM" ]; then
|
||||
echo "Error: Could not find SQLITE_VERSION_NUMBER in sqlite3_local.h"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Convert numeric version to semantic version for display
|
||||
CURRENT_MAJOR=$((CURRENT_VERSION_NUM / 1000000))
|
||||
CURRENT_MINOR=$((($CURRENT_VERSION_NUM / 1000) % 1000))
|
||||
CURRENT_PATCH=$((CURRENT_VERSION_NUM % 1000))
|
||||
CURRENT_VERSION="$CURRENT_MAJOR.$CURRENT_MINOR.$CURRENT_PATCH"
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
echo "current_num=$CURRENT_VERSION_NUM" >> $GITHUB_OUTPUT
|
||||
|
||||
# Fetch SQLite download page
|
||||
DOWNLOAD_PAGE=$(curl -sL https://sqlite.org/download.html)
|
||||
if [ -z "$DOWNLOAD_PAGE" ]; then
|
||||
echo "Error: Failed to fetch SQLite download page"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Extract latest version and year from the amalgamation link
|
||||
LATEST_INFO=$(echo "$DOWNLOAD_PAGE" | grep -o 'sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1)
|
||||
LATEST_YEAR=$(echo "$DOWNLOAD_PAGE" | grep -o '[0-9]\{4\}/sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1 | cut -d'/' -f1 | tr -d '\n\r')
|
||||
LATEST_VERSION_NUM=$(echo "$LATEST_INFO" | grep -o '[0-9]\{7\}' | tr -d '\n\r')
|
||||
|
||||
if [ -z "$LATEST_VERSION_NUM" ] || [ -z "$LATEST_YEAR" ]; then
|
||||
echo "Error: Could not extract latest version info"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Convert numeric version to semantic version for display
|
||||
LATEST_MAJOR=$((10#$LATEST_VERSION_NUM / 1000000))
|
||||
LATEST_MINOR=$((($LATEST_VERSION_NUM / 1000) % 1000))
|
||||
LATEST_PATCH=$((10#$LATEST_VERSION_NUM % 1000))
|
||||
LATEST_VERSION="$LATEST_MAJOR.$LATEST_MINOR.$LATEST_PATCH"
|
||||
|
||||
echo "latest=$LATEST_VERSION" >> $GITHUB_OUTPUT
|
||||
echo "latest_year=$LATEST_YEAR" >> $GITHUB_OUTPUT
|
||||
echo "latest_num=$LATEST_VERSION_NUM" >> $GITHUB_OUTPUT
|
||||
|
||||
# Debug output
|
||||
echo "Current version: $CURRENT_VERSION ($CURRENT_VERSION_NUM)"
|
||||
echo "Latest version: $LATEST_VERSION ($LATEST_VERSION_NUM)"
|
||||
|
||||
- name: Update SQLite if needed
|
||||
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TEMP_DIR=$(mktemp -d)
|
||||
cd $TEMP_DIR
|
||||
|
||||
echo "Downloading from: https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
|
||||
|
||||
# Download and extract latest version
|
||||
wget "https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
|
||||
unzip "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
|
||||
cd "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}"
|
||||
|
||||
# Add header comment and copy files
|
||||
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
|
||||
cat sqlite3.c >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
|
||||
|
||||
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
|
||||
cat sqlite3.h >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
src/bun.js/bindings/sqlite/sqlite3.c
|
||||
src/bun.js/bindings/sqlite/sqlite3_local.h
|
||||
commit-message: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
|
||||
title: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-sqlite-${{ steps.check-version.outputs.latest }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates SQLite to version ${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-sqlite3.yml)
|
||||
19
.gitignore
vendored
19
.gitignore
vendored
@@ -26,6 +26,7 @@
|
||||
*.db
|
||||
*.dmg
|
||||
*.dSYM
|
||||
*.generated.ts
|
||||
*.jsb
|
||||
*.lib
|
||||
*.log
|
||||
@@ -53,8 +54,8 @@
|
||||
/test-report.md
|
||||
/test.js
|
||||
/test.ts
|
||||
/testdir
|
||||
/test.zig
|
||||
/testdir
|
||||
build
|
||||
build.ninja
|
||||
bun-binary
|
||||
@@ -111,8 +112,10 @@ pnpm-lock.yaml
|
||||
profile.json
|
||||
README.md.template
|
||||
release/
|
||||
scripts/env.local
|
||||
sign.*.json
|
||||
sign.json
|
||||
src/bake/generated.ts
|
||||
src/bun.js/bindings-obj
|
||||
src/bun.js/bindings/GeneratedJS2Native.zig
|
||||
src/bun.js/debug-bindings-obj
|
||||
@@ -131,6 +134,7 @@ src/runtime.version
|
||||
src/tests.zig
|
||||
test.txt
|
||||
test/js/bun/glob/fixtures
|
||||
test/node.js/upstream
|
||||
tsconfig.tsbuildinfo
|
||||
txt.js
|
||||
x64
|
||||
@@ -142,6 +146,9 @@ test/node.js/upstream
|
||||
scripts/env.local
|
||||
*.generated.ts
|
||||
src/bake/generated.ts
|
||||
test/cli/install/registry/packages/publish-pkg-*
|
||||
test/cli/install/registry/packages/@secret/publish-pkg-8
|
||||
test/js/third_party/prisma/prisma/sqlite/dev.db-journal
|
||||
|
||||
# Dependencies
|
||||
/vendor
|
||||
@@ -149,22 +156,24 @@ src/bake/generated.ts
|
||||
# Dependencies (before CMake)
|
||||
# These can be removed in the far future
|
||||
/src/bun.js/WebKit
|
||||
/src/deps/WebKit
|
||||
/src/deps/boringssl
|
||||
/src/deps/brotli
|
||||
/src/deps/c*ares
|
||||
/src/deps/lol*html
|
||||
/src/deps/libarchive
|
||||
/src/deps/libdeflate
|
||||
/src/deps/libuv
|
||||
/src/deps/lol*html
|
||||
/src/deps/ls*hpack
|
||||
/src/deps/mimalloc
|
||||
/src/deps/picohttpparser
|
||||
/src/deps/tinycc
|
||||
/src/deps/zstd
|
||||
/src/deps/zlib
|
||||
/src/deps/WebKit
|
||||
/src/deps/zig
|
||||
/src/deps/zlib
|
||||
/src/deps/zstd
|
||||
|
||||
# Generated files
|
||||
|
||||
.buildkite/ci.yml
|
||||
*.sock
|
||||
scratch*.{js,ts,tsx,cjs,mjs}
|
||||
3
.vscode/launch.json
generated
vendored
3
.vscode/launch.json
generated
vendored
@@ -224,8 +224,11 @@
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
// "BUN_DEBUG_DEBUGGER": "1",
|
||||
// "BUN_DEBUG_INTERNAL_DEBUGGER": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
|
||||
// "BUN_INSPECT": "ws+unix:///var/folders/jk/8fzl9l5119598vsqrmphsw7m0000gn/T/tl15npi7qtf.sock?report=1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
// Don't pause when the GC runs while the debugger is open.
|
||||
|
||||
2
.vscode/settings.json
vendored
2
.vscode/settings.json
vendored
@@ -78,7 +78,7 @@
|
||||
"prettier.prettierPath": "./node_modules/prettier",
|
||||
|
||||
// TypeScript
|
||||
"typescript.tsdk": "${workspaceFolder}/node_modules/typescript/lib",
|
||||
"typescript.tsdk": "node_modules/typescript/lib",
|
||||
"[typescript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
},
|
||||
|
||||
91
.vscode/tasks.json
vendored
91
.vscode/tasks.json
vendored
@@ -2,50 +2,57 @@
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"type": "process",
|
||||
"label": "Install Dependencies",
|
||||
"command": "scripts/all-dependencies.sh",
|
||||
"windows": {
|
||||
"command": "scripts/all-dependencies.ps1",
|
||||
},
|
||||
"icon": {
|
||||
"id": "arrow-down",
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "process",
|
||||
"label": "Setup Environment",
|
||||
"dependsOn": ["Install Dependencies"],
|
||||
"command": "scripts/setup.sh",
|
||||
"windows": {
|
||||
"command": "scripts/setup.ps1",
|
||||
},
|
||||
"icon": {
|
||||
"id": "check",
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}",
|
||||
},
|
||||
},
|
||||
{
|
||||
"type": "process",
|
||||
"label": "Build Bun",
|
||||
"dependsOn": ["Setup Environment"],
|
||||
"command": "bun",
|
||||
"args": ["run", "build"],
|
||||
"icon": {
|
||||
"id": "gear",
|
||||
"type": "shell",
|
||||
"command": "bun run build",
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true,
|
||||
},
|
||||
"options": {
|
||||
"cwd": "${workspaceFolder}",
|
||||
},
|
||||
"isBuildCommand": true,
|
||||
"runOptions": {
|
||||
"instanceLimit": 1,
|
||||
"reevaluateOnRerun": true,
|
||||
"problemMatcher": [
|
||||
{
|
||||
"owner": "zig",
|
||||
"fileLocation": ["relative", "${workspaceFolder}"],
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "^(.+?):(\\d+):(\\d+): (error|warning|note): (.+)$",
|
||||
"file": 1,
|
||||
"line": 2,
|
||||
"column": 3,
|
||||
"severity": 4,
|
||||
"message": 5,
|
||||
},
|
||||
{
|
||||
"regexp": "^\\s+(.+)$",
|
||||
"message": 1,
|
||||
"loop": true,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"owner": "clang",
|
||||
"fileLocation": ["relative", "${workspaceFolder}"],
|
||||
"pattern": [
|
||||
{
|
||||
"regexp": "^([^:]+):(\\d+):(\\d+):\\s+(warning|error|note|remark):\\s+(.*)$",
|
||||
"file": 1,
|
||||
"line": 2,
|
||||
"column": 3,
|
||||
"severity": 4,
|
||||
"message": 5,
|
||||
},
|
||||
{
|
||||
"regexp": "^\\s*(.*)$",
|
||||
"message": 1,
|
||||
"loop": true,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "shared",
|
||||
"clear": true,
|
||||
},
|
||||
},
|
||||
],
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
Configuring a development environment for Bun can take 10-30 minutes depending on your internet connection and computer speed. You will need ~10GB of free disk space for the repository and build artifacts.
|
||||
|
||||
If you are using Windows, please refer to [this guide](/docs/project/building-windows)
|
||||
If you are using Windows, please refer to [this guide](/docs/project/building-windows.md)
|
||||
|
||||
{% details summary="For Ubuntu users" %}
|
||||
TL;DR: Ubuntu 22.04 is suggested.
|
||||
@@ -11,7 +11,7 @@ Bun currently requires `glibc >=2.32` in development which means if you're on Ub
|
||||
|
||||
Using your system's package manager, install Bun's dependencies:
|
||||
|
||||
{% codetabs %}
|
||||
{% codetabs group="os" %}
|
||||
|
||||
```bash#macOS (Homebrew)
|
||||
$ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby
|
||||
@@ -60,7 +60,7 @@ $ brew install bun
|
||||
|
||||
Bun requires LLVM 16 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
|
||||
|
||||
{% codetabs %}
|
||||
{% codetabs group="os" %}
|
||||
|
||||
```bash#macOS (Homebrew)
|
||||
$ brew install llvm@18
|
||||
@@ -97,7 +97,7 @@ $ which clang-16
|
||||
|
||||
If not, run this to manually add it:
|
||||
|
||||
{% codetabs %}
|
||||
{% codetabs group="os" %}
|
||||
|
||||
```bash#macOS (Homebrew)
|
||||
# use fish_add_path if you're using fish
|
||||
@@ -285,7 +285,7 @@ If you see this error when compiling, run:
|
||||
$ xcode-select --install
|
||||
```
|
||||
|
||||
## Cannot find `libatomic.a`
|
||||
### Cannot find `libatomic.a`
|
||||
|
||||
Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:
|
||||
|
||||
@@ -295,7 +295,7 @@ $ bun run build -DUSE_STATIC_LIBATOMIC=OFF
|
||||
|
||||
The built version of Bun may not work on other systems if compiled this way.
|
||||
|
||||
## ccache conflicts with building TinyCC on macOS
|
||||
### ccache conflicts with building TinyCC on macOS
|
||||
|
||||
If you run into issues with `ccache` when building TinyCC, try reinstalling ccache
|
||||
|
||||
@@ -303,3 +303,9 @@ If you run into issues with `ccache` when building TinyCC, try reinstalling ccac
|
||||
brew uninstall ccache
|
||||
brew install ccache
|
||||
```
|
||||
|
||||
## Using bun-debug
|
||||
|
||||
- Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging)
|
||||
- Enable logging for a specific zig scope: `BUN_DEBUG_EventLoop=1 bun-debug ...` (to allow `std.log.scoped(.EventLoop)`)
|
||||
- Bun transpiles every file it runs, to see the actual executed source in a debug build find it in `/tmp/bun-debug-src/...path/to/file`, for example the transpiled version of `/home/bun/index.ts` would be in `/tmp/bun-debug-src/home/bun/index.ts`
|
||||
|
||||
18
ci/linux/Dockerfile
Normal file
18
ci/linux/Dockerfile
Normal file
@@ -0,0 +1,18 @@
|
||||
ARG IMAGE=debian:11
|
||||
FROM $IMAGE
|
||||
COPY ./scripts/bootstrap.sh /tmp/bootstrap.sh
|
||||
ENV CI=true
|
||||
RUN sh /tmp/bootstrap.sh && rm -rf /tmp/*
|
||||
WORKDIR /workspace/bun
|
||||
COPY bunfig.toml bunfig.toml
|
||||
COPY package.json package.json
|
||||
COPY CMakeLists.txt CMakeLists.txt
|
||||
COPY cmake/ cmake/
|
||||
COPY scripts/ scripts/
|
||||
COPY patches/ patches/
|
||||
COPY *.zig ./
|
||||
COPY src/ src/
|
||||
COPY packages/ packages/
|
||||
COPY test/ test/
|
||||
RUN bun i
|
||||
RUN bun run build:ci
|
||||
27
ci/linux/scripts/set-hostname.sh
Normal file
27
ci/linux/scripts/set-hostname.sh
Normal file
@@ -0,0 +1,27 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This script sets the hostname of the current machine.
|
||||
|
||||
execute() {
|
||||
echo "$ $@" >&2
|
||||
if ! "$@"; then
|
||||
echo "Command failed: $@" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
main() {
|
||||
if [ "$#" -ne 1 ]; then
|
||||
echo "Usage: $0 <hostname>" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -f "$(which hostnamectl)" ]; then
|
||||
execute hostnamectl set-hostname "$1"
|
||||
else
|
||||
echo "Error: hostnamectl is not installed." >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
main "$@"
|
||||
22
ci/linux/scripts/start-tailscale.sh
Normal file
22
ci/linux/scripts/start-tailscale.sh
Normal file
@@ -0,0 +1,22 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This script starts tailscale on the current machine.
|
||||
|
||||
execute() {
|
||||
echo "$ $@" >&2
|
||||
if ! "$@"; then
|
||||
echo "Command failed: $@" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
main() {
|
||||
if [ "$#" -ne 1 ]; then
|
||||
echo "Usage: $0 <auth-key>" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
execute tailscale up --reset --ssh --accept-risk=lose-ssh --auth-key="$1"
|
||||
}
|
||||
|
||||
main "$@"
|
||||
@@ -2,7 +2,7 @@
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"bootstrap": "brew install gh jq cirruslabs/cli/tart cirruslabs/cli/sshpass hashicorp/tap/packer && packer init darwin",
|
||||
"login": "gh auth token | tart login ghcr.io --username $(gh api user --jq .login) --password-stdin",
|
||||
"login": "token=$(gh auth token); username=$(gh api user --jq .login); echo \"Login as $username...\"; echo \"$token\" | tart login ghcr.io --username \"$username\" --password-stdin; echo \"$token\" | docker login ghcr.io --username \"$username\" --password-stdin",
|
||||
"fetch:image-name": "echo ghcr.io/oven-sh/bun-vm",
|
||||
"fetch:darwin-version": "echo 1",
|
||||
"fetch:macos-version": "sw_vers -productVersion | cut -d. -f1",
|
||||
|
||||
@@ -265,7 +265,7 @@ if(ENABLE_LTO)
|
||||
endif()
|
||||
|
||||
# --- Remapping ---
|
||||
if(UNIX)
|
||||
if(UNIX AND CI)
|
||||
register_compiler_flags(
|
||||
DESCRIPTION "Remap source files"
|
||||
-ffile-prefix-map=${CWD}=.
|
||||
|
||||
@@ -105,14 +105,6 @@ else()
|
||||
unsupported(CMAKE_HOST_SYSTEM_NAME)
|
||||
endif()
|
||||
|
||||
if(EXISTS "/lib/ld-musl-aarch64.so.1")
|
||||
set(IS_MUSL ON)
|
||||
elseif(EXISTS "/lib/ld-musl-x86_64.so.1")
|
||||
set(IS_MUSL ON)
|
||||
else()
|
||||
set(IS_MUSL OFF)
|
||||
endif()
|
||||
|
||||
if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
|
||||
set(HOST_OS "aarch64")
|
||||
elseif(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64")
|
||||
|
||||
@@ -10,7 +10,6 @@ optionx(GITHUB_ACTIONS BOOL "If GitHub Actions is enabled" DEFAULT OFF)
|
||||
|
||||
if(BUILDKITE)
|
||||
optionx(BUILDKITE_COMMIT STRING "The commit hash")
|
||||
optionx(BUILDKITE_MESSAGE STRING "The commit message")
|
||||
endif()
|
||||
|
||||
optionx(CMAKE_BUILD_TYPE "Debug|Release|RelWithDebInfo|MinSizeRel" "The build type to use" REQUIRED)
|
||||
@@ -49,6 +48,16 @@ else()
|
||||
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
|
||||
endif()
|
||||
|
||||
if(LINUX)
|
||||
if(EXISTS "/etc/alpine-release")
|
||||
set(DEFAULT_ABI "musl")
|
||||
else()
|
||||
set(DEFAULT_ABI "gnu")
|
||||
endif()
|
||||
|
||||
optionx(ABI "musl|gnu" "The ABI to use (e.g. musl, gnu)" DEFAULT ${DEFAULT_ABI})
|
||||
endif()
|
||||
|
||||
if(ARCH STREQUAL "x64")
|
||||
optionx(ENABLE_BASELINE BOOL "If baseline features should be used for older CPUs (e.g. disables AVX, AVX2)" DEFAULT OFF)
|
||||
endif()
|
||||
@@ -56,14 +65,7 @@ endif()
|
||||
optionx(ENABLE_LOGS BOOL "If debug logs should be enabled" DEFAULT ${DEBUG})
|
||||
optionx(ENABLE_ASSERTIONS BOOL "If debug assertions should be enabled" DEFAULT ${DEBUG})
|
||||
|
||||
if(BUILDKITE_MESSAGE AND BUILDKITE_MESSAGE MATCHES "\\[release build\\]")
|
||||
message(STATUS "Switched to release build, since commit message contains: \"[release build]\"")
|
||||
set(DEFAULT_CANARY OFF)
|
||||
else()
|
||||
set(DEFAULT_CANARY ON)
|
||||
endif()
|
||||
|
||||
optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ${DEFAULT_CANARY})
|
||||
optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ON)
|
||||
|
||||
if(ENABLE_CANARY AND BUILDKITE)
|
||||
execute_process(
|
||||
|
||||
@@ -484,14 +484,12 @@ set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
|
||||
|
||||
|
||||
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
|
||||
set(IS_ARM64 ON)
|
||||
if(APPLE)
|
||||
set(ZIG_CPU "apple_m1")
|
||||
else()
|
||||
set(ZIG_CPU "native")
|
||||
endif()
|
||||
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64")
|
||||
set(IS_X86_64 ON)
|
||||
if(ENABLE_BASELINE)
|
||||
set(ZIG_CPU "nehalem")
|
||||
else()
|
||||
@@ -528,6 +526,7 @@ register_command(
|
||||
-Dcanary=${CANARY_REVISION}
|
||||
-Dcodegen_path=${CODEGEN_PATH}
|
||||
-Dcodegen_embed=$<IF:$<BOOL:${CODEGEN_EMBED}>,true,false>
|
||||
--prominent-compile-errors
|
||||
${ZIG_FLAGS_BUN}
|
||||
ARTIFACTS
|
||||
${BUN_ZIG_OUTPUT}
|
||||
@@ -760,8 +759,8 @@ if(NOT WIN32)
|
||||
)
|
||||
if(DEBUG)
|
||||
# TODO: this shouldn't be necessary long term
|
||||
if (NOT IS_MUSL)
|
||||
set(ABI_PUBLIC_FLAGS
|
||||
if (NOT ABI STREQUAL "musl")
|
||||
target_compile_options(${bun} PUBLIC
|
||||
-fsanitize=null
|
||||
-fsanitize-recover=all
|
||||
-fsanitize=bounds
|
||||
@@ -772,14 +771,9 @@ if(NOT WIN32)
|
||||
-fsanitize=returns-nonnull-attribute
|
||||
-fsanitize=unreachable
|
||||
)
|
||||
set(ABI_PRIVATE_FLAGS
|
||||
target_link_libraries(${bun} PRIVATE
|
||||
-fsanitize=null
|
||||
)
|
||||
else()
|
||||
set(ABI_PUBLIC_FLAGS
|
||||
)
|
||||
set(ABI_PRIVATE_FLAGS
|
||||
)
|
||||
endif()
|
||||
|
||||
target_compile_options(${bun} PUBLIC
|
||||
@@ -797,10 +791,6 @@ if(NOT WIN32)
|
||||
-Wno-unused-function
|
||||
-Wno-nullability-completeness
|
||||
-Werror
|
||||
${ABI_PUBLIC_FLAGS}
|
||||
)
|
||||
target_link_libraries(${bun} PRIVATE
|
||||
${ABI_PRIVATE_FLAGS}
|
||||
)
|
||||
else()
|
||||
# Leave -Werror=unused off in release builds so we avoid errors from being used in ASSERT
|
||||
@@ -845,65 +835,48 @@ if(WIN32)
|
||||
/delayload:IPHLPAPI.dll
|
||||
)
|
||||
endif()
|
||||
elseif(APPLE)
|
||||
endif()
|
||||
|
||||
if(APPLE)
|
||||
target_link_options(${bun} PUBLIC
|
||||
-dead_strip
|
||||
-dead_strip_dylibs
|
||||
-Wl,-ld_new
|
||||
-Wl,-no_compact_unwind
|
||||
-Wl,-stack_size,0x1200000
|
||||
-fno-keep-static-consts
|
||||
-Wl,-map,${bun}.linker-map
|
||||
)
|
||||
else()
|
||||
# Try to use lld-16 if available, otherwise fallback to lld
|
||||
# Cache it so we don't have to re-run CMake to pick it up
|
||||
if((NOT DEFINED LLD_NAME) AND (NOT CI OR BUN_LINK_ONLY))
|
||||
find_program(LLD_EXECUTABLE_NAME lld-${LLVM_VERSION_MAJOR})
|
||||
endif()
|
||||
|
||||
if(NOT LLD_EXECUTABLE_NAME)
|
||||
if(CI)
|
||||
# Ensure we don't use a differing version of lld in CI vs clang
|
||||
message(FATAL_ERROR "lld-${LLVM_VERSION_MAJOR} not found. Please make sure you have LLVM ${LLVM_VERSION_MAJOR}.x installed and set to lld-${LLVM_VERSION_MAJOR}")
|
||||
endif()
|
||||
|
||||
# To make it easier for contributors, allow differing versions of lld vs clang/cmake
|
||||
find_program(LLD_EXECUTABLE_NAME lld)
|
||||
if(LINUX)
|
||||
if(NOT ABI STREQUAL "musl")
|
||||
if(ARCH STREQUAL "aarch64")
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,--wrap=fcntl64
|
||||
-Wl,--wrap=statx
|
||||
)
|
||||
endif()
|
||||
|
||||
if(ARCH STREQUAL "x64")
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,--wrap=fcntl
|
||||
-Wl,--wrap=fcntl64
|
||||
-Wl,--wrap=fstat
|
||||
-Wl,--wrap=fstat64
|
||||
-Wl,--wrap=fstatat
|
||||
-Wl,--wrap=fstatat64
|
||||
-Wl,--wrap=lstat
|
||||
-Wl,--wrap=lstat64
|
||||
-Wl,--wrap=mknod
|
||||
-Wl,--wrap=mknodat
|
||||
-Wl,--wrap=stat
|
||||
-Wl,--wrap=stat64
|
||||
-Wl,--wrap=statx
|
||||
)
|
||||
endif()
|
||||
|
||||
if(NOT LLD_EXECUTABLE_NAME)
|
||||
message(FATAL_ERROR "LLD not found. Please make sure you have LLVM ${LLVM_VERSION_MAJOR}.x installed and lld is available in your PATH as lld-${LLVM_VERSION_MAJOR}")
|
||||
endif()
|
||||
|
||||
# normalize to basename so it can be used with -fuse-ld
|
||||
get_filename_component(LLD_NAME ${LLD_EXECUTABLE_NAME} NAME CACHE)
|
||||
message(STATUS "Using linker: ${LLD_NAME} (${LLD_EXECUTABLE_NAME})")
|
||||
elseif(NOT DEFINED LLD_NAME)
|
||||
set(LLD_NAME lld-${LLVM_VERSION_MAJOR})
|
||||
endif()
|
||||
|
||||
if (IS_ARM64)
|
||||
set(ARCH_WRAP_FLAGS
|
||||
-Wl,--wrap=fcntl64
|
||||
-Wl,--wrap=statx
|
||||
)
|
||||
elseif(IS_X86_64)
|
||||
set(ARCH_WRAP_FLAGS
|
||||
-Wl,--wrap=fcntl
|
||||
-Wl,--wrap=fcntl64
|
||||
-Wl,--wrap=fstat
|
||||
-Wl,--wrap=fstat64
|
||||
-Wl,--wrap=fstatat
|
||||
-Wl,--wrap=fstatat64
|
||||
-Wl,--wrap=lstat
|
||||
-Wl,--wrap=lstat64
|
||||
-Wl,--wrap=mknod
|
||||
-Wl,--wrap=mknodat
|
||||
-Wl,--wrap=stat
|
||||
-Wl,--wrap=stat64
|
||||
-Wl,--wrap=statx
|
||||
)
|
||||
endif()
|
||||
|
||||
if (NOT IS_MUSL)
|
||||
set(ABI_WRAP_FLAGS
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,--wrap=cosf
|
||||
-Wl,--wrap=exp
|
||||
-Wl,--wrap=expf
|
||||
@@ -920,26 +893,37 @@ else()
|
||||
-Wl,--wrap=sinf
|
||||
-Wl,--wrap=tanf
|
||||
)
|
||||
endif()
|
||||
|
||||
if(NOT ABI STREQUAL "musl")
|
||||
target_link_options(${bun} PUBLIC
|
||||
-static-libstdc++
|
||||
-static-libgcc
|
||||
)
|
||||
else()
|
||||
set(ABI_WRAP_FLAGS
|
||||
target_link_options(${bun} PUBLIC
|
||||
-lstdc++
|
||||
-lgcc
|
||||
)
|
||||
endif()
|
||||
|
||||
target_link_options(${bun} PUBLIC
|
||||
-fuse-ld=${LLD_NAME}
|
||||
--ld-path=${LLD_PROGRAM}
|
||||
-fno-pic
|
||||
-static-libstdc++
|
||||
-static-libgcc
|
||||
-Wl,-no-pie
|
||||
-Wl,-icf=safe
|
||||
-Wl,--as-needed
|
||||
-Wl,--gc-sections
|
||||
-Wl,-z,stack-size=12800000
|
||||
${ARCH_WRAP_FLAGS}
|
||||
${ABI_WRAP_FLAGS}
|
||||
-Wl,--compress-debug-sections=zlib
|
||||
-Wl,-z,lazy
|
||||
-Wl,-z,norelro
|
||||
-Wl,-z,combreloc
|
||||
-Wl,--no-eh-frame-hdr
|
||||
-Wl,--sort-section=name
|
||||
-Wl,--hash-style=gnu
|
||||
-Wl,--build-id=sha1 # Better for debugging than default
|
||||
-Wl,-Map=${bun}.linker-map
|
||||
)
|
||||
endif()
|
||||
|
||||
@@ -1079,6 +1063,18 @@ endif()
|
||||
# --- Packaging ---
|
||||
|
||||
if(NOT BUN_CPP_ONLY)
|
||||
set(CMAKE_STRIP_FLAGS "")
|
||||
if(APPLE)
|
||||
# We do not build with exceptions enabled. These are generated by lolhtml
|
||||
# and other dependencies. We build lolhtml with abort on panic, so it
|
||||
# shouldn't be including these in the first place.
|
||||
set(CMAKE_STRIP_FLAGS --remove-section=__TEXT,__eh_frame --remove-section=__TEXT,__unwind_info --remove-section=__TEXT,__gcc_except_tab)
|
||||
elseif(LINUX AND NOT ABI STREQUAL "musl")
|
||||
# When you use llvm-strip to do this, it doesn't delete it from the binary and instead keeps it as [LOAD #2 [R]]
|
||||
# So, we must use GNU strip to do this.
|
||||
set(CMAKE_STRIP_FLAGS -R .eh_frame -R .gcc_except_table)
|
||||
endif()
|
||||
|
||||
if(bunStrip)
|
||||
register_command(
|
||||
TARGET
|
||||
@@ -1090,6 +1086,7 @@ if(NOT BUN_CPP_ONLY)
|
||||
COMMAND
|
||||
${CMAKE_STRIP}
|
||||
${bunExe}
|
||||
${CMAKE_STRIP_FLAGS}
|
||||
--strip-all
|
||||
--strip-debug
|
||||
--discard-all
|
||||
@@ -1165,10 +1162,12 @@ if(NOT BUN_CPP_ONLY)
|
||||
endif()
|
||||
|
||||
if(CI)
|
||||
set(bunTriplet bun-${OS}-${ARCH})
|
||||
if(LINUX AND ABI STREQUAL "musl")
|
||||
set(bunTriplet ${bunTriplet}-musl)
|
||||
endif()
|
||||
if(ENABLE_BASELINE)
|
||||
set(bunTriplet bun-${OS}-${ARCH}-baseline)
|
||||
else()
|
||||
set(bunTriplet bun-${OS}-${ARCH})
|
||||
set(bunTriplet ${bunTriplet}-baseline)
|
||||
endif()
|
||||
string(REPLACE bun ${bunTriplet} bunPath ${bun})
|
||||
set(bunFiles ${bunExe} features.json)
|
||||
@@ -1177,6 +1176,12 @@ if(NOT BUN_CPP_ONLY)
|
||||
elseif(APPLE)
|
||||
list(APPEND bunFiles ${bun}.dSYM)
|
||||
endif()
|
||||
|
||||
if(APPLE OR LINUX)
|
||||
list(APPEND bunFiles ${bun}.linker-map)
|
||||
endif()
|
||||
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
${bun}
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
c-ares/c-ares
|
||||
COMMIT
|
||||
d1722e6e8acaf10eb73fa995798a9cd421d9f85e
|
||||
41ee334af3e3d0027dca5e477855d0244936bd49
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
ebiggers/libdeflate
|
||||
COMMIT
|
||||
dc76454a39e7e83b68c3704b6e3784654f8d5ac5
|
||||
9d624d1d8ba82c690d6d6be1d0a961fc5a983ea4
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
cloudflare/lol-html
|
||||
COMMIT
|
||||
8d4c273ded322193d017042d1f48df2766b0f88b
|
||||
4f8becea13a0021c8b71abd2dcc5899384973b66
|
||||
)
|
||||
|
||||
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
|
||||
@@ -26,6 +26,13 @@ if(RELEASE)
|
||||
list(APPEND LOLHTML_BUILD_ARGS --release)
|
||||
endif()
|
||||
|
||||
# Windows requires unwind tables, apparently.
|
||||
if (NOT WIN32)
|
||||
# The encoded escape sequences are intentional. They're how you delimit multiple arguments in a single environment variable.
|
||||
# Also add rust optimization flag for smaller binary size, but not huge speed penalty.
|
||||
set(RUSTFLAGS "-Cpanic=abort-Cdebuginfo=0-Cforce-unwind-tables=no-Copt-level=s")
|
||||
endif()
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
lolhtml
|
||||
@@ -37,6 +44,11 @@ register_command(
|
||||
${LOLHTML_BUILD_ARGS}
|
||||
ARTIFACTS
|
||||
${LOLHTML_LIBRARY}
|
||||
ENVIRONMENT
|
||||
CARGO_TERM_COLOR=always
|
||||
CARGO_TERM_VERBOSE=true
|
||||
CARGO_TERM_DIAGNOSTIC=true
|
||||
CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
|
||||
)
|
||||
|
||||
target_link_libraries(${bun} PRIVATE ${LOLHTML_LIBRARY})
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
litespeedtech/ls-hpack
|
||||
COMMIT
|
||||
3d0f1fc1d6e66a642e7a98c55deb38aa986eb4b0
|
||||
32e96f10593c7cb8553cd8c9c12721100ae9e924
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
|
||||
6
cmake/toolchains/linux-aarch64-musl.cmake
Normal file
6
cmake/toolchains/linux-aarch64-musl.cmake
Normal file
@@ -0,0 +1,6 @@
|
||||
set(CMAKE_SYSTEM_NAME Linux)
|
||||
set(CMAKE_SYSTEM_PROCESSOR aarch64)
|
||||
set(ABI musl)
|
||||
|
||||
set(CMAKE_C_COMPILER_WORKS ON)
|
||||
set(CMAKE_CXX_COMPILER_WORKS ON)
|
||||
@@ -1,5 +1,6 @@
|
||||
set(CMAKE_SYSTEM_NAME Linux)
|
||||
set(CMAKE_SYSTEM_PROCESSOR aarch64)
|
||||
set(ABI gnu)
|
||||
|
||||
set(CMAKE_C_COMPILER_WORKS ON)
|
||||
set(CMAKE_CXX_COMPILER_WORKS ON)
|
||||
@@ -1,6 +1,7 @@
|
||||
set(CMAKE_SYSTEM_NAME Linux)
|
||||
set(CMAKE_SYSTEM_PROCESSOR x64)
|
||||
set(ENABLE_BASELINE ON)
|
||||
set(ABI gnu)
|
||||
|
||||
set(CMAKE_C_COMPILER_WORKS ON)
|
||||
set(CMAKE_CXX_COMPILER_WORKS ON)
|
||||
7
cmake/toolchains/linux-x64-musl-baseline.cmake
Normal file
7
cmake/toolchains/linux-x64-musl-baseline.cmake
Normal file
@@ -0,0 +1,7 @@
|
||||
set(CMAKE_SYSTEM_NAME Linux)
|
||||
set(CMAKE_SYSTEM_PROCESSOR x64)
|
||||
set(ENABLE_BASELINE ON)
|
||||
set(ABI musl)
|
||||
|
||||
set(CMAKE_C_COMPILER_WORKS ON)
|
||||
set(CMAKE_CXX_COMPILER_WORKS ON)
|
||||
6
cmake/toolchains/linux-x64-musl.cmake
Normal file
6
cmake/toolchains/linux-x64-musl.cmake
Normal file
@@ -0,0 +1,6 @@
|
||||
set(CMAKE_SYSTEM_NAME Linux)
|
||||
set(CMAKE_SYSTEM_PROCESSOR x64)
|
||||
set(ABI musl)
|
||||
|
||||
set(CMAKE_C_COMPILER_WORKS ON)
|
||||
set(CMAKE_CXX_COMPILER_WORKS ON)
|
||||
@@ -1,5 +1,6 @@
|
||||
set(CMAKE_SYSTEM_NAME Linux)
|
||||
set(CMAKE_SYSTEM_PROCESSOR x64)
|
||||
set(ABI gnu)
|
||||
|
||||
set(CMAKE_C_COMPILER_WORKS ON)
|
||||
set(CMAKE_CXX_COMPILER_WORKS ON)
|
||||
|
||||
@@ -29,7 +29,7 @@ execute_process(
|
||||
)
|
||||
|
||||
if(NOT GIT_DIFF_RESULT EQUAL 0)
|
||||
message(${WARNING} "Command failed: ${GIT_DIFF_COMMAND} ${GIT_DIFF_ERROR}")
|
||||
message(WARNING "Command failed: ${GIT_DIFF_COMMAND} ${GIT_DIFF_ERROR}")
|
||||
return()
|
||||
endif()
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ if(NOT ENABLE_LLVM)
|
||||
return()
|
||||
endif()
|
||||
|
||||
if(CMAKE_HOST_WIN32 OR CMAKE_HOST_APPLE OR IS_MUSL)
|
||||
if(CMAKE_HOST_WIN32 OR CMAKE_HOST_APPLE OR EXISTS "/etc/alpine-release")
|
||||
set(DEFAULT_LLVM_VERSION "18.1.8")
|
||||
else()
|
||||
set(DEFAULT_LLVM_VERSION "16.0.6")
|
||||
@@ -52,6 +52,7 @@ if(UNIX)
|
||||
/usr/lib/llvm-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}.${LLVM_VERSION_PATCH}/bin
|
||||
/usr/lib/llvm-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}/bin
|
||||
/usr/lib/llvm-${LLVM_VERSION_MAJOR}/bin
|
||||
/usr/lib/llvm${LLVM_VERSION_MAJOR}/bin
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
@@ -108,8 +109,23 @@ else()
|
||||
find_llvm_command(CMAKE_CXX_COMPILER clang++)
|
||||
find_llvm_command(CMAKE_LINKER llvm-link)
|
||||
find_llvm_command(CMAKE_AR llvm-ar)
|
||||
find_llvm_command(CMAKE_STRIP llvm-strip)
|
||||
if (LINUX)
|
||||
# On Linux, strip ends up being more useful for us.
|
||||
find_command(
|
||||
VARIABLE
|
||||
CMAKE_STRIP
|
||||
COMMAND
|
||||
strip
|
||||
REQUIRED
|
||||
ON
|
||||
)
|
||||
else()
|
||||
find_llvm_command(CMAKE_STRIP llvm-strip)
|
||||
endif()
|
||||
find_llvm_command(CMAKE_RANLIB llvm-ranlib)
|
||||
if(LINUX)
|
||||
find_llvm_command(LLD_PROGRAM ld.lld)
|
||||
endif()
|
||||
if(APPLE)
|
||||
find_llvm_command(CMAKE_DSYMUTIL dsymutil)
|
||||
endif()
|
||||
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION 73b551e25d97e463e8e2c86cb819b8639fcbda06)
|
||||
set(WEBKIT_VERSION 8f9ae4f01a047c666ef548864294e01df731d4ea)
|
||||
endif()
|
||||
|
||||
if(WEBKIT_LOCAL)
|
||||
@@ -63,7 +63,7 @@ else()
|
||||
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
|
||||
endif()
|
||||
|
||||
if(IS_MUSL)
|
||||
if(LINUX AND ABI STREQUAL "musl")
|
||||
set(WEBKIT_SUFFIX "-musl")
|
||||
endif()
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ if(APPLE)
|
||||
elseif(WIN32)
|
||||
set(DEFAULT_ZIG_TARGET ${DEFAULT_ZIG_ARCH}-windows-msvc)
|
||||
elseif(LINUX)
|
||||
if(IS_MUSL)
|
||||
if(ABI STREQUAL "musl")
|
||||
set(DEFAULT_ZIG_TARGET ${DEFAULT_ZIG_ARCH}-linux-musl)
|
||||
else()
|
||||
set(DEFAULT_ZIG_TARGET ${DEFAULT_ZIG_ARCH}-linux-gnu)
|
||||
|
||||
@@ -1,30 +1,13 @@
|
||||
FROM alpine:3.18 AS build
|
||||
FROM alpine:3.20 AS build
|
||||
|
||||
# https://github.com/oven-sh/bun/releases
|
||||
ARG BUN_VERSION=latest
|
||||
|
||||
# TODO: Instead of downloading glibc from a third-party source, we should
|
||||
# build it from source. This is a temporary solution.
|
||||
# See: https://github.com/sgerrand/alpine-pkg-glibc
|
||||
|
||||
# https://github.com/sgerrand/alpine-pkg-glibc/releases
|
||||
# https://github.com/sgerrand/alpine-pkg-glibc/issues/176
|
||||
ARG GLIBC_VERSION=2.34-r0
|
||||
|
||||
# https://github.com/oven-sh/bun/issues/5545#issuecomment-1722461083
|
||||
ARG GLIBC_VERSION_AARCH64=2.26-r1
|
||||
|
||||
RUN apk --no-cache add \
|
||||
ca-certificates \
|
||||
curl \
|
||||
dirmngr \
|
||||
gpg \
|
||||
gpg-agent \
|
||||
unzip \
|
||||
RUN apk --no-cache add ca-certificates curl dirmngr gpg gpg-agent unzip \
|
||||
&& arch="$(apk --print-arch)" \
|
||||
&& case "${arch##*-}" in \
|
||||
x86_64) build="x64-baseline";; \
|
||||
aarch64) build="aarch64";; \
|
||||
x86_64) build="x64-musl-baseline";; \
|
||||
aarch64) build="aarch64-musl";; \
|
||||
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
|
||||
esac \
|
||||
&& version="$BUN_VERSION" \
|
||||
@@ -59,37 +42,9 @@ RUN apk --no-cache add \
|
||||
&& unzip "bun-linux-$build.zip" \
|
||||
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
|
||||
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
|
||||
&& chmod +x /usr/local/bin/bun \
|
||||
&& cd /tmp \
|
||||
&& case "${arch##*-}" in \
|
||||
x86_64) curl "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-${GLIBC_VERSION}.apk" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
|| (echo "error: failed to download: glibc v${GLIBC_VERSION}" && exit 1) \
|
||||
&& mv "glibc-${GLIBC_VERSION}.apk" glibc.apk \
|
||||
&& curl "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-bin-${GLIBC_VERSION}.apk" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
|| (echo "error: failed to download: glibc-bin v${GLIBC_VERSION}" && exit 1) \
|
||||
&& mv "glibc-bin-${GLIBC_VERSION}.apk" glibc-bin.apk ;; \
|
||||
aarch64) curl "https://raw.githubusercontent.com/squishyu/alpine-pkg-glibc-aarch64-bin/master/glibc-${GLIBC_VERSION_AARCH64}.apk" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
|| (echo "error: failed to download: glibc v${GLIBC_VERSION_AARCH64}" && exit 1) \
|
||||
&& mv "glibc-${GLIBC_VERSION_AARCH64}.apk" glibc.apk \
|
||||
&& curl "https://raw.githubusercontent.com/squishyu/alpine-pkg-glibc-aarch64-bin/master/glibc-bin-${GLIBC_VERSION_AARCH64}.apk" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
|| (echo "error: failed to download: glibc-bin v${GLIBC_VERSION_AARCH64}" && exit 1) \
|
||||
&& mv "glibc-bin-${GLIBC_VERSION_AARCH64}.apk" glibc-bin.apk ;; \
|
||||
*) echo "error: unsupported architecture '$arch'"; exit 1 ;; \
|
||||
esac
|
||||
&& chmod +x /usr/local/bin/bun
|
||||
|
||||
FROM alpine:3.18
|
||||
FROM alpine:3.20
|
||||
|
||||
# Disable the runtime transpiler cache by default inside Docker containers.
|
||||
# On ephemeral containers, the cache is not useful
|
||||
@@ -107,10 +62,8 @@ COPY docker-entrypoint.sh /usr/local/bin/
|
||||
RUN --mount=type=bind,from=build,source=/tmp,target=/tmp \
|
||||
addgroup -g 1000 bun \
|
||||
&& adduser -u 1000 -G bun -s /bin/sh -D bun \
|
||||
&& apk --no-cache --force-overwrite --allow-untrusted add \
|
||||
/tmp/glibc.apk \
|
||||
/tmp/glibc-bin.apk \
|
||||
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
|
||||
&& apk add libgcc libstdc++ \
|
||||
&& which bun \
|
||||
&& which bunx \
|
||||
&& bun --version
|
||||
|
||||
@@ -75,7 +75,7 @@ To instead throw an error when a parameter is missing and allow binding without
|
||||
import { Database } from "bun:sqlite";
|
||||
|
||||
const strict = new Database(
|
||||
":memory:",
|
||||
":memory:",
|
||||
{ strict: true }
|
||||
);
|
||||
|
||||
@@ -177,7 +177,7 @@ const query = db.prepare("SELECT * FROM foo WHERE bar = ?");
|
||||
|
||||
## WAL mode
|
||||
|
||||
SQLite supports [write-ahead log mode](https://www.sqlite.org/wal.html) (WAL) which dramatically improves performance, especially in situations with many concurrent writes. It's broadly recommended to enable WAL mode for most typical applications.
|
||||
SQLite supports [write-ahead log mode](https://www.sqlite.org/wal.html) (WAL) which dramatically improves performance, especially in situations with many concurrent readers and a single writer. It's broadly recommended to enable WAL mode for most typical applications.
|
||||
|
||||
To enable WAL mode, run this pragma query at the beginning of your application:
|
||||
|
||||
|
||||
@@ -55,6 +55,51 @@ $ bun test ./test/specific-file.test.ts
|
||||
|
||||
The test runner runs all tests in a single process. It loads all `--preload` scripts (see [Lifecycle](https://bun.sh/docs/test/lifecycle) for details), then runs all tests. If a test fails, the test runner will exit with a non-zero exit code.
|
||||
|
||||
## CI/CD integration
|
||||
|
||||
`bun test` supports a variety of CI/CD integrations.
|
||||
|
||||
### GitHub Actions
|
||||
|
||||
`bun test` automatically detects if it's running inside GitHub Actions and will emit GitHub Actions annotations to the console directly.
|
||||
|
||||
No configuration is needed, other than installing `bun` in the workflow and running `bun test`.
|
||||
|
||||
#### How to install `bun` in a GitHub Actions workflow
|
||||
|
||||
To use `bun test` in a GitHub Actions workflow, add the following step:
|
||||
|
||||
```yaml
|
||||
jobs:
|
||||
build:
|
||||
name: build-app
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Install bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
- name: Install dependencies # (assuming your project has dependencies)
|
||||
run: bun install # You can use npm/yarn/pnpm instead if you prefer
|
||||
- name: Run tests
|
||||
run: bun test
|
||||
```
|
||||
|
||||
From there, you'll get GitHub Actions annotations.
|
||||
|
||||
### JUnit XML reports (GitLab, etc.)
|
||||
|
||||
To use `bun test` with a JUnit XML reporter, you can use the `--reporter=junit` in combination with `--reporter-outfile`.
|
||||
|
||||
```sh
|
||||
$ bun test --reporter=junit --reporter-outfile=./bun.xml
|
||||
```
|
||||
|
||||
This will continue to output to stdout/stderr as usual, and also write a JUnit
|
||||
XML report to the given path at the very end of the test run.
|
||||
|
||||
JUnit XML is a popular format for reporting test results in CI/CD pipelines.
|
||||
|
||||
## Timeouts
|
||||
|
||||
Use the `--timeout` flag to specify a _per-test_ timeout in milliseconds. If a test times out, it will be marked as failed. The default value is `5000`.
|
||||
@@ -81,7 +126,7 @@ Use the `--bail` flag to abort the test run early after a pre-determined number
|
||||
$ bun test --bail
|
||||
|
||||
# bail after 10 failure
|
||||
$ bun test --bail 10
|
||||
$ bun test --bail=10
|
||||
```
|
||||
|
||||
## Watch mode
|
||||
|
||||
@@ -1,11 +0,0 @@
|
||||
- pages
|
||||
- auto-bundle dependencies
|
||||
- pages is function that returns a list of pages?
|
||||
- plugins for svelte and vue
|
||||
- custom loaders
|
||||
- HMR
|
||||
- server endpoints
|
||||
|
||||
```ts
|
||||
Bun.serve({});
|
||||
```
|
||||
@@ -1,31 +0,0 @@
|
||||
To create a new React app:
|
||||
|
||||
```bash
|
||||
$ bun create react ./app
|
||||
$ cd app
|
||||
$ bun dev # start dev server
|
||||
```
|
||||
|
||||
To use an existing React app:
|
||||
|
||||
```bash
|
||||
$ bun add -d react-refresh # install React Fast Refresh
|
||||
$ bun bun ./src/index.js # generate a bundle for your entry point(s)
|
||||
$ bun dev # start the dev server
|
||||
```
|
||||
|
||||
From there, Bun relies on the filesystem for mapping dev server paths to source files. All URL paths are relative to the project root (where `package.json` is located).
|
||||
|
||||
Here are examples of routing source code file paths:
|
||||
|
||||
| Dev Server URL | File Path (relative to cwd) |
|
||||
| -------------------------- | --------------------------- |
|
||||
| /src/components/Button.tsx | src/components/Button.tsx |
|
||||
| /src/index.tsx | src/index.tsx |
|
||||
| /pages/index.js | pages/index.js |
|
||||
|
||||
You do not need to include file extensions in `import` paths. CommonJS-style import paths without the file extension work.
|
||||
|
||||
You can override the public directory by passing `--public-dir="path-to-folder"`.
|
||||
|
||||
If no directory is specified and `./public/` doesn’t exist, Bun will try `./static/`. If `./static/` does not exist, but won’t serve from a public directory. If you pass `--public-dir=./` Bun will serve from the current directory, but it will check the current directory last instead of first.
|
||||
@@ -1,77 +0,0 @@
|
||||
## With `bun dev`
|
||||
|
||||
When importing CSS in JavaScript-like loaders, CSS is treated special.
|
||||
|
||||
By default, Bun will transform a statement like this:
|
||||
|
||||
```js
|
||||
import "../styles/global.css";
|
||||
```
|
||||
|
||||
### When `platform` is `browser`
|
||||
|
||||
```js
|
||||
globalThis.document?.dispatchEvent(
|
||||
new CustomEvent("onimportcss", {
|
||||
detail: "http://localhost:3000/styles/globals.css",
|
||||
}),
|
||||
);
|
||||
```
|
||||
|
||||
An event handler for turning that into a `<link>` is automatically registered when HMR is enabled. That event handler can be turned off either in a framework’s `package.json` or by setting `globalThis["Bun_disableCSSImports"] = true;` in client-side code. Additionally, you can get a list of every .css file imported this way via `globalThis["__BUN"].allImportedStyles`.
|
||||
|
||||
### When `platform` is `bun`
|
||||
|
||||
```js
|
||||
//@import url("http://localhost:3000/styles/globals.css");
|
||||
```
|
||||
|
||||
Additionally, Bun exposes an API for SSR/SSG that returns a flat list of URLs to css files imported. That function is `Bun.getImportedStyles()`.
|
||||
|
||||
```ts
|
||||
// This specifically is for "framework" in package.json when loaded via `bun dev`
|
||||
// This API needs to be changed somewhat to work more generally with Bun.js
|
||||
// Initially, you could only use Bun.js through `bun dev`
|
||||
// and this API was created at that time
|
||||
addEventListener("fetch", async (event: FetchEvent) => {
|
||||
let route = Bun.match(event);
|
||||
const App = await import("pages/_app");
|
||||
|
||||
// This returns all .css files that were imported in the line above.
|
||||
// It’s recursive, so any file that imports a CSS file will be included.
|
||||
const appStylesheets = bun.getImportedStyles();
|
||||
|
||||
// ...rest of code
|
||||
});
|
||||
```
|
||||
|
||||
This is useful for preventing flash of unstyled content.
|
||||
|
||||
## With `bun bun`
|
||||
|
||||
Bun bundles `.css` files imported via `@import` into a single file. It doesn’t auto-prefix or minify CSS today. Multiple `.css` files imported in one JavaScript file will _not_ be bundled into one file. You’ll have to import those from a `.css` file.
|
||||
|
||||
This input:
|
||||
|
||||
```css
|
||||
@import url("./hi.css");
|
||||
@import url("./hello.css");
|
||||
@import url("./yo.css");
|
||||
```
|
||||
|
||||
Becomes:
|
||||
|
||||
```css
|
||||
/* hi.css */
|
||||
/* ...contents of hi.css */
|
||||
/* hello.css */
|
||||
/* ...contents of hello.css */
|
||||
/* yo.css */
|
||||
/* ...contents of yo.css */
|
||||
```
|
||||
|
||||
## CSS runtime
|
||||
|
||||
To support hot CSS reloading, Bun inserts `@supports` annotations into CSS that tag which files a stylesheet is composed of. Browsers ignore this, so it doesn’t impact styles.
|
||||
|
||||
By default, Bun’s runtime code automatically listens to `onimportcss` and will insert the `event.detail` into a `<link rel="stylesheet" href={${event.detail}}>` if there is no existing `link` tag with that stylesheet. That’s how Bun’s equivalent of `style-loader` works.
|
||||
@@ -1,26 +0,0 @@
|
||||
## Creating a Discord bot with Bun
|
||||
|
||||
Discord bots perform actions in response to _application commands_. There are 3 types of commands accessible in different interfaces: the chat input, a message's context menu (top-right menu or right-clicking in a message), and a user's context menu (right-clicking on a user).
|
||||
|
||||
To get started you can use the interactions template:
|
||||
|
||||
```bash
|
||||
bun create discord-interactions my-interactions-bot
|
||||
cd my-interactions-bot
|
||||
```
|
||||
|
||||
If you don't have a Discord bot/application yet, you can create one [here (https://discord.com/developers/applications/me)](https://discord.com/developers/applications/me).
|
||||
|
||||
Invite bot to your server by visiting `https://discord.com/api/oauth2/authorize?client_id=<your_application_id>&scope=bot%20applications.commands`
|
||||
|
||||
Afterwards you will need to get your bot's token, public key, and application id from the application page and put them into `.env.example` file
|
||||
|
||||
Then you can run the http server that will handle your interactions:
|
||||
|
||||
```bash
|
||||
$ bun install
|
||||
$ mv .env.example .env
|
||||
$ bun run.js # listening on port 1337
|
||||
```
|
||||
|
||||
Discord does not accept an insecure HTTP server, so you will need to provide an SSL certificate or put the interactions server behind a secure reverse proxy. For development, you can use ngrok/cloudflare tunnel to expose local ports as secure URL.
|
||||
@@ -63,4 +63,4 @@ process.on("exit", kill);
|
||||
|
||||
---
|
||||
|
||||
At the time of writing, Bun hasn't implemented the `node:cluster` module yet, but this is a faster, simple, and limited alternative. We will also implement `node:cluster` in the future.
|
||||
Bun has also implemented the `node:cluster` module, but this is a faster, simple, and limited alternative.
|
||||
|
||||
@@ -14,7 +14,7 @@ To bail after a certain threshold of failures, optionally specify a number after
|
||||
|
||||
```sh
|
||||
# bail after 10 failures
|
||||
$ bun test --bail 10
|
||||
$ bun test --bail=10
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -57,7 +57,7 @@ Replace `bail` in your Jest config with the `--bail` CLI flag.
|
||||
``` -->
|
||||
|
||||
```sh
|
||||
$ bun test --bail 3
|
||||
$ bun test --bail=3
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -44,10 +44,17 @@ test.todo("unimplemented feature", () => {
|
||||
|
||||
---
|
||||
|
||||
If an implementation is provided, it will be executed and _expected to fail_ by test runner! If a todo test passes, the `bun test` run will return a non-zero exit code to signal the failure.
|
||||
If an implementation is provided, it will not be run unless the `--todo` flag is passed. If the `--todo` flag is passed, the test will be executed and _expected to fail_ by test runner! If a todo test passes, the `bun test` run will return a non-zero exit code to signal the failure.
|
||||
|
||||
```sh
|
||||
$ bun test
|
||||
$ bun test --todo
|
||||
my.test.ts:
|
||||
✗ unimplemented feature
|
||||
^ this test is marked as todo but passes. Remove `.todo` or check that test is correct.
|
||||
|
||||
0 pass
|
||||
1 fail
|
||||
1 expect() calls
|
||||
$ echo $?
|
||||
1 # this is the exit code of the previous command
|
||||
```
|
||||
|
||||
@@ -73,8 +73,7 @@ There are also image variants for different operating systems.
|
||||
$ docker pull oven/bun:debian
|
||||
$ docker pull oven/bun:slim
|
||||
$ docker pull oven/bun:distroless
|
||||
# alpine not recommended until #918 is fixed
|
||||
# $ docker pull oven/bun:alpine
|
||||
$ docker pull oven/bun:alpine
|
||||
```
|
||||
|
||||
## Checking installation
|
||||
@@ -190,14 +189,19 @@ For convenience, here are download links for the latest version:
|
||||
|
||||
- [`bun-linux-x64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip)
|
||||
- [`bun-linux-x64-baseline.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64-baseline.zip)
|
||||
- [`bun-linux-x64-musl.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64-musl.zip)
|
||||
- [`bun-linux-x64-musl-baseline.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64-musl-baseline.zip)
|
||||
- [`bun-windows-x64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-windows-x64.zip)
|
||||
- [`bun-windows-x64-baseline.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-windows-x64-baseline.zip)
|
||||
- [`bun-darwin-aarch64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-darwin-aarch64.zip)
|
||||
- [`bun-linux-aarch64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-aarch64.zip)
|
||||
- [`bun-linux-aarch64-musl.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-linux-aarch64-musl.zip)
|
||||
- [`bun-darwin-x64.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-darwin-x64.zip)
|
||||
- [`bun-darwin-x64-baseline.zip`](https://github.com/oven-sh/bun/releases/latest/download/bun-darwin-x64-baseline.zip)
|
||||
|
||||
The `baseline` binaries are built for older CPUs which may not support AVX2 instructions. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install scripts automatically choose the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
|
||||
The `musl` binaries are built for distributions that do not ship with the glibc libraries by default, instead relying on musl. The two most popular distros are Void Linux and Alpine Linux, with the latter is used heavily in Docker containers. If you encounter an error like the following: `bun: /lib/x86_64-linux-gnu/libm.so.6: version GLIBC_2.29' not found (required by bun)`, try using the musl binary. Bun's install script automatically chooses the correct binary for your system.
|
||||
|
||||
The `baseline` binaries are built for older CPUs which may not support AVX2 instructions. If you run into an "Illegal Instruction" error when running Bun, try using the `baseline` binaries instead. Bun's install scripts automatically chooses the correct binary for your system which helps avoid this issue. Baseline builds are slower than regular builds, so use them only if necessary.
|
||||
|
||||
<!--
|
||||
## Native
|
||||
|
||||
@@ -73,15 +73,10 @@ After Visual Studio, you need the following:
|
||||
**Note** – The Zig compiler is automatically downloaded, installed, and updated by the building process.
|
||||
{% /callout %}
|
||||
|
||||
[WinGet](https://learn.microsoft.com/windows/package-manager/winget) or [Scoop](https://scoop.sh) can be used to install these remaining tools easily:
|
||||
[Scoop](https://scoop.sh) can be used to install these remaining tools easily.
|
||||
|
||||
{% codetabs group="a" %}
|
||||
|
||||
```ps1#WinGet
|
||||
## Select "Add LLVM to the system PATH for all users" in the LLVM installer
|
||||
> winget install -i LLVM.LLVM -v 18.1.8 && winget install GoLang.Go Rustlang.Rustup NASM.NASM StrawberryPerl.StrawberryPerl RubyInstallerTeam.Ruby.3.2 OpenJS.NodeJS.LTS Ccache.Ccache
|
||||
```
|
||||
|
||||
```ps1#Scoop
|
||||
> irm https://get.scoop.sh | iex
|
||||
> scoop install nodejs-lts go rust nasm ruby perl ccache
|
||||
@@ -91,20 +86,16 @@ After Visual Studio, you need the following:
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
{% callout %}
|
||||
Please do not use WinGet/other package manager for these, as you will likely install Strawberry Perl instead of a more minimal installation of Perl. Strawberry Perl includes many other utilities that get installed into `$Env:PATH` that will conflict with MSVC and break the build.
|
||||
{% /callout %}
|
||||
|
||||
If you intend on building WebKit locally (optional), you should install these packages:
|
||||
|
||||
{% codetabs group="a" %}
|
||||
|
||||
```ps1#WinGet
|
||||
> winget install ezwinports.make Cygwin.Cygwin Python.Python.3.12
|
||||
```
|
||||
|
||||
```ps1#Scoop
|
||||
> scoop install make cygwin python
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
From here on out, it is **expected you use a PowerShell Terminal with `.\scripts\vs-shell.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it:
|
||||
|
||||
```ps1
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
# RFCs
|
||||
|
||||
| Number | Name | Issue |
|
||||
| ------ | ---- | ----- |
|
||||
@@ -97,7 +97,7 @@ test.skip("wat", () => {
|
||||
|
||||
## `test.todo`
|
||||
|
||||
Mark a test as a todo with `test.todo`. These tests _will_ be run, and the test runner will expect them to fail. If they pass, you will be prompted to mark it as a regular test.
|
||||
Mark a test as a todo with `test.todo`. These tests will not be run.
|
||||
|
||||
```ts
|
||||
import { expect, test } from "bun:test";
|
||||
@@ -107,12 +107,22 @@ test.todo("fix this", () => {
|
||||
});
|
||||
```
|
||||
|
||||
To exclusively run tests marked as _todo_, use `bun test --todo`.
|
||||
To run todo tests and find any which are passing, use `bun test --todo`.
|
||||
|
||||
```sh
|
||||
$ bun test --todo
|
||||
my.test.ts:
|
||||
✗ unimplemented feature
|
||||
^ this test is marked as todo but passes. Remove `.todo` or check that test is correct.
|
||||
|
||||
0 pass
|
||||
1 fail
|
||||
1 expect() calls
|
||||
```
|
||||
|
||||
With this flag, failing todo tests will not cause an error, but todo tests which pass will be marked as failing so you can remove the todo mark or
|
||||
fix the test.
|
||||
|
||||
## `test.only`
|
||||
|
||||
To run a particular test or suite of tests use `test.only()` or `describe.only()`. Once declared, running `bun test --only` will only execute tests/suites that have been marked with `.only()`. Running `bun test` without the `--only` option with `test.only()` declared will result in all tests in the given suite being executed _up to_ the test with `.only()`. `describe.only()` functions the same in both execution scenarios.
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
## Troubleshooting
|
||||
|
||||
### Bun not running on an M1 (or Apple Silicon)
|
||||
|
||||
If you see a message like this
|
||||
|
||||
> [1] 28447 killed bun create next ./test
|
||||
|
||||
It most likely means you’re running Bun’s x64 version on Apple Silicon. This happens if Bun is running via Rosetta. Rosetta is unable to emulate AVX2 instructions, which Bun indirectly uses.
|
||||
|
||||
The fix is to ensure you installed a version of Bun built for Apple Silicon.
|
||||
|
||||
### error: Unexpected
|
||||
|
||||
If you see an error like this:
|
||||
|
||||

|
||||
|
||||
It usually means the max number of open file descriptors is being explicitly set to a low number. By default, Bun requests the max number of file descriptors available (which on macOS, is something like 32,000). But, if you previously ran into ulimit issues with, e.g., Chokidar, someone on The Internet may have advised you to run `ulimit -n 8192`.
|
||||
|
||||
That advice unfortunately **lowers** the hard limit to `8192`. This can be a problem in large repositories or projects with lots of dependencies. Chokidar (and other watchers) don’t seem to call `setrlimit`, which means they’re reliant on the (much lower) soft limit.
|
||||
|
||||
To fix this issue:
|
||||
|
||||
1. Remove any scripts that call `ulimit -n` and restart your shell.
|
||||
2. Try again, and if the error still occurs, try setting `ulimit -n` to an absurdly high number, such as `ulimit -n 2147483646`
|
||||
3. Try again, and if that still doesn’t fix it, open an issue
|
||||
|
||||
### Unzip is required
|
||||
|
||||
Unzip is required to install Bun on Linux. You can use one of the following commands to install `unzip`:
|
||||
|
||||
#### Debian / Ubuntu / Mint
|
||||
|
||||
```sh
|
||||
$ sudo apt install unzip
|
||||
```
|
||||
|
||||
#### RedHat / CentOS / Fedora
|
||||
|
||||
```sh
|
||||
$ sudo dnf install unzip
|
||||
```
|
||||
|
||||
#### Arch / Manjaro
|
||||
|
||||
```sh
|
||||
$ sudo pacman -S unzip
|
||||
```
|
||||
|
||||
#### OpenSUSE
|
||||
|
||||
```sh
|
||||
$ sudo zypper install unzip
|
||||
```
|
||||
|
||||
### bun install is stuck
|
||||
|
||||
Please run `bun install --verbose 2> logs.txt` and send them to me in Bun's discord. If you're on Linux, it would also be helpful if you run `sudo perf trace bun install --silent` and attach the logs.
|
||||
|
||||
### Uninstalling
|
||||
|
||||
Bun's binary and install cache is located in `~/.bun` by default. To uninstall bun, delete this directory and edit your shell config (`.bashrc`, `.zshrc`, or similar) to remove `~/.bun/bin` from the `$PATH` variable.
|
||||
|
||||
```sh
|
||||
$ rm -rf ~/.bun # make sure to remove ~/.bun/bin from $PATH
|
||||
```
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "bun",
|
||||
"version": "1.1.35",
|
||||
"version": "1.1.39",
|
||||
"workspaces": [
|
||||
"./packages/bun-types"
|
||||
],
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"name": "bun-debug-adapter-protocol",
|
||||
"version": "0.0.1",
|
||||
"type": "module",
|
||||
"dependencies": {
|
||||
"semver": "^7.5.4",
|
||||
"source-map-js": "^1.0.2"
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
import type { InspectorEventMap } from "../../../bun-inspector-protocol/src/inspector";
|
||||
import type { JSC } from "../../../bun-inspector-protocol/src/protocol";
|
||||
import type { DAP } from "../protocol";
|
||||
// @ts-ignore
|
||||
import { ChildProcess, spawn } from "node:child_process";
|
||||
import { EventEmitter } from "node:events";
|
||||
import { AddressInfo, createServer } from "node:net";
|
||||
import { AddressInfo, createServer, Socket } from "node:net";
|
||||
import * as path from "node:path";
|
||||
import { remoteObjectToString, WebSocketInspector } from "../../../bun-inspector-protocol/index";
|
||||
import { randomUnixPath, TCPSocketSignal, UnixSignal } from "./signal";
|
||||
import { Location, SourceMap } from "./sourcemap";
|
||||
import { remoteObjectToString, WebSocketInspector } from "../../../bun-inspector-protocol/index.ts";
|
||||
import type { Inspector, InspectorEventMap } from "../../../bun-inspector-protocol/src/inspector/index.d.ts";
|
||||
import { NodeSocketInspector } from "../../../bun-inspector-protocol/src/inspector/node-socket.ts";
|
||||
import type { JSC } from "../../../bun-inspector-protocol/src/protocol/index.d.ts";
|
||||
import type { DAP } from "../protocol/index.d.ts";
|
||||
import { randomUnixPath, TCPSocketSignal, UnixSignal } from "./signal.ts";
|
||||
import { Location, SourceMap } from "./sourcemap.ts";
|
||||
|
||||
export async function getAvailablePort(): Promise<number> {
|
||||
const server = createServer();
|
||||
server.listen(0);
|
||||
return new Promise((resolve, reject) => {
|
||||
return new Promise(resolve => {
|
||||
server.on("listening", () => {
|
||||
const { port } = server.address() as AddressInfo;
|
||||
server.close(() => {
|
||||
@@ -105,7 +105,18 @@ const capabilities: DAP.Capabilities = {
|
||||
|
||||
type InitializeRequest = DAP.InitializeRequest & {
|
||||
supportsConfigurationDoneRequest?: boolean;
|
||||
};
|
||||
enableControlFlowProfiler?: boolean;
|
||||
enableDebugger?: boolean;
|
||||
} & (
|
||||
| {
|
||||
enableLifecycleAgentReporter?: false;
|
||||
sendImmediatePreventExit?: false;
|
||||
}
|
||||
| {
|
||||
enableLifecycleAgentReporter: true;
|
||||
sendImmediatePreventExit?: boolean;
|
||||
}
|
||||
);
|
||||
|
||||
type LaunchRequest = DAP.LaunchRequest & {
|
||||
runtime?: string;
|
||||
@@ -231,10 +242,14 @@ function normalizeSourcePath(sourcePath: string, untitledDocPath?: string, bunEv
|
||||
return path.normalize(sourcePath);
|
||||
}
|
||||
|
||||
export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements IDebugAdapter {
|
||||
export abstract class BaseDebugAdapter<T extends Inspector = Inspector>
|
||||
extends EventEmitter<DebugAdapterEventMap>
|
||||
implements IDebugAdapter
|
||||
{
|
||||
protected readonly inspector: T;
|
||||
protected options?: DebuggerOptions;
|
||||
|
||||
#threadId: number;
|
||||
#inspector: WebSocketInspector;
|
||||
#process?: ChildProcess;
|
||||
#sourceId: number;
|
||||
#pendingSources: Map<string, ((source: Source) => void)[]>;
|
||||
#sources: Map<string | number, Source>;
|
||||
@@ -247,20 +262,21 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
#targets: Map<number, Target>;
|
||||
#variableId: number;
|
||||
#variables: Map<number, Variable>;
|
||||
#initialized?: InitializeRequest;
|
||||
#options?: DebuggerOptions;
|
||||
#untitledDocPath?: string;
|
||||
#bunEvalPath?: string;
|
||||
#initialized?: InitializeRequest;
|
||||
|
||||
constructor(url?: string | URL, untitledDocPath?: string, bunEvalPath?: string) {
|
||||
protected constructor(inspector: T, untitledDocPath?: string, bunEvalPath?: string) {
|
||||
super();
|
||||
this.#untitledDocPath = untitledDocPath;
|
||||
this.#bunEvalPath = bunEvalPath;
|
||||
this.#threadId = threadId++;
|
||||
this.#inspector = new WebSocketInspector(url);
|
||||
const emit = this.#inspector.emit.bind(this.#inspector);
|
||||
this.#inspector.emit = (event, ...args) => {
|
||||
this.inspector = inspector;
|
||||
const emit = this.inspector.emit.bind(this.inspector);
|
||||
this.inspector.emit = (event, ...args) => {
|
||||
let sent = false;
|
||||
sent ||= emit(event, ...args);
|
||||
sent ||= this.emit(event, ...(args as any));
|
||||
sent ||= this.emit(event as keyof JSC.EventMap, ...(args as any));
|
||||
return sent;
|
||||
};
|
||||
this.#sourceId = 1;
|
||||
@@ -274,26 +290,27 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
this.#targets = new Map();
|
||||
this.#variableId = 1;
|
||||
this.#variables = new Map();
|
||||
this.#untitledDocPath = untitledDocPath;
|
||||
this.#bunEvalPath = bunEvalPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the inspector url.
|
||||
* Gets the inspector url. This is deprecated and exists for compat.
|
||||
* @deprecated You should get the inspector directly (with .getInspector()), and if it's a WebSocketInspector you can access `.url` direclty.
|
||||
*/
|
||||
get url(): string {
|
||||
return this.#inspector.url;
|
||||
// This code has been migrated from a time when the inspector was always a WebSocketInspector.
|
||||
if (this.inspector instanceof WebSocketInspector) {
|
||||
return this.inspector.url;
|
||||
}
|
||||
|
||||
throw new Error("Inspector does not offer a URL");
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts the inspector.
|
||||
* @param url the inspector url
|
||||
* @returns if the inspector was able to connect
|
||||
*/
|
||||
start(url?: string): Promise<boolean> {
|
||||
return this.#attach({ url });
|
||||
public getInspector() {
|
||||
return this.inspector;
|
||||
}
|
||||
|
||||
abstract start(...args: unknown[]): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Sends a request to the JavaScript inspector.
|
||||
* @param method the method name
|
||||
@@ -306,7 +323,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
* console.log(result.value); // 2
|
||||
*/
|
||||
async send<M extends keyof JSC.ResponseMap>(method: M, params?: JSC.RequestMap[M]): Promise<JSC.ResponseMap[M]> {
|
||||
return this.#inspector.send(method, params);
|
||||
return this.inspector.send(method, params);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -347,7 +364,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
return sent;
|
||||
}
|
||||
|
||||
#emit<E extends keyof DAP.EventMap>(event: E, body?: DAP.EventMap[E]): void {
|
||||
protected emitAdapterEvent<E extends keyof DAP.EventMap>(event: E, body?: DAP.EventMap[E]): void {
|
||||
this.emit("Adapter.event", {
|
||||
type: "event",
|
||||
seq: 0,
|
||||
@@ -359,7 +376,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
#emitAfterResponse<E extends keyof DAP.EventMap>(event: E, body?: DAP.EventMap[E]): void {
|
||||
this.once("Adapter.response", () => {
|
||||
process.nextTick(() => {
|
||||
this.#emit(event, body);
|
||||
this.emitAdapterEvent(event, body);
|
||||
});
|
||||
});
|
||||
}
|
||||
@@ -437,19 +454,37 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
this.emit(`Adapter.${name}` as keyof DebugAdapterEventMap, body);
|
||||
}
|
||||
|
||||
initialize(request: InitializeRequest): DAP.InitializeResponse {
|
||||
public initialize(request: InitializeRequest): DAP.InitializeResponse {
|
||||
this.#initialized = request;
|
||||
|
||||
this.send("Inspector.enable");
|
||||
this.send("Runtime.enable");
|
||||
this.send("Console.enable");
|
||||
this.send("Debugger.enable").catch(error => {
|
||||
const { message } = unknownToError(error);
|
||||
if (message !== "Debugger domain already enabled") {
|
||||
throw error;
|
||||
|
||||
if (request.enableControlFlowProfiler) {
|
||||
this.send("Runtime.enableControlFlowProfiler");
|
||||
}
|
||||
|
||||
if (request.enableLifecycleAgentReporter) {
|
||||
this.send("LifecycleReporter.enable");
|
||||
|
||||
if (request.sendImmediatePreventExit) {
|
||||
this.send("LifecycleReporter.preventExit");
|
||||
}
|
||||
});
|
||||
this.send("Debugger.setAsyncStackTraceDepth", { depth: 200 });
|
||||
}
|
||||
|
||||
// use !== false because by default if unspecified we want to enable the debugger
|
||||
// and this option didn't exist beforehand, so we can't make it non-optional
|
||||
if (request.enableDebugger !== false) {
|
||||
this.send("Debugger.enable").catch(error => {
|
||||
const { message } = unknownToError(error);
|
||||
if (message !== "Debugger domain already enabled") {
|
||||
throw error;
|
||||
}
|
||||
});
|
||||
|
||||
this.send("Debugger.setAsyncStackTraceDepth", { depth: 200 });
|
||||
}
|
||||
|
||||
const { clientID, supportsConfigurationDoneRequest } = request;
|
||||
if (!supportsConfigurationDoneRequest && clientID !== "vscode") {
|
||||
@@ -463,248 +498,20 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
configurationDone(): void {
|
||||
// If the client requested that `noDebug` mode be enabled,
|
||||
// then we need to disable all breakpoints and pause on statements.
|
||||
const active = !this.#options?.noDebug;
|
||||
const active = !this.options?.noDebug;
|
||||
this.send("Debugger.setBreakpointsActive", { active });
|
||||
|
||||
// Tell the debugger that its ready to start execution.
|
||||
this.send("Inspector.initialized");
|
||||
}
|
||||
|
||||
async launch(request: DAP.LaunchRequest): Promise<void> {
|
||||
this.#options = { ...request, type: "launch" };
|
||||
|
||||
try {
|
||||
await this.#launch(request);
|
||||
} catch (error) {
|
||||
// Some clients, like VSCode, will show a system-level popup when a `launch` request fails.
|
||||
// Instead, we want to show the error as a sidebar notification.
|
||||
const { message } = unknownToError(error);
|
||||
this.#emit("output", {
|
||||
category: "stderr",
|
||||
output: `Failed to start debugger.\n${message}`,
|
||||
});
|
||||
this.terminate();
|
||||
}
|
||||
}
|
||||
|
||||
async #launch(request: LaunchRequest): Promise<void> {
|
||||
const {
|
||||
runtime = "bun",
|
||||
runtimeArgs = [],
|
||||
program,
|
||||
args = [],
|
||||
cwd,
|
||||
env = {},
|
||||
strictEnv = false,
|
||||
watchMode = false,
|
||||
stopOnEntry = false,
|
||||
__skipValidation = false,
|
||||
stdin,
|
||||
} = request;
|
||||
|
||||
if (!__skipValidation && !program) {
|
||||
throw new Error("No program specified");
|
||||
}
|
||||
|
||||
const processArgs = [...runtimeArgs];
|
||||
|
||||
if (program === "-" && stdin) {
|
||||
processArgs.push("--eval", stdin);
|
||||
} else if (program) {
|
||||
processArgs.push(program);
|
||||
}
|
||||
|
||||
processArgs.push(...args);
|
||||
|
||||
if (program && isTestJavaScript(program) && !runtimeArgs.includes("test")) {
|
||||
processArgs.unshift("test");
|
||||
}
|
||||
|
||||
if (watchMode && !runtimeArgs.includes("--watch") && !runtimeArgs.includes("--hot")) {
|
||||
processArgs.unshift(watchMode === "hot" ? "--hot" : "--watch");
|
||||
}
|
||||
|
||||
const processEnv = strictEnv
|
||||
? {
|
||||
...env,
|
||||
}
|
||||
: {
|
||||
...process.env,
|
||||
...env,
|
||||
};
|
||||
|
||||
if (process.platform !== "win32") {
|
||||
// we're on unix
|
||||
const url = `ws+unix://${randomUnixPath()}`;
|
||||
const signal = new UnixSignal();
|
||||
|
||||
signal.on("Signal.received", () => {
|
||||
this.#attach({ url });
|
||||
});
|
||||
|
||||
this.once("Adapter.terminated", () => {
|
||||
signal.close();
|
||||
});
|
||||
|
||||
const query = stopOnEntry ? "break=1" : "wait=1";
|
||||
processEnv["BUN_INSPECT"] = `${url}?${query}`;
|
||||
processEnv["BUN_INSPECT_NOTIFY"] = signal.url;
|
||||
|
||||
// This is probably not correct, but it's the best we can do for now.
|
||||
processEnv["FORCE_COLOR"] = "1";
|
||||
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
|
||||
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
|
||||
|
||||
const started = await this.#spawn({
|
||||
command: runtime,
|
||||
args: processArgs,
|
||||
env: processEnv,
|
||||
cwd,
|
||||
isDebugee: true,
|
||||
});
|
||||
|
||||
if (!started) {
|
||||
throw new Error("Program could not be started.");
|
||||
}
|
||||
} else {
|
||||
// we're on windows
|
||||
// Create TCPSocketSignal
|
||||
const url = `ws://127.0.0.1:${await getAvailablePort()}/${getRandomId()}`; // 127.0.0.1 so it resolves correctly on windows
|
||||
const signal = new TCPSocketSignal(await getAvailablePort());
|
||||
|
||||
signal.on("Signal.received", async () => {
|
||||
this.#attach({ url });
|
||||
});
|
||||
|
||||
this.once("Adapter.terminated", () => {
|
||||
signal.close();
|
||||
});
|
||||
|
||||
const query = stopOnEntry ? "break=1" : "wait=1";
|
||||
processEnv["BUN_INSPECT"] = `${url}?${query}`;
|
||||
processEnv["BUN_INSPECT_NOTIFY"] = signal.url; // 127.0.0.1 so it resolves correctly on windows
|
||||
|
||||
// This is probably not correct, but it's the best we can do for now.
|
||||
processEnv["FORCE_COLOR"] = "1";
|
||||
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
|
||||
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
|
||||
|
||||
const started = await this.#spawn({
|
||||
command: runtime,
|
||||
args: processArgs,
|
||||
env: processEnv,
|
||||
cwd,
|
||||
isDebugee: true,
|
||||
});
|
||||
|
||||
if (!started) {
|
||||
throw new Error("Program could not be started.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async #spawn(options: {
|
||||
command: string;
|
||||
args?: string[];
|
||||
cwd?: string;
|
||||
env?: Record<string, string | undefined>;
|
||||
isDebugee?: boolean;
|
||||
}): Promise<boolean> {
|
||||
const { command, args = [], cwd, env, isDebugee } = options;
|
||||
const request = { command, args, cwd, env };
|
||||
this.emit("Process.requested", request);
|
||||
|
||||
let subprocess: ChildProcess;
|
||||
try {
|
||||
subprocess = spawn(command, args, {
|
||||
...request,
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
});
|
||||
} catch (cause) {
|
||||
this.emit("Process.exited", new Error("Failed to spawn process", { cause }), null);
|
||||
return false;
|
||||
}
|
||||
|
||||
subprocess.on("spawn", () => {
|
||||
this.emit("Process.spawned", subprocess);
|
||||
|
||||
if (isDebugee) {
|
||||
this.#process = subprocess;
|
||||
this.#emit("process", {
|
||||
name: `${command} ${args.join(" ")}`,
|
||||
systemProcessId: subprocess.pid,
|
||||
isLocalProcess: true,
|
||||
startMethod: "launch",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
subprocess.on("exit", (code, signal) => {
|
||||
this.emit("Process.exited", code, signal);
|
||||
|
||||
if (isDebugee) {
|
||||
this.#process = undefined;
|
||||
this.#emit("exited", {
|
||||
exitCode: code ?? -1,
|
||||
});
|
||||
this.#emit("terminated");
|
||||
}
|
||||
});
|
||||
|
||||
subprocess.stdout?.on("data", data => {
|
||||
this.emit("Process.stdout", data.toString());
|
||||
});
|
||||
|
||||
subprocess.stderr?.on("data", data => {
|
||||
this.emit("Process.stderr", data.toString());
|
||||
});
|
||||
|
||||
return new Promise(resolve => {
|
||||
subprocess.on("spawn", () => resolve(true));
|
||||
subprocess.on("exit", () => resolve(false));
|
||||
subprocess.on("error", () => resolve(false));
|
||||
});
|
||||
}
|
||||
|
||||
async attach(request: AttachRequest): Promise<void> {
|
||||
this.#options = { ...request, type: "attach" };
|
||||
|
||||
try {
|
||||
await this.#attach(request);
|
||||
} catch (error) {
|
||||
// Some clients, like VSCode, will show a system-level popup when a `launch` request fails.
|
||||
// Instead, we want to show the error as a sidebar notification.
|
||||
const { message } = unknownToError(error);
|
||||
this.#emit("output", {
|
||||
category: "stderr",
|
||||
output: `Failed to start debugger.\n${message}`,
|
||||
});
|
||||
this.terminate();
|
||||
}
|
||||
}
|
||||
|
||||
async #attach(request: AttachRequest): Promise<boolean> {
|
||||
const { url } = request;
|
||||
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const ok = await this.#inspector.start(url);
|
||||
if (ok) {
|
||||
return true;
|
||||
}
|
||||
await new Promise(resolve => setTimeout(resolve, 100 * i));
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
// Required so all implementations have a method that .terminate() always calls.
|
||||
// This is useful because we don't want any implementors to forget
|
||||
protected abstract exitJSProcess(): void;
|
||||
|
||||
terminate(): void {
|
||||
if (!this.#process?.kill()) {
|
||||
this.#evaluate({
|
||||
expression: "process.exit(0)",
|
||||
});
|
||||
}
|
||||
|
||||
this.#emit("terminated");
|
||||
this.exitJSProcess();
|
||||
this.emitAdapterEvent("terminated");
|
||||
}
|
||||
|
||||
disconnect(request: DAP.DisconnectRequest): void {
|
||||
@@ -1077,7 +884,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
}
|
||||
|
||||
for (const breakpoint of breakpoints) {
|
||||
this.#emit("breakpoint", {
|
||||
this.emitAdapterEvent("breakpoint", {
|
||||
reason: "removed",
|
||||
breakpoint,
|
||||
});
|
||||
@@ -1316,7 +1123,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
const callFrameId = this.#getCallFrameId(frameId);
|
||||
const objectGroup = callFrameId ? "debugger" : context;
|
||||
|
||||
const { result, wasThrown } = await this.#evaluate({
|
||||
const { result, wasThrown } = await this.evaluateInternal({
|
||||
expression,
|
||||
objectGroup,
|
||||
callFrameId,
|
||||
@@ -1337,7 +1144,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
};
|
||||
}
|
||||
|
||||
async #evaluate(options: {
|
||||
protected async evaluateInternal(options: {
|
||||
expression: string;
|
||||
objectGroup?: string;
|
||||
callFrameId?: string;
|
||||
@@ -1361,7 +1168,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
const callFrameId = this.#getCallFrameId(frameId);
|
||||
|
||||
const { expression, hint } = completionToExpression(text);
|
||||
const { result, wasThrown } = await this.#evaluate({
|
||||
const { result, wasThrown } = await this.evaluateInternal({
|
||||
expression: expression || "this",
|
||||
callFrameId,
|
||||
objectGroup: "repl",
|
||||
@@ -1393,33 +1200,29 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
}
|
||||
|
||||
["Inspector.connected"](): void {
|
||||
this.#emit("output", {
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "debug console",
|
||||
output: "Debugger attached.\n",
|
||||
});
|
||||
|
||||
this.#emit("initialized");
|
||||
this.emitAdapterEvent("initialized");
|
||||
}
|
||||
|
||||
async ["Inspector.disconnected"](error?: Error): Promise<void> {
|
||||
this.#emit("output", {
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "debug console",
|
||||
output: "Debugger detached.\n",
|
||||
});
|
||||
|
||||
if (error) {
|
||||
const { message } = error;
|
||||
this.#emit("output", {
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "stderr",
|
||||
output: `${message}\n`,
|
||||
});
|
||||
}
|
||||
|
||||
this.#reset();
|
||||
|
||||
if (this.#process?.exitCode !== null) {
|
||||
this.#emit("terminated");
|
||||
}
|
||||
this.resetInternal();
|
||||
}
|
||||
|
||||
async ["Debugger.scriptParsed"](event: JSC.Debugger.ScriptParsedEvent): Promise<void> {
|
||||
@@ -1470,7 +1273,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
return;
|
||||
}
|
||||
|
||||
this.#emit("output", {
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "stderr",
|
||||
output: errorMessage,
|
||||
line: this.#lineFrom0BasedLine(errorLine),
|
||||
@@ -1498,7 +1301,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
const breakpoint = breakpoints[i];
|
||||
const oldBreakpoint = oldBreakpoints[i];
|
||||
|
||||
this.#emit("breakpoint", {
|
||||
this.emitAdapterEvent("breakpoint", {
|
||||
reason: "changed",
|
||||
breakpoint: {
|
||||
...breakpoint,
|
||||
@@ -1581,7 +1384,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
}
|
||||
}
|
||||
|
||||
this.#emit("stopped", {
|
||||
this.emitAdapterEvent("stopped", {
|
||||
threadId: this.#threadId,
|
||||
reason: this.#stopped,
|
||||
hitBreakpointIds,
|
||||
@@ -1598,20 +1401,20 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
}
|
||||
}
|
||||
|
||||
this.#emit("continued", {
|
||||
this.emitAdapterEvent("continued", {
|
||||
threadId: this.#threadId,
|
||||
});
|
||||
}
|
||||
|
||||
["Process.stdout"](output: string): void {
|
||||
this.#emit("output", {
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "debug console",
|
||||
output,
|
||||
});
|
||||
}
|
||||
|
||||
["Process.stderr"](output: string): void {
|
||||
this.#emit("output", {
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "debug console",
|
||||
output,
|
||||
});
|
||||
@@ -1695,8 +1498,8 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
|
||||
// If the path changed or the source has a source reference,
|
||||
// the old source should be marked as removed.
|
||||
if (path !== oldPath || sourceReference) {
|
||||
this.#emit("loadedSource", {
|
||||
if (path !== oldPath /*|| sourceReference*/) {
|
||||
this.emitAdapterEvent("loadedSource", {
|
||||
reason: "removed",
|
||||
source: oldSource,
|
||||
});
|
||||
@@ -1706,7 +1509,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
this.#sources.set(sourceId, source);
|
||||
this.#sources.set(scriptId, source);
|
||||
|
||||
this.#emit("loadedSource", {
|
||||
this.emitAdapterEvent("loadedSource", {
|
||||
// If the reason is "changed", the source will be retrieved using
|
||||
// the `source` command, which is why it cannot be set when `path` is present.
|
||||
reason: oldSource && !path ? "changed" : "new",
|
||||
@@ -1762,9 +1565,9 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
}
|
||||
|
||||
// If the source is not present, it may not have been loaded yet.
|
||||
let resolves = this.#pendingSources.get(sourceId);
|
||||
let resolves = this.#pendingSources.get(sourceId.toString());
|
||||
if (!resolves) {
|
||||
this.#pendingSources.set(sourceId, (resolves = []));
|
||||
this.#pendingSources.set(sourceId.toString(), (resolves = []));
|
||||
}
|
||||
|
||||
return new Promise(resolve => {
|
||||
@@ -2016,7 +1819,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
const callFrameId = this.#getCallFrameId(frameId);
|
||||
const objectGroup = callFrameId ? "debugger" : "repl";
|
||||
|
||||
const { result, wasThrown } = await this.#evaluate({
|
||||
const { result, wasThrown } = await this.evaluateInternal({
|
||||
expression: `${expression} = (${value});`,
|
||||
objectGroup: "repl",
|
||||
callFrameId,
|
||||
@@ -2216,12 +2019,11 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
}
|
||||
|
||||
close(): void {
|
||||
this.#process?.kill();
|
||||
this.#inspector.close();
|
||||
this.#reset();
|
||||
this.inspector.close();
|
||||
this.resetInternal();
|
||||
}
|
||||
|
||||
#reset(): void {
|
||||
protected resetInternal(): void {
|
||||
this.#pendingSources.clear();
|
||||
this.#sources.clear();
|
||||
this.#stackFrames.length = 0;
|
||||
@@ -2232,10 +2034,309 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
|
||||
this.#functionBreakpoints.clear();
|
||||
this.#targets.clear();
|
||||
this.#variables.clear();
|
||||
this.#options = undefined;
|
||||
this.options = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a debug adapter that connects over a unix/tcp socket. Usually
|
||||
* in the case of a reverse connection. This is used by the vscode extension.
|
||||
*
|
||||
* @warning This will gracefully handle socket closure, you don't need to add extra handling.
|
||||
*/
|
||||
export class NodeSocketDebugAdapter extends BaseDebugAdapter<NodeSocketInspector> {
|
||||
public constructor(socket: Socket, untitledDocPath?: string, bunEvalPath?: string) {
|
||||
super(new NodeSocketInspector(socket), untitledDocPath, bunEvalPath);
|
||||
|
||||
socket.once("close", () => {
|
||||
this.resetInternal();
|
||||
});
|
||||
}
|
||||
|
||||
protected exitJSProcess(): void {
|
||||
this.evaluateInternal({
|
||||
expression: "process.exit(0)",
|
||||
});
|
||||
}
|
||||
|
||||
public async start() {
|
||||
const ok = await this.inspector.start();
|
||||
return ok;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* The default debug adapter. Connects via WebSocket
|
||||
*/
|
||||
export class WebSocketDebugAdapter extends BaseDebugAdapter<WebSocketInspector> {
|
||||
#process?: ChildProcess;
|
||||
|
||||
public constructor(url?: string | URL, untitledDocPath?: string, bunEvalPath?: string) {
|
||||
super(new WebSocketInspector(url), untitledDocPath, bunEvalPath);
|
||||
}
|
||||
|
||||
async ["Inspector.disconnected"](error?: Error): Promise<void> {
|
||||
await super["Inspector.disconnected"](error);
|
||||
|
||||
if (this.#process?.exitCode !== null) {
|
||||
this.emitAdapterEvent("terminated");
|
||||
}
|
||||
}
|
||||
|
||||
protected exitJSProcess() {
|
||||
if (!this.#process?.kill()) {
|
||||
this.evaluateInternal({
|
||||
expression: "process.exit(0)",
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Starts the inspector.
|
||||
* @param url the inspector url, will default to the one provided in the constructor (if any). If none
|
||||
* @returns if the inspector was able to connect
|
||||
*/
|
||||
start(url?: string): Promise<boolean> {
|
||||
return this.#attach({ url });
|
||||
}
|
||||
|
||||
close() {
|
||||
this.#process?.kill();
|
||||
super.close();
|
||||
}
|
||||
|
||||
async launch(request: DAP.LaunchRequest): Promise<void> {
|
||||
this.options = { ...request, type: "launch" };
|
||||
|
||||
try {
|
||||
await this.#launch(request);
|
||||
} catch (error) {
|
||||
// Some clients, like VSCode, will show a system-level popup when a `launch` request fails.
|
||||
// Instead, we want to show the error as a sidebar notification.
|
||||
const { message } = unknownToError(error);
|
||||
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "stderr",
|
||||
output: `Failed to start debugger.\n${message}`,
|
||||
});
|
||||
|
||||
this.terminate();
|
||||
}
|
||||
}
|
||||
|
||||
async #launch(request: LaunchRequest): Promise<void> {
|
||||
const {
|
||||
runtime = "bun",
|
||||
runtimeArgs = [],
|
||||
program,
|
||||
args = [],
|
||||
cwd,
|
||||
env = {},
|
||||
strictEnv = false,
|
||||
watchMode = false,
|
||||
stopOnEntry = false,
|
||||
__skipValidation = false,
|
||||
stdin,
|
||||
} = request;
|
||||
|
||||
if (!__skipValidation && !program) {
|
||||
throw new Error("No program specified");
|
||||
}
|
||||
|
||||
const processArgs = [...runtimeArgs];
|
||||
|
||||
if (program === "-" && stdin) {
|
||||
processArgs.push("--eval", stdin);
|
||||
} else if (program) {
|
||||
processArgs.push(program);
|
||||
}
|
||||
|
||||
processArgs.push(...args);
|
||||
|
||||
if (program && isTestJavaScript(program) && !runtimeArgs.includes("test")) {
|
||||
processArgs.unshift("test");
|
||||
}
|
||||
|
||||
if (watchMode && !runtimeArgs.includes("--watch") && !runtimeArgs.includes("--hot")) {
|
||||
processArgs.unshift(watchMode === "hot" ? "--hot" : "--watch");
|
||||
}
|
||||
|
||||
const processEnv = strictEnv
|
||||
? {
|
||||
...env,
|
||||
}
|
||||
: {
|
||||
...process.env,
|
||||
...env,
|
||||
};
|
||||
|
||||
if (process.platform !== "win32") {
|
||||
// we're on unix
|
||||
const url = `ws+unix://${randomUnixPath()}`;
|
||||
const signal = new UnixSignal();
|
||||
|
||||
signal.on("Signal.received", () => {
|
||||
this.#attach({ url });
|
||||
});
|
||||
|
||||
this.once("Adapter.terminated", () => {
|
||||
signal.close();
|
||||
});
|
||||
|
||||
const query = stopOnEntry ? "break=1" : "wait=1";
|
||||
processEnv["BUN_INSPECT"] = `${url}?${query}`;
|
||||
processEnv["BUN_INSPECT_NOTIFY"] = signal.url;
|
||||
|
||||
// This is probably not correct, but it's the best we can do for now.
|
||||
processEnv["FORCE_COLOR"] = "1";
|
||||
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
|
||||
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
|
||||
|
||||
const started = await this.#spawn({
|
||||
command: runtime,
|
||||
args: processArgs,
|
||||
env: processEnv,
|
||||
cwd,
|
||||
isDebugee: true,
|
||||
});
|
||||
|
||||
if (!started) {
|
||||
throw new Error("Program could not be started.");
|
||||
}
|
||||
} else {
|
||||
// we're on windows
|
||||
// Create TCPSocketSignal
|
||||
const url = `ws://127.0.0.1:${await getAvailablePort()}/${getRandomId()}`; // 127.0.0.1 so it resolves correctly on windows
|
||||
const signal = new TCPSocketSignal(await getAvailablePort());
|
||||
|
||||
signal.on("Signal.received", async () => {
|
||||
this.#attach({ url });
|
||||
});
|
||||
|
||||
this.once("Adapter.terminated", () => {
|
||||
signal.close();
|
||||
});
|
||||
|
||||
const query = stopOnEntry ? "break=1" : "wait=1";
|
||||
processEnv["BUN_INSPECT"] = `${url}?${query}`;
|
||||
processEnv["BUN_INSPECT_NOTIFY"] = signal.url; // 127.0.0.1 so it resolves correctly on windows
|
||||
|
||||
// This is probably not correct, but it's the best we can do for now.
|
||||
processEnv["FORCE_COLOR"] = "1";
|
||||
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
|
||||
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
|
||||
|
||||
const started = await this.#spawn({
|
||||
command: runtime,
|
||||
args: processArgs,
|
||||
env: processEnv,
|
||||
cwd,
|
||||
isDebugee: true,
|
||||
});
|
||||
|
||||
if (!started) {
|
||||
throw new Error("Program could not be started.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async #spawn(options: {
|
||||
command: string;
|
||||
args?: string[];
|
||||
cwd?: string;
|
||||
env?: Record<string, string | undefined>;
|
||||
isDebugee?: boolean;
|
||||
}): Promise<boolean> {
|
||||
const { command, args = [], cwd, env, isDebugee } = options;
|
||||
const request = { command, args, cwd, env };
|
||||
this.emit("Process.requested", request);
|
||||
|
||||
let subprocess: ChildProcess;
|
||||
try {
|
||||
subprocess = spawn(command, args, {
|
||||
...request,
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
});
|
||||
} catch (cause) {
|
||||
this.emit("Process.exited", new Error("Failed to spawn process", { cause }), null);
|
||||
return false;
|
||||
}
|
||||
|
||||
subprocess.on("spawn", () => {
|
||||
this.emit("Process.spawned", subprocess);
|
||||
|
||||
if (isDebugee) {
|
||||
this.#process = subprocess;
|
||||
this.emitAdapterEvent("process", {
|
||||
name: `${command} ${args.join(" ")}`,
|
||||
systemProcessId: subprocess.pid,
|
||||
isLocalProcess: true,
|
||||
startMethod: "launch",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
subprocess.on("exit", (code, signal) => {
|
||||
this.emit("Process.exited", code, signal);
|
||||
|
||||
if (isDebugee) {
|
||||
this.#process = undefined;
|
||||
this.emitAdapterEvent("exited", {
|
||||
exitCode: code ?? -1,
|
||||
});
|
||||
this.emitAdapterEvent("terminated");
|
||||
}
|
||||
});
|
||||
|
||||
subprocess.stdout?.on("data", data => {
|
||||
this.emit("Process.stdout", data.toString());
|
||||
});
|
||||
|
||||
subprocess.stderr?.on("data", data => {
|
||||
this.emit("Process.stderr", data.toString());
|
||||
});
|
||||
|
||||
return new Promise(resolve => {
|
||||
subprocess.on("spawn", () => resolve(true));
|
||||
subprocess.on("exit", () => resolve(false));
|
||||
subprocess.on("error", () => resolve(false));
|
||||
});
|
||||
}
|
||||
|
||||
async attach(request: AttachRequest): Promise<void> {
|
||||
this.options = { ...request, type: "attach" };
|
||||
|
||||
try {
|
||||
await this.#attach(request);
|
||||
} catch (error) {
|
||||
// Some clients, like VSCode, will show a system-level popup when a `launch` request fails.
|
||||
// Instead, we want to show the error as a sidebar notification.
|
||||
const { message } = unknownToError(error);
|
||||
this.emitAdapterEvent("output", {
|
||||
category: "stderr",
|
||||
output: `Failed to start debugger.\n${message}`,
|
||||
});
|
||||
this.terminate();
|
||||
}
|
||||
}
|
||||
|
||||
async #attach(request: AttachRequest): Promise<boolean> {
|
||||
const { url } = request;
|
||||
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const ok = await this.inspector.start(url);
|
||||
if (ok) {
|
||||
return true;
|
||||
}
|
||||
await new Promise(resolve => setTimeout(resolve, 100 * i));
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export const DebugAdapter = WebSocketDebugAdapter;
|
||||
|
||||
function stoppedReason(reason: JSC.Debugger.PausedEvent["reason"]): DAP.StoppedEvent["reason"] {
|
||||
switch (reason) {
|
||||
case "Breakpoint":
|
||||
|
||||
@@ -0,0 +1,117 @@
|
||||
import type { Socket } from "node:net";
|
||||
const enum FramerState {
|
||||
WaitingForLength,
|
||||
WaitingForMessage,
|
||||
}
|
||||
|
||||
let socketFramerMessageLengthBuffer: Buffer;
|
||||
export class SocketFramer {
|
||||
state: FramerState = FramerState.WaitingForLength;
|
||||
pendingLength: number = 0;
|
||||
sizeBuffer: Buffer = Buffer.alloc(4);
|
||||
sizeBufferIndex: number = 0;
|
||||
bufferedData: Buffer = Buffer.alloc(0);
|
||||
socket: Socket;
|
||||
private onMessage: (message: string | string[]) => void;
|
||||
|
||||
constructor(socket: Socket, onMessage: (message: string | string[]) => void) {
|
||||
this.socket = socket;
|
||||
this.onMessage = onMessage;
|
||||
|
||||
if (!socketFramerMessageLengthBuffer) {
|
||||
socketFramerMessageLengthBuffer = Buffer.alloc(4);
|
||||
}
|
||||
|
||||
this.reset();
|
||||
}
|
||||
|
||||
reset(): void {
|
||||
this.state = FramerState.WaitingForLength;
|
||||
this.bufferedData = Buffer.alloc(0);
|
||||
this.sizeBufferIndex = 0;
|
||||
this.sizeBuffer = Buffer.alloc(4);
|
||||
}
|
||||
|
||||
send(data: string): void {
|
||||
socketFramerMessageLengthBuffer.writeUInt32BE(data.length, 0);
|
||||
this.socket.write(socketFramerMessageLengthBuffer);
|
||||
this.socket.write(data);
|
||||
}
|
||||
|
||||
onData(data: Buffer): void {
|
||||
this.bufferedData = this.bufferedData.length > 0 ? Buffer.concat([this.bufferedData, data]) : data;
|
||||
|
||||
let messagesToDeliver: string[] = [];
|
||||
let position = 0;
|
||||
|
||||
while (position < this.bufferedData.length) {
|
||||
// Need 4 bytes for the length
|
||||
if (this.bufferedData.length - position < 4) {
|
||||
break;
|
||||
}
|
||||
|
||||
// Read the length prefix
|
||||
const messageLength = this.bufferedData.readUInt32BE(position);
|
||||
|
||||
// Validate message length
|
||||
if (messageLength <= 0 || messageLength > 1024 * 1024) {
|
||||
// 1MB max
|
||||
// Try to resync by looking for the next valid message
|
||||
let newPosition = position + 1;
|
||||
let found = false;
|
||||
|
||||
while (newPosition < this.bufferedData.length - 4) {
|
||||
const testLength = this.bufferedData.readUInt32BE(newPosition);
|
||||
|
||||
if (testLength > 0 && testLength <= 1024 * 1024) {
|
||||
// Verify we can read the full message
|
||||
if (this.bufferedData.length - newPosition - 4 >= testLength) {
|
||||
const testMessage = this.bufferedData.toString("utf-8", newPosition + 4, newPosition + 4 + testLength);
|
||||
|
||||
if (testMessage.startsWith('{"')) {
|
||||
position = newPosition;
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
newPosition++;
|
||||
}
|
||||
|
||||
if (!found) {
|
||||
// Couldn't find a valid message, discard buffer up to this point
|
||||
this.bufferedData = this.bufferedData.slice(position + 4);
|
||||
return;
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if we have the complete message
|
||||
if (this.bufferedData.length - position - 4 < messageLength) {
|
||||
break;
|
||||
}
|
||||
|
||||
const message = this.bufferedData.toString("utf-8", position + 4, position + 4 + messageLength);
|
||||
if (message.startsWith('{"')) {
|
||||
messagesToDeliver.push(message);
|
||||
}
|
||||
|
||||
position += 4 + messageLength;
|
||||
}
|
||||
|
||||
if (position > 0) {
|
||||
this.bufferedData =
|
||||
position < this.bufferedData.length ? this.bufferedData.slice(position) : SocketFramer.emptyBuffer;
|
||||
}
|
||||
|
||||
if (messagesToDeliver.length === 1) {
|
||||
this.onMessage(messagesToDeliver[0]);
|
||||
} else if (messagesToDeliver.length > 1) {
|
||||
this.onMessage(messagesToDeliver);
|
||||
}
|
||||
}
|
||||
|
||||
private static emptyBuffer = Buffer.from([]);
|
||||
}
|
||||
@@ -11,6 +11,8 @@ export type UnixSignalEventMap = {
|
||||
"Signal.error": [Error];
|
||||
"Signal.received": [string];
|
||||
"Signal.closed": [];
|
||||
"Signal.Socket.closed": [socket: Socket];
|
||||
"Signal.Socket.connect": [socket: Socket];
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -21,7 +23,7 @@ export class UnixSignal extends EventEmitter<UnixSignalEventMap> {
|
||||
#server: Server;
|
||||
#ready: Promise<void>;
|
||||
|
||||
constructor(path?: string | URL) {
|
||||
constructor(path?: string | URL | undefined) {
|
||||
super();
|
||||
this.#path = path ? parseUnixPath(path) : randomUnixPath();
|
||||
this.#server = createServer();
|
||||
@@ -29,9 +31,13 @@ export class UnixSignal extends EventEmitter<UnixSignalEventMap> {
|
||||
this.#server.on("error", error => this.emit("Signal.error", error));
|
||||
this.#server.on("close", () => this.emit("Signal.closed"));
|
||||
this.#server.on("connection", socket => {
|
||||
this.emit("Signal.Socket.connect", socket);
|
||||
socket.on("data", data => {
|
||||
this.emit("Signal.received", data.toString());
|
||||
});
|
||||
socket.on("close", () => {
|
||||
this.emit("Signal.Socket.closed", socket);
|
||||
});
|
||||
});
|
||||
this.#ready = new Promise((resolve, reject) => {
|
||||
this.#server.on("listening", resolve);
|
||||
@@ -45,7 +51,7 @@ export class UnixSignal extends EventEmitter<UnixSignalEventMap> {
|
||||
console.log(event, ...args);
|
||||
}
|
||||
|
||||
return super.emit(event, ...args);
|
||||
return super.emit(event, ...(args as never));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -91,6 +97,8 @@ export type TCPSocketSignalEventMap = {
|
||||
"Signal.error": [Error];
|
||||
"Signal.closed": [];
|
||||
"Signal.received": [string];
|
||||
"Signal.Socket.closed": [socket: Socket];
|
||||
"Signal.Socket.connect": [socket: Socket];
|
||||
};
|
||||
|
||||
export class TCPSocketSignal extends EventEmitter {
|
||||
@@ -103,6 +111,8 @@ export class TCPSocketSignal extends EventEmitter {
|
||||
this.#port = port;
|
||||
|
||||
this.#server = createServer((socket: Socket) => {
|
||||
this.emit("Signal.Socket.connect", socket);
|
||||
|
||||
socket.on("data", data => {
|
||||
this.emit("Signal.received", data.toString());
|
||||
});
|
||||
@@ -112,10 +122,14 @@ export class TCPSocketSignal extends EventEmitter {
|
||||
});
|
||||
|
||||
socket.on("close", () => {
|
||||
this.emit("Signal.closed");
|
||||
this.emit("Signal.Socket.closed", socket);
|
||||
});
|
||||
});
|
||||
|
||||
this.#server.on("close", () => {
|
||||
this.emit("Signal.closed");
|
||||
});
|
||||
|
||||
this.#ready = new Promise((resolve, reject) => {
|
||||
this.#server.listen(this.#port, () => {
|
||||
this.emit("Signal.listening");
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { expect, test } from "bun:test";
|
||||
import { readFileSync } from "node:fs";
|
||||
import { SourceMap } from "./sourcemap";
|
||||
import { SourceMap } from "./sourcemap.js";
|
||||
|
||||
test("works without source map", () => {
|
||||
const sourceMap = getSourceMap("without-sourcemap.js");
|
||||
|
||||
@@ -21,7 +21,15 @@ export type Location = {
|
||||
);
|
||||
|
||||
export interface SourceMap {
|
||||
/**
|
||||
* Converts a location in the original source to a location in the generated source.
|
||||
* @param request A request
|
||||
*/
|
||||
generatedLocation(request: LocationRequest): Location;
|
||||
/**
|
||||
* Converts a location in the generated source to a location in the original source.
|
||||
* @param request A request
|
||||
*/
|
||||
originalLocation(request: LocationRequest): Location;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"lib": ["ESNext"],
|
||||
"module": "esnext",
|
||||
"module": "NodeNext",
|
||||
"target": "esnext",
|
||||
"moduleResolution": "nodenext",
|
||||
"moduleDetection": "force",
|
||||
"allowImportingTsExtensions": true,
|
||||
"noEmit": true,
|
||||
"composite": true,
|
||||
// "composite": true,
|
||||
"strict": true,
|
||||
"downlevelIteration": true,
|
||||
"skipLibCheck": true,
|
||||
@@ -15,7 +15,7 @@
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"inlineSourceMap": true,
|
||||
"allowJs": true,
|
||||
"outDir": "dist",
|
||||
"outDir": "dist"
|
||||
},
|
||||
"include": ["src", "scripts", "../bun-types/index.d.ts", "../bun-inspector-protocol/src"]
|
||||
"include": ["src", "scripts", "../bun-types/index.d.ts", "../bun-inspector-protocol/**/*.ts"]
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
export type * from "./src/inspector";
|
||||
export * from "./src/inspector/websocket";
|
||||
export type * from "./src/protocol";
|
||||
export * from "./src/util/preview";
|
||||
export type * from "./src/inspector/index.js";
|
||||
export * from "./src/inspector/websocket.js";
|
||||
export type * from "./src/protocol/index.js";
|
||||
export * from "./src/util/preview.js";
|
||||
|
||||
@@ -1,26 +1,7 @@
|
||||
import { spawnSync } from "node:child_process";
|
||||
import { readFileSync, writeFileSync } from "node:fs";
|
||||
import { readFileSync, writeFileSync, realpathSync } from "node:fs";
|
||||
import type { Domain, Property, Protocol } from "../src/protocol/schema";
|
||||
|
||||
run().catch(console.error);
|
||||
|
||||
async function run() {
|
||||
const cwd = new URL("../src/protocol/", import.meta.url);
|
||||
const runner = "Bun" in globalThis ? "bunx" : "npx";
|
||||
const write = (name: string, data: string) => {
|
||||
const path = new URL(name, cwd);
|
||||
writeFileSync(path, data);
|
||||
spawnSync(runner, ["prettier", "--write", path.pathname], { cwd, stdio: "ignore" });
|
||||
};
|
||||
const base = readFileSync(new URL("protocol.d.ts", cwd), "utf-8");
|
||||
const baseNoComments = base.replace(/\/\/.*/g, "");
|
||||
const jsc = await downloadJsc();
|
||||
write("jsc/protocol.json", JSON.stringify(jsc));
|
||||
write("jsc/index.d.ts", "// GENERATED - DO NOT EDIT\n" + formatProtocol(jsc, baseNoComments));
|
||||
const v8 = await downloadV8();
|
||||
write("v8/protocol.json", JSON.stringify(v8));
|
||||
write("v8/index.d.ts", "// GENERATED - DO NOT EDIT\n" + formatProtocol(v8, baseNoComments));
|
||||
}
|
||||
import path from "node:path";
|
||||
|
||||
function formatProtocol(protocol: Protocol, extraTs?: string): string {
|
||||
const { name, domains } = protocol;
|
||||
@@ -29,6 +10,7 @@ function formatProtocol(protocol: Protocol, extraTs?: string): string {
|
||||
let body = `export namespace ${name} {`;
|
||||
for (const { domain, types = [], events = [], commands = [] } of domains) {
|
||||
body += `export namespace ${domain} {`;
|
||||
|
||||
for (const type of types) {
|
||||
body += formatProperty(type);
|
||||
}
|
||||
@@ -153,32 +135,12 @@ async function downloadV8(): Promise<Protocol> {
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* @link https://github.com/WebKit/WebKit/tree/main/Source/JavaScriptCore/inspector/protocol
|
||||
*/
|
||||
async function downloadJsc(): Promise<Protocol> {
|
||||
const baseUrl = "https://raw.githubusercontent.com/WebKit/WebKit/main/Source/JavaScriptCore/inspector/protocol";
|
||||
const domains = [
|
||||
"Runtime",
|
||||
"Console",
|
||||
"Debugger",
|
||||
"Heap",
|
||||
"ScriptProfiler",
|
||||
"CPUProfiler",
|
||||
"GenericTypes",
|
||||
"Network",
|
||||
"Inspector",
|
||||
];
|
||||
return {
|
||||
name: "JSC",
|
||||
version: {
|
||||
major: 1,
|
||||
minor: 3,
|
||||
},
|
||||
domains: await Promise.all(domains.map(domain => download<Domain>(`${baseUrl}/${domain}.json`))).then(domains =>
|
||||
domains.sort((a, b) => a.domain.localeCompare(b.domain)),
|
||||
),
|
||||
};
|
||||
async function getJSC(): Promise<Protocol> {
|
||||
let bunExecutable = Bun.which("bun-debug") || process.execPath;
|
||||
if (!bunExecutable) {
|
||||
throw new Error("bun-debug not found");
|
||||
}
|
||||
bunExecutable = realpathSync(bunExecutable);
|
||||
}
|
||||
|
||||
async function download<V>(url: string): Promise<V> {
|
||||
@@ -200,3 +162,39 @@ function toComment(description?: string): string {
|
||||
const lines = ["/**", ...description.split("\n").map(line => ` * ${line.trim()}`), "*/"];
|
||||
return lines.join("\n");
|
||||
}
|
||||
|
||||
const cwd = new URL("../src/protocol/", import.meta.url);
|
||||
const runner = "Bun" in globalThis ? "bunx" : "npx";
|
||||
const write = (name: string, data: string) => {
|
||||
const filePath = path.resolve(__dirname, "..", "src", "protocol", name);
|
||||
writeFileSync(filePath, data);
|
||||
spawnSync(runner, ["prettier", "--write", filePath], { cwd, stdio: "ignore" });
|
||||
};
|
||||
const base = readFileSync(new URL("protocol.d.ts", cwd), "utf-8");
|
||||
const baseNoComments = base.replace(/\/\/.*/g, "");
|
||||
|
||||
const jscJsonFile = path.resolve(__dirname, process.argv.at(-1) ?? "");
|
||||
let jscJSONFile;
|
||||
try {
|
||||
jscJSONFile = await Bun.file(jscJsonFile).json();
|
||||
} catch (error) {
|
||||
console.warn("Failed to read CombinedDomains.json from WebKit build. Is this a WebKit build from Bun?");
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const jsc = {
|
||||
name: "JSC",
|
||||
version: {
|
||||
major: 1,
|
||||
minor: 4,
|
||||
},
|
||||
domains: jscJSONFile.domains
|
||||
.filter(a => a.debuggableTypes?.includes?.("javascript"))
|
||||
.sort((a, b) => a.domain.localeCompare(b.domain)),
|
||||
};
|
||||
write("jsc/protocol.json", JSON.stringify(jsc, null, 2));
|
||||
write("jsc/index.d.ts", "// GENERATED - DO NOT EDIT\n" + formatProtocol(jsc, baseNoComments));
|
||||
const v8 = await downloadV8();
|
||||
write("v8/protocol.json", JSON.stringify(v8));
|
||||
write("v8/index.d.ts", "// GENERATED - DO NOT EDIT\n" + formatProtocol(v8, baseNoComments));
|
||||
|
||||
235
packages/bun-inspector-protocol/src/inspector/node-socket.ts
Normal file
235
packages/bun-inspector-protocol/src/inspector/node-socket.ts
Normal file
@@ -0,0 +1,235 @@
|
||||
import { EventEmitter } from "node:events";
|
||||
import { Socket } from "node:net";
|
||||
import { SocketFramer } from "../../../bun-debug-adapter-protocol/src/debugger/node-socket-framer.js";
|
||||
import type { JSC } from "../protocol";
|
||||
import type { Inspector, InspectorEventMap } from "./index";
|
||||
|
||||
/**
|
||||
* An inspector that communicates with a debugger over a (unix) socket.
|
||||
* This is used in the extension as follows:
|
||||
*
|
||||
* 1. Extension sets environment variable `BUN_INSPECT_NOTIFY` inside of all vscode terminals.
|
||||
* This is a path to a unix socket that the extension will listen on.
|
||||
* 2. Bun reads it and connects to the socket, setting up a reverse connection for sending DAP
|
||||
* messages.
|
||||
*/
|
||||
export class NodeSocketInspector extends EventEmitter<InspectorEventMap> implements Inspector {
|
||||
#ready: Promise<boolean> | undefined;
|
||||
#socket: Socket;
|
||||
#requestId: number;
|
||||
#pendingRequests: JSC.Request[];
|
||||
#pendingResponses: Map<
|
||||
number,
|
||||
{
|
||||
request: JSC.Request;
|
||||
done: (result: unknown) => void;
|
||||
}
|
||||
>;
|
||||
#framer: SocketFramer;
|
||||
|
||||
constructor(socket: Socket) {
|
||||
super();
|
||||
this.#socket = socket;
|
||||
this.#requestId = 1;
|
||||
this.#pendingRequests = [];
|
||||
this.#pendingResponses = new Map();
|
||||
|
||||
this.#framer = new SocketFramer(socket, message => {
|
||||
if (Array.isArray(message)) {
|
||||
for (const m of message) {
|
||||
this.#accept(m);
|
||||
}
|
||||
} else {
|
||||
this.#accept(message);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private onConnectOrImmediately(cb: () => void) {
|
||||
const isAlreadyConnected = this.#socket.connecting === false;
|
||||
|
||||
if (isAlreadyConnected) {
|
||||
cb();
|
||||
} else {
|
||||
this.#socket.once("connect", cb);
|
||||
}
|
||||
}
|
||||
|
||||
async start(): Promise<boolean> {
|
||||
if (this.#ready) {
|
||||
return this.#ready;
|
||||
}
|
||||
|
||||
if (this.closed) {
|
||||
this.close();
|
||||
const addressWithPort = this.#socket.remoteAddress + ":" + this.#socket.remotePort;
|
||||
this.emit("Inspector.connecting", addressWithPort);
|
||||
}
|
||||
|
||||
const socket = this.#socket;
|
||||
|
||||
this.onConnectOrImmediately(() => {
|
||||
this.emit("Inspector.connected");
|
||||
|
||||
for (let i = 0; i < this.#pendingRequests.length; i++) {
|
||||
const request = this.#pendingRequests[i];
|
||||
|
||||
if (this.#send(request)) {
|
||||
this.emit("Inspector.request", request);
|
||||
} else {
|
||||
this.#pendingRequests = this.#pendingRequests.slice(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
socket.on("data", data => this.#framer.onData(data));
|
||||
|
||||
socket.on("error", error => {
|
||||
this.#close(unknownToError(error));
|
||||
});
|
||||
|
||||
socket.on("close", hadError => {
|
||||
if (hadError) {
|
||||
this.#close(new Error("Socket closed due to a transmission error"));
|
||||
} else {
|
||||
this.#close();
|
||||
}
|
||||
});
|
||||
|
||||
const ready = new Promise<boolean>(resolve => {
|
||||
if (socket.connecting) {
|
||||
socket.on("connect", () => resolve(true));
|
||||
} else {
|
||||
resolve(true);
|
||||
}
|
||||
socket.on("close", () => resolve(false));
|
||||
socket.on("error", () => resolve(false));
|
||||
}).finally(() => {
|
||||
this.#ready = undefined;
|
||||
});
|
||||
|
||||
this.#ready = ready;
|
||||
|
||||
return ready;
|
||||
}
|
||||
|
||||
send<M extends keyof JSC.RequestMap & keyof JSC.ResponseMap>(
|
||||
method: M,
|
||||
params?: JSC.RequestMap[M] | undefined,
|
||||
): Promise<JSC.ResponseMap[M]> {
|
||||
const id = this.#requestId++;
|
||||
const request = {
|
||||
id,
|
||||
method,
|
||||
params: params ?? {},
|
||||
};
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let timerId: number | undefined;
|
||||
const done = (result: any) => {
|
||||
this.#pendingResponses.delete(id);
|
||||
if (timerId) {
|
||||
clearTimeout(timerId);
|
||||
}
|
||||
if (result instanceof Error) {
|
||||
reject(result);
|
||||
} else {
|
||||
resolve(result);
|
||||
}
|
||||
};
|
||||
|
||||
this.#pendingResponses.set(id, {
|
||||
request: request,
|
||||
done: done,
|
||||
});
|
||||
|
||||
if (this.#send(request)) {
|
||||
timerId = +setTimeout(() => done(new Error(`Timed out: ${method}`)), 10_000);
|
||||
this.emit("Inspector.request", request);
|
||||
} else {
|
||||
this.emit("Inspector.pendingRequest", request);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#send(request: JSC.Request): boolean {
|
||||
this.#framer.send(JSON.stringify(request));
|
||||
|
||||
if (!this.#pendingRequests.includes(request)) {
|
||||
this.#pendingRequests.push(request);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
#accept(message: string): void {
|
||||
let data: JSC.Event | JSC.Response;
|
||||
try {
|
||||
data = JSON.parse(message);
|
||||
} catch (cause) {
|
||||
this.emit("Inspector.error", new Error(`Failed to parse message: ${message}`, { cause }));
|
||||
return;
|
||||
}
|
||||
|
||||
if (!("id" in data)) {
|
||||
this.emit("Inspector.event", data);
|
||||
const { method, params } = data;
|
||||
this.emit(method, params);
|
||||
return;
|
||||
}
|
||||
|
||||
this.emit("Inspector.response", data);
|
||||
|
||||
const { id } = data;
|
||||
const handle = this.#pendingResponses.get(id);
|
||||
if (!handle) {
|
||||
this.emit("Inspector.error", new Error(`Failed to find matching request for ID: ${id}`));
|
||||
return;
|
||||
}
|
||||
|
||||
if ("error" in data) {
|
||||
const { error } = data;
|
||||
const { message } = error;
|
||||
handle.done(new Error(message));
|
||||
} else {
|
||||
const { result } = data;
|
||||
handle.done(result);
|
||||
}
|
||||
}
|
||||
|
||||
get closed(): boolean {
|
||||
return !this.#socket.writable;
|
||||
}
|
||||
|
||||
close(): void {
|
||||
this.#socket?.end();
|
||||
}
|
||||
|
||||
#close(error?: Error): void {
|
||||
for (const handle of this.#pendingResponses.values()) {
|
||||
handle.done(error ?? new Error("Socket closed while waiting for: " + handle.request.method));
|
||||
}
|
||||
|
||||
this.#pendingResponses.clear();
|
||||
|
||||
if (error) {
|
||||
this.emit("Inspector.error", error);
|
||||
}
|
||||
|
||||
this.emit("Inspector.disconnected", error);
|
||||
}
|
||||
}
|
||||
|
||||
function unknownToError(input: unknown): Error {
|
||||
if (input instanceof Error) {
|
||||
return input;
|
||||
}
|
||||
|
||||
if (typeof input === "object" && input !== null && "message" in input) {
|
||||
const { message } = input;
|
||||
return new Error(`${message}`);
|
||||
}
|
||||
|
||||
return new Error(`${input}`);
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
import { EventEmitter } from "node:events";
|
||||
import { WebSocket } from "ws";
|
||||
import type { Inspector, InspectorEventMap } from ".";
|
||||
import type { JSC } from "../protocol";
|
||||
import type { Inspector, InspectorEventMap } from "./index";
|
||||
|
||||
/**
|
||||
* An inspector that communicates with a debugger over a WebSocket.
|
||||
@@ -170,6 +170,7 @@ export class WebSocketInspector extends EventEmitter<InspectorEventMap> implemen
|
||||
|
||||
#accept(message: string): void {
|
||||
let data: JSC.Event | JSC.Response;
|
||||
|
||||
try {
|
||||
data = JSON.parse(message);
|
||||
} catch (cause) {
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -3,7 +3,7 @@
|
||||
"lib": ["ESNext"],
|
||||
"module": "ESNext",
|
||||
"target": "ESNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"moduleResolution": "Bundler",
|
||||
"moduleDetection": "force",
|
||||
"strict": true,
|
||||
"downlevelIteration": true,
|
||||
@@ -12,7 +12,7 @@
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"inlineSourceMap": true,
|
||||
"allowJs": true,
|
||||
"outDir": "dist",
|
||||
"outDir": "dist"
|
||||
},
|
||||
"include": [".", "../bun-types/index.d.ts"]
|
||||
}
|
||||
|
||||
4
packages/bun-types/.gitignore
vendored
4
packages/bun-types/.gitignore
vendored
@@ -1,2 +1,4 @@
|
||||
node_modules/
|
||||
dist/
|
||||
dist/
|
||||
docs/
|
||||
*.tgz
|
||||
10
packages/bun-types/bun.d.ts
vendored
10
packages/bun-types/bun.d.ts
vendored
@@ -3873,7 +3873,6 @@ declare module "bun" {
|
||||
* The default loader for this file extension
|
||||
*/
|
||||
loader: Loader;
|
||||
|
||||
/**
|
||||
* Defer the execution of this callback until all other modules have been parsed.
|
||||
*
|
||||
@@ -3899,6 +3898,10 @@ declare module "bun" {
|
||||
* The namespace of the importer.
|
||||
*/
|
||||
namespace: string;
|
||||
/**
|
||||
* The directory to perform file-based resolutions in.
|
||||
*/
|
||||
resolveDir: string;
|
||||
/**
|
||||
* The kind of import this resolve is for.
|
||||
*/
|
||||
@@ -4534,6 +4537,11 @@ declare module "bun" {
|
||||
unix: string;
|
||||
}
|
||||
|
||||
interface FdSocketOptions<Data = undefined> extends SocketOptions<Data> {
|
||||
tls?: TLSOptions;
|
||||
fd: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a TCP client that connects to a server
|
||||
*
|
||||
|
||||
@@ -3,14 +3,16 @@
|
||||
"license": "MIT",
|
||||
"main": "",
|
||||
"types": "index.d.ts",
|
||||
"description": "Type definitions for Bun, an incredibly fast JavaScript runtime",
|
||||
"description": "Type definitions and documentation for Bun, an incredibly fast JavaScript runtime",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/oven-sh/bun",
|
||||
"directory": "packages/bun-types"
|
||||
},
|
||||
"files": [
|
||||
"*.d.ts"
|
||||
"*.d.ts",
|
||||
"docs/**/*.md",
|
||||
"docs/*.md"
|
||||
],
|
||||
"homepage": "https://bun.sh",
|
||||
"dependencies": {
|
||||
@@ -25,7 +27,8 @@
|
||||
},
|
||||
"scripts": {
|
||||
"prebuild": "echo $(pwd)",
|
||||
"build": "bun scripts/build.ts && bun run fmt",
|
||||
"copy-docs": "rm -rf docs && cp -r ../../docs/ ./docs",
|
||||
"build": "bun run copy-docs && bun scripts/build.ts && bun run fmt",
|
||||
"test": "tsc",
|
||||
"fmt": "echo $(which biome) && biome format --write ."
|
||||
},
|
||||
|
||||
6
packages/bun-types/test.d.ts
vendored
6
packages/bun-types/test.d.ts
vendored
@@ -387,9 +387,9 @@ declare module "bun:test" {
|
||||
/**
|
||||
* Marks this test as to be written or to be fixed.
|
||||
*
|
||||
* When a test function is passed, it will be marked as `todo` in the test results
|
||||
* as long the test does not pass. When the test passes, the test will be marked as
|
||||
* `fail` in the results; you will have to remove the `.todo` or check that your test
|
||||
* These tests will not be executed unless the `--todo` flag is passed. With the flag,
|
||||
* if the test passes, the test will be marked as `fail` in the results; you will have to
|
||||
* remove the `.todo` or check that your test
|
||||
* is implemented correctly.
|
||||
*
|
||||
* @param label the label for the test
|
||||
|
||||
@@ -18,8 +18,7 @@ const __filename = fileURLToPath(import.meta.url);
|
||||
const now = new Date();
|
||||
|
||||
const formatDate = d => {
|
||||
const iso = d.toISOString();
|
||||
return iso.substring(0, iso.indexOf("T"));
|
||||
return d;
|
||||
};
|
||||
|
||||
const getCertdataURL = version => {
|
||||
@@ -146,26 +145,35 @@ if (values.help) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const scheduleURL = "https://wiki.mozilla.org/NSS:Release_Versions";
|
||||
if (values.verbose) {
|
||||
console.log(`Fetching NSS release schedule from ${scheduleURL}`);
|
||||
}
|
||||
const schedule = await fetch(scheduleURL);
|
||||
if (!schedule.ok) {
|
||||
console.error(`Failed to fetch ${scheduleURL}: ${schedule.status}: ${schedule.statusText}`);
|
||||
process.exit(-1);
|
||||
}
|
||||
const scheduleText = await schedule.text();
|
||||
const nssReleases = getReleases(scheduleText);
|
||||
const versions = await fetch("https://nucleus.mozilla.org/rna/all-releases.json").then(res => res.json());
|
||||
|
||||
// Retrieve metadata for the NSS release being updated to.
|
||||
const version = positionals[0] ?? (await getLatestVersion(nssReleases));
|
||||
const release = nssReleases.find(r => {
|
||||
return new RegExp(`^${version.replace(".", "\\.")}\\b`).test(r[kNSSVersion]);
|
||||
});
|
||||
if (!pastRelease(release)) {
|
||||
console.warn(`Warning: NSS ${version} is not due to be released until ${formatDate(release[kNSSDate])}`);
|
||||
const today = new Date().toISOString().split("T")[0].trim();
|
||||
const releases = versions
|
||||
.filter(
|
||||
version =>
|
||||
version.channel == "Release" &&
|
||||
version.product === "Firefox" &&
|
||||
version.is_public &&
|
||||
version.release_date <= today,
|
||||
)
|
||||
.sort((a, b) => (a > b ? (a == b ? 0 : -1) : 1));
|
||||
const latest = releases[0];
|
||||
const release_tag = `FIREFOX_${latest.version.replaceAll(".", "_")}_RELEASE`;
|
||||
if (values.verbose) {
|
||||
console.log(`Fetching NSS release from ${release_tag}`);
|
||||
}
|
||||
const version = await fetch(
|
||||
`https://hg.mozilla.org/releases/mozilla-release/raw-file/${release_tag}/security/nss/TAG-INFO`,
|
||||
)
|
||||
.then(res => res.text())
|
||||
.then(txt => txt.trim().split("NSS_")[1].split("_RTM").join("").split("_").join(".").trim());
|
||||
|
||||
const release = {
|
||||
version: version,
|
||||
firefoxVersion: latest.version,
|
||||
firefoxDate: latest.release_date,
|
||||
date: latest.release_date,
|
||||
};
|
||||
if (values.verbose) {
|
||||
console.log("Found NSS version:");
|
||||
console.log(release);
|
||||
|
||||
@@ -623,18 +623,34 @@ inline __attribute__((always_inline)) LIBUS_SOCKET_DESCRIPTOR bsd_bind_listen_fd
|
||||
setsockopt(listenFd, SOL_SOCKET, SO_EXCLUSIVEADDRUSE, (void *) &optval2, sizeof(optval2));
|
||||
#endif
|
||||
} else {
|
||||
#if defined(SO_REUSEPORT)
|
||||
int optval2 = 1;
|
||||
setsockopt(listenFd, SOL_SOCKET, SO_REUSEPORT, (void *) &optval2, sizeof(optval2));
|
||||
#endif
|
||||
#if defined(SO_REUSEPORT)
|
||||
if((options & LIBUS_LISTEN_REUSE_PORT)) {
|
||||
int optval2 = 1;
|
||||
setsockopt(listenFd, SOL_SOCKET, SO_REUSEPORT, (void *) &optval2, sizeof(optval2));
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
#if defined(SO_REUSEADDR)
|
||||
#ifndef _WIN32
|
||||
|
||||
// Unlike on Unix, here we don't set SO_REUSEADDR, because it doesn't just
|
||||
// allow binding to addresses that are in use by sockets in TIME_WAIT, it
|
||||
// effectively allows 'stealing' a port which is in use by another application.
|
||||
// See libuv issue #1360.
|
||||
|
||||
|
||||
int optval3 = 1;
|
||||
setsockopt(listenFd, SOL_SOCKET, SO_REUSEADDR, (void *) &optval3, sizeof(optval3));
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#ifdef IPV6_V6ONLY
|
||||
// TODO: revise support to match node.js
|
||||
// if (listenAddr->ai_family == AF_INET6) {
|
||||
// int disabled = (options & LIBUS_SOCKET_IPV6_ONLY) != 0;
|
||||
// setsockopt(listenFd, IPPROTO_IPV6, IPV6_V6ONLY, (void *) &disabled, sizeof(disabled));
|
||||
// }
|
||||
int disabled = 0;
|
||||
setsockopt(listenFd, IPPROTO_IPV6, IPV6_V6ONLY, (void *) &disabled, sizeof(disabled));
|
||||
#endif
|
||||
|
||||
@@ -201,7 +201,7 @@ struct loop_ssl_data * us_internal_set_loop_ssl_data(struct us_internal_ssl_sock
|
||||
|
||||
struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
|
||||
int is_client, char *ip,
|
||||
int ip_length) {
|
||||
int ip_length, const char* sni) {
|
||||
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
@@ -231,6 +231,10 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
|
||||
if (is_client) {
|
||||
SSL_set_renegotiate_mode(s->ssl, ssl_renegotiate_explicit);
|
||||
SSL_set_connect_state(s->ssl);
|
||||
|
||||
if (sni) {
|
||||
SSL_set_tlsext_host_name(s->ssl, sni);
|
||||
}
|
||||
} else {
|
||||
SSL_set_accept_state(s->ssl);
|
||||
// we do not allow renegotiation on the server side (should be the default for BoringSSL, but we set to make openssl compatible)
|
||||
@@ -1603,6 +1607,10 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect_unix(
|
||||
socket_ext_size);
|
||||
}
|
||||
|
||||
static void ssl_on_open_without_sni(struct us_internal_ssl_socket_t *s, int is_client, char *ip, int ip_length) {
|
||||
ssl_on_open(s, is_client, ip, ip_length, NULL);
|
||||
}
|
||||
|
||||
void us_internal_ssl_socket_context_on_open(
|
||||
struct us_internal_ssl_socket_context_t *context,
|
||||
struct us_internal_ssl_socket_t *(*on_open)(
|
||||
@@ -1611,7 +1619,7 @@ void us_internal_ssl_socket_context_on_open(
|
||||
us_socket_context_on_open(
|
||||
0, &context->sc,
|
||||
(struct us_socket_t * (*)(struct us_socket_t *, int, char *, int))
|
||||
ssl_on_open);
|
||||
ssl_on_open_without_sni);
|
||||
context->on_open = on_open;
|
||||
}
|
||||
|
||||
@@ -1850,9 +1858,6 @@ ssl_wrapped_context_on_close(struct us_internal_ssl_socket_t *s, int code,
|
||||
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
|
||||
context);
|
||||
|
||||
if (wrapped_context->events.on_close) {
|
||||
wrapped_context->events.on_close((struct us_socket_t *)s, code, reason);
|
||||
}
|
||||
|
||||
// writting here can cause the context to not be writable anymore but its the
|
||||
// user responsability to check for that
|
||||
@@ -1860,6 +1865,10 @@ ssl_wrapped_context_on_close(struct us_internal_ssl_socket_t *s, int code,
|
||||
wrapped_context->old_events.on_close((struct us_socket_t *)s, code, reason);
|
||||
}
|
||||
|
||||
if (wrapped_context->events.on_close) {
|
||||
wrapped_context->events.on_close((struct us_socket_t *)s, code, reason);
|
||||
}
|
||||
|
||||
us_socket_context_unref(0, wrapped_context->tcp_context);
|
||||
return s;
|
||||
}
|
||||
@@ -1872,9 +1881,6 @@ ssl_wrapped_context_on_writable(struct us_internal_ssl_socket_t *s) {
|
||||
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
|
||||
context);
|
||||
|
||||
if (wrapped_context->events.on_writable) {
|
||||
wrapped_context->events.on_writable((struct us_socket_t *)s);
|
||||
}
|
||||
|
||||
// writting here can cause the context to not be writable anymore but its the
|
||||
// user responsability to check for that
|
||||
@@ -1882,6 +1888,10 @@ ssl_wrapped_context_on_writable(struct us_internal_ssl_socket_t *s) {
|
||||
wrapped_context->old_events.on_writable((struct us_socket_t *)s);
|
||||
}
|
||||
|
||||
if (wrapped_context->events.on_writable) {
|
||||
wrapped_context->events.on_writable((struct us_socket_t *)s);
|
||||
}
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
@@ -1908,14 +1918,14 @@ ssl_wrapped_context_on_timeout(struct us_internal_ssl_socket_t *s) {
|
||||
struct us_wrapped_socket_context_t *wrapped_context =
|
||||
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
|
||||
context);
|
||||
if (wrapped_context->old_events.on_timeout) {
|
||||
wrapped_context->old_events.on_timeout((struct us_socket_t *)s);
|
||||
}
|
||||
|
||||
if (wrapped_context->events.on_timeout) {
|
||||
wrapped_context->events.on_timeout((struct us_socket_t *)s);
|
||||
}
|
||||
|
||||
if (wrapped_context->old_events.on_timeout) {
|
||||
wrapped_context->old_events.on_timeout((struct us_socket_t *)s);
|
||||
}
|
||||
|
||||
return s;
|
||||
}
|
||||
@@ -1927,15 +1937,14 @@ ssl_wrapped_context_on_long_timeout(struct us_internal_ssl_socket_t *s) {
|
||||
struct us_wrapped_socket_context_t *wrapped_context =
|
||||
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
|
||||
context);
|
||||
if (wrapped_context->old_events.on_long_timeout) {
|
||||
wrapped_context->old_events.on_long_timeout((struct us_socket_t *)s);
|
||||
}
|
||||
|
||||
if (wrapped_context->events.on_long_timeout) {
|
||||
wrapped_context->events.on_long_timeout((struct us_socket_t *)s);
|
||||
}
|
||||
|
||||
if (wrapped_context->old_events.on_long_timeout) {
|
||||
wrapped_context->old_events.on_long_timeout((struct us_socket_t *)s);
|
||||
}
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
@@ -1946,14 +1955,13 @@ ssl_wrapped_context_on_end(struct us_internal_ssl_socket_t *s) {
|
||||
struct us_wrapped_socket_context_t *wrapped_context =
|
||||
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
|
||||
context);
|
||||
|
||||
if (wrapped_context->events.on_end) {
|
||||
wrapped_context->events.on_end((struct us_socket_t *)s);
|
||||
}
|
||||
|
||||
if (wrapped_context->old_events.on_end) {
|
||||
wrapped_context->old_events.on_end((struct us_socket_t *)s);
|
||||
}
|
||||
if (wrapped_context->events.on_end) {
|
||||
wrapped_context->events.on_end((struct us_socket_t *)s);
|
||||
}
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
@@ -1965,13 +1973,13 @@ ssl_wrapped_on_connect_error(struct us_internal_ssl_socket_t *s, int code) {
|
||||
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
|
||||
context);
|
||||
|
||||
if (wrapped_context->old_events.on_connect_error) {
|
||||
wrapped_context->old_events.on_connect_error((struct us_connecting_socket_t *)s, code);
|
||||
}
|
||||
if (wrapped_context->events.on_connect_error) {
|
||||
wrapped_context->events.on_connect_error((struct us_connecting_socket_t *)s, code);
|
||||
}
|
||||
|
||||
if (wrapped_context->old_events.on_connect_error) {
|
||||
wrapped_context->old_events.on_connect_error((struct us_connecting_socket_t *)s, code);
|
||||
}
|
||||
return s;
|
||||
}
|
||||
|
||||
@@ -1982,14 +1990,14 @@ ssl_wrapped_on_socket_connect_error(struct us_internal_ssl_socket_t *s, int code
|
||||
struct us_wrapped_socket_context_t *wrapped_context =
|
||||
(struct us_wrapped_socket_context_t *)us_internal_ssl_socket_context_ext(
|
||||
context);
|
||||
|
||||
if (wrapped_context->old_events.on_connecting_socket_error) {
|
||||
wrapped_context->old_events.on_connecting_socket_error((struct us_socket_t *)s, code);
|
||||
}
|
||||
if (wrapped_context->events.on_connecting_socket_error) {
|
||||
wrapped_context->events.on_connecting_socket_error((struct us_socket_t *)s, code);
|
||||
}
|
||||
|
||||
if (wrapped_context->old_events.on_connecting_socket_error) {
|
||||
wrapped_context->old_events.on_connecting_socket_error((struct us_socket_t *)s, code);
|
||||
}
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
@@ -2005,7 +2013,30 @@ us_internal_ssl_socket_open(struct us_internal_ssl_socket_t *s, int is_client,
|
||||
return s;
|
||||
|
||||
// start SSL open
|
||||
return ssl_on_open(s, is_client, ip, ip_length);
|
||||
return ssl_on_open(s, is_client, ip, ip_length, NULL);
|
||||
}
|
||||
|
||||
struct us_socket_t *us_socket_upgrade_to_tls(us_socket_r s, us_socket_context_r new_context, const char *sni) {
|
||||
// Resize to tls + ext size
|
||||
void** prev_ext_ptr = (void**)us_socket_ext(0, s);
|
||||
void* prev_ext = *prev_ext_ptr;
|
||||
struct us_internal_ssl_socket_t *socket =
|
||||
(struct us_internal_ssl_socket_t *)us_socket_context_adopt_socket(
|
||||
0, new_context, s,
|
||||
(sizeof(struct us_internal_ssl_socket_t) - sizeof(struct us_socket_t)) + sizeof(void*));
|
||||
socket->ssl = NULL;
|
||||
socket->ssl_write_wants_read = 0;
|
||||
socket->ssl_read_wants_write = 0;
|
||||
socket->fatal_error = 0;
|
||||
socket->handshake_state = HANDSHAKE_PENDING;
|
||||
|
||||
void** new_ext_ptr = (void**)us_socket_ext(1, (struct us_socket_t *)socket);
|
||||
*new_ext_ptr = prev_ext;
|
||||
|
||||
ssl_on_open(socket, 1, NULL, 0, sni);
|
||||
|
||||
|
||||
return (struct us_socket_t *)socket;
|
||||
}
|
||||
|
||||
struct us_internal_ssl_socket_t *us_internal_ssl_socket_wrap_with_tls(
|
||||
|
||||
@@ -7,12 +7,6 @@
|
||||
#include <openssl/x509.h>
|
||||
#include <string.h>
|
||||
static const int root_certs_size = sizeof(root_certs) / sizeof(root_certs[0]);
|
||||
static X509 *root_cert_instances[sizeof(root_certs) / sizeof(root_certs[0])] = {
|
||||
NULL};
|
||||
static X509 *root_extra_cert_instances = {NULL};
|
||||
|
||||
static std::atomic_flag root_cert_instances_lock = ATOMIC_FLAG_INIT;
|
||||
static std::atomic_bool root_cert_instances_initialized = 0;
|
||||
|
||||
// This callback is used to avoid the default passphrase callback in OpenSSL
|
||||
// which will typically prompt for the passphrase. The prompting is designed
|
||||
@@ -78,7 +72,9 @@ end:
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static void us_internal_init_root_certs() {
|
||||
static void us_internal_init_root_certs(X509 *root_cert_instances[sizeof(root_certs) / sizeof(root_certs[0])], X509 *&root_extra_cert_instances) {
|
||||
static std::atomic_flag root_cert_instances_lock = ATOMIC_FLAG_INIT;
|
||||
static std::atomic_bool root_cert_instances_initialized = 0;
|
||||
if (std::atomic_load(&root_cert_instances_initialized) == 1)
|
||||
return;
|
||||
|
||||
@@ -123,7 +119,11 @@ extern "C" X509_STORE *us_get_default_ca_store() {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
us_internal_init_root_certs();
|
||||
static X509 *root_cert_instances[sizeof(root_certs) / sizeof(root_certs[0])] = {
|
||||
NULL};
|
||||
static X509 *root_extra_cert_instances = NULL;
|
||||
|
||||
us_internal_init_root_certs(root_cert_instances, root_extra_cert_instances);
|
||||
|
||||
// load all root_cert_instances on the default ca store
|
||||
for (size_t i = 0; i < root_certs_size; i++) {
|
||||
|
||||
@@ -96,6 +96,10 @@ enum {
|
||||
LIBUS_LISTEN_EXCLUSIVE_PORT = 1,
|
||||
/* Allow socket to keep writing after readable side closes */
|
||||
LIBUS_SOCKET_ALLOW_HALF_OPEN = 2,
|
||||
/* Setting reusePort allows multiple sockets on the same host to bind to the same port. Incoming connections are distributed by the operating system to listening sockets. This option is available only on some platforms, such as Linux 3.9+, DragonFlyBSD 3.6+, FreeBSD 12.0+, Solaris 11.4, and AIX 7.2.5+*/
|
||||
LIBUS_LISTEN_REUSE_PORT = 4,
|
||||
/* Setting ipv6Only will disable dual-stack support, i.e., binding to host :: won't make 0.0.0.0 be bound. */
|
||||
LIBUS_SOCKET_IPV6_ONLY = 8,
|
||||
};
|
||||
|
||||
/* Library types publicly available */
|
||||
@@ -190,7 +194,7 @@ struct us_socket_context_options_t {
|
||||
};
|
||||
|
||||
struct us_bun_verify_error_t {
|
||||
long error;
|
||||
int error;
|
||||
const char* code;
|
||||
const char* reason;
|
||||
};
|
||||
@@ -338,6 +342,8 @@ struct us_loop_t *us_socket_context_loop(int ssl, us_socket_context_r context) n
|
||||
* Used mainly for "socket upgrades" such as when transitioning from HTTP to WebSocket. */
|
||||
struct us_socket_t *us_socket_context_adopt_socket(int ssl, us_socket_context_r context, us_socket_r s, int ext_size);
|
||||
|
||||
struct us_socket_t *us_socket_upgrade_to_tls(us_socket_r s, us_socket_context_r new_context, const char *sni);
|
||||
|
||||
/* Create a child socket context which acts much like its own socket context with its own callbacks yet still relies on the
|
||||
* parent socket context for some shared resources. Child socket contexts should be used together with socket adoptions and nothing else. */
|
||||
struct us_socket_context_t *us_create_child_socket_context(int ssl, us_socket_context_r context, int context_ext_size);
|
||||
|
||||
@@ -22,6 +22,7 @@
|
||||
#include <sys/ioctl.h>
|
||||
#endif
|
||||
|
||||
|
||||
/* The loop has 2 fallthrough polls */
|
||||
void us_internal_loop_data_init(struct us_loop_t *loop, void (*wakeup_cb)(struct us_loop_t *loop),
|
||||
void (*pre_cb)(struct us_loop_t *loop), void (*post_cb)(struct us_loop_t *loop)) {
|
||||
|
||||
@@ -613,7 +613,9 @@ namespace uWS
|
||||
* ought to be handled as an error. */
|
||||
std::string_view transferEncodingString = req->getHeader("transfer-encoding");
|
||||
std::string_view contentLengthString = req->getHeader("content-length");
|
||||
if (transferEncodingString.length() && contentLengthString.length()) {
|
||||
auto transferEncodingStringLen = transferEncodingString.length();
|
||||
auto contentLengthStringLen = contentLengthString.length();
|
||||
if (transferEncodingStringLen && contentLengthStringLen) {
|
||||
/* Returning fullptr is the same as calling the errorHandler */
|
||||
/* We could be smart and set an error in the context along with this, to indicate what
|
||||
* http error response we might want to return */
|
||||
@@ -623,6 +625,15 @@ namespace uWS
|
||||
/* Parse query */
|
||||
const char *querySeparatorPtr = (const char *) memchr(req->headers->value.data(), '?', req->headers->value.length());
|
||||
req->querySeparator = (unsigned int) ((querySeparatorPtr ? querySeparatorPtr : req->headers->value.data() + req->headers->value.length()) - req->headers->value.data());
|
||||
|
||||
// lets check if content len is valid before calling requestHandler
|
||||
if(contentLengthStringLen) {
|
||||
remainingStreamingBytes = toUnsignedInteger(contentLengthString);
|
||||
if (remainingStreamingBytes == UINT64_MAX) {
|
||||
/* Parser error */
|
||||
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
|
||||
}
|
||||
}
|
||||
|
||||
/* If returned socket is not what we put in we need
|
||||
* to break here as we either have upgraded to
|
||||
@@ -642,7 +653,7 @@ namespace uWS
|
||||
/* RFC 9112 6.3
|
||||
* If a message is received with both a Transfer-Encoding and a Content-Length header field,
|
||||
* the Transfer-Encoding overrides the Content-Length. */
|
||||
if (transferEncodingString.length()) {
|
||||
if (transferEncodingStringLen) {
|
||||
|
||||
/* If a proxy sent us the transfer-encoding header that 100% means it must be chunked or else the proxy is
|
||||
* not RFC 9112 compliant. Therefore it is always better to assume this is the case, since that entirely eliminates
|
||||
@@ -665,6 +676,7 @@ namespace uWS
|
||||
dataHandler(user, chunk, chunk.length() == 0);
|
||||
}
|
||||
if (isParsingInvalidChunkedEncoding(remainingStreamingBytes)) {
|
||||
// TODO: what happen if we already responded?
|
||||
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
|
||||
}
|
||||
unsigned int consumed = (length - (unsigned int) dataToConsume.length());
|
||||
@@ -672,13 +684,8 @@ namespace uWS
|
||||
length = (unsigned int) dataToConsume.length();
|
||||
consumedTotal += consumed;
|
||||
}
|
||||
} else if (contentLengthString.length()) {
|
||||
remainingStreamingBytes = toUnsignedInteger(contentLengthString);
|
||||
if (remainingStreamingBytes == UINT64_MAX) {
|
||||
/* Parser error */
|
||||
return {HTTP_ERROR_400_BAD_REQUEST, FULLPTR};
|
||||
}
|
||||
|
||||
} else if (contentLengthStringLen) {
|
||||
|
||||
if (!CONSUME_MINIMALLY) {
|
||||
unsigned int emittable = (unsigned int) std::min<uint64_t>(remainingStreamingBytes, length);
|
||||
dataHandler(user, std::string_view(data, emittable), emittable == remainingStreamingBytes);
|
||||
|
||||
@@ -19,6 +19,26 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i
|
||||
<br/>
|
||||
</div>
|
||||
|
||||
## Features:
|
||||
|
||||
- Live in-editor error messages (gif below)
|
||||
- Test runner codelens
|
||||
- Debugger support
|
||||
- Run scripts from package.json
|
||||
- Visual lockfile viewer (`bun.lockb`)
|
||||
|
||||
## In-editor error messages
|
||||
|
||||
When running programs with Bun from a Visual Studio Code terminal, Bun will connect to the extension and report errors as they happen, at the exact location they happened. We recommend using this feature with `bun --watch` so you can see errors on every save.
|
||||
|
||||

|
||||
|
||||
<div align="center">
|
||||
<sup>In the example above VSCode is saving on every keypress. Under normal configuration you'd only see errors on every save.</sup>
|
||||
</div>
|
||||
|
||||
Errors are cleared whenever you start typing, or whenever the extension detects that Bun just started running (or reloading) a new program.
|
||||
|
||||
## Configuration
|
||||
|
||||
### `.vscode/launch.json`
|
||||
@@ -75,8 +95,8 @@ You can use the following configurations to debug JavaScript and TypeScript file
|
||||
// The URL of the WebSocket inspector to attach to.
|
||||
// This value can be retrieved by using `bun --inspect`.
|
||||
"url": "ws://localhost:6499/",
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
}
|
||||
```
|
||||
|
||||
@@ -91,8 +111,11 @@ You can use the following configurations to customize the behavior of the Bun ex
|
||||
|
||||
// If support for Bun should be added to the default "JavaScript Debug Terminal".
|
||||
"bun.debugTerminal.enabled": true,
|
||||
|
||||
|
||||
// If the debugger should stop on the first line of the program.
|
||||
"bun.debugTerminal.stopOnEntry": false,
|
||||
|
||||
// Glob pattern to find test files. Defaults to the value shown below.
|
||||
"bun.test.filePattern": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts}",
|
||||
}
|
||||
```
|
||||
```
|
||||
|
||||
BIN
packages/bun-vscode/error-messages.gif
Normal file
BIN
packages/bun-vscode/error-messages.gif
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 462 KiB |
3
packages/bun-vscode/example/.gitignore
vendored
Normal file
3
packages/bun-vscode/example/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
.bake-debug
|
||||
dist
|
||||
node_modules
|
||||
6
packages/bun-vscode/example/bake-test/bun.app.ts
Normal file
6
packages/bun-vscode/example/bake-test/bun.app.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
export default {
|
||||
port: 3000,
|
||||
app: {
|
||||
framework: "react",
|
||||
},
|
||||
};
|
||||
10
packages/bun-vscode/example/bake-test/pages/_layout.tsx
Normal file
10
packages/bun-vscode/example/bake-test/pages/_layout.tsx
Normal file
@@ -0,0 +1,10 @@
|
||||
import { PropsWithChildren } from "react";
|
||||
|
||||
export default function Layout({ children }: PropsWithChildren) {
|
||||
return (
|
||||
<div>
|
||||
{children}
|
||||
<footer>some rights reserved - {new Date().toString()}</footer>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
17
packages/bun-vscode/example/bake-test/pages/index.tsx
Normal file
17
packages/bun-vscode/example/bake-test/pages/index.tsx
Normal file
@@ -0,0 +1,17 @@
|
||||
"use client";
|
||||
|
||||
|
||||
import { useState } from "react";
|
||||
|
||||
function App() {
|
||||
const [count, setCount] = useState(null);
|
||||
|
||||
return (
|
||||
<>
|
||||
{/* @ts-expect-error */}
|
||||
<button onClick={() => setCount(count => count.charAt(0))}>count is {count}</button>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
export default App;
|
||||
3
packages/bun-vscode/example/bake-test/pages/two.tsx
Normal file
3
packages/bun-vscode/example/bake-test/pages/two.tsx
Normal file
@@ -0,0 +1,3 @@
|
||||
export default function Two() {
|
||||
return <p>Wow a second page! Bake is groundbreaking</p>;
|
||||
}
|
||||
1
packages/bun-vscode/example/bug-preload.js
Normal file
1
packages/bun-vscode/example/bug-preload.js
Normal file
@@ -0,0 +1 @@
|
||||
Math.max = undefined;
|
||||
Binary file not shown.
@@ -3,10 +3,13 @@ import { describe, expect, test } from "bun:test";
|
||||
describe("example", () => {
|
||||
test("it works", () => {
|
||||
expect(1).toBe(1);
|
||||
expect(1).not.toBe(2);
|
||||
|
||||
expect(10).toBe(10);
|
||||
|
||||
expect(() => {
|
||||
throw new TypeError("Oops! I did it again.");
|
||||
}).toThrow();
|
||||
|
||||
expect(() => {
|
||||
throw new Error("Parent error.", {
|
||||
cause: new TypeError("Child error."),
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
type OS = "Windows";
|
||||
import * as os from "node:os";
|
||||
|
||||
Bun.serve({
|
||||
fetch(req: Request) {
|
||||
return new Response(`Hello, ${"Windows" as OS}!`);
|
||||
return new Response(`Hello from ${os.arch()}!`);
|
||||
},
|
||||
});
|
||||
|
||||
@@ -2,10 +2,16 @@
|
||||
"private": true,
|
||||
"name": "example",
|
||||
"dependencies": {
|
||||
"axios": "^1.7.7",
|
||||
"elysia": "^0.6.3",
|
||||
"express": "^4.18.2",
|
||||
"mime": "^3.0.0",
|
||||
"mime-db": "^1.52.0"
|
||||
"mime-db": "^1.52.0",
|
||||
"react": "^0.0.0-experimental-380f5d67-20241113",
|
||||
"react-dom": "^0.0.0-experimental-380f5d67-20241113",
|
||||
"react-refresh": "^0.0.0-experimental-380f5d67-20241113",
|
||||
"react-server-dom-bun": "^0.0.0-experimental-603e6108-20241029",
|
||||
"react-server-dom-webpack": "^0.0.0-experimental-380f5d67-20241113"
|
||||
},
|
||||
"scripts": {
|
||||
"run": "node hello.js",
|
||||
|
||||
7
packages/bun-vscode/example/print.ts
Normal file
7
packages/bun-vscode/example/print.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
function getOldestPersonInBooking(ages: number[]): number {
|
||||
console.log("ok");
|
||||
throw new Error("TODO! Perhaps we can use Math.max() for this?");
|
||||
}
|
||||
|
||||
const ticketAges = [5, 25, 30];
|
||||
console.log(getOldestPersonInBooking(ticketAges));
|
||||
9
packages/bun-vscode/example/test.ts
Normal file
9
packages/bun-vscode/example/test.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import axios from "axios";
|
||||
|
||||
async function foo() {
|
||||
const res = await axios.get("http://example.com");
|
||||
|
||||
throw new Error("potato");
|
||||
}
|
||||
|
||||
console.log(await foo());
|
||||
@@ -14,9 +14,6 @@
|
||||
"jsx": "preserve",
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"allowJs": true,
|
||||
"types": [
|
||||
"bun-types" // add Bun global
|
||||
]
|
||||
"allowJs": true
|
||||
}
|
||||
}
|
||||
|
||||
13
packages/bun-vscode/example/user.ts
Normal file
13
packages/bun-vscode/example/user.ts
Normal file
@@ -0,0 +1,13 @@
|
||||
// await Bun.sleep(100);
|
||||
|
||||
interface User {
|
||||
name: string;
|
||||
}
|
||||
|
||||
const user = {
|
||||
name: "Alistair",
|
||||
} as User;
|
||||
|
||||
console.log(`First letter us '${user.name.charAt(0)}'`);
|
||||
|
||||
await Bun.sleep(100);
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "bun-vscode",
|
||||
"version": "0.0.15",
|
||||
"version": "0.0.22",
|
||||
"author": "oven",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -18,48 +18,9 @@
|
||||
"esbuild": "^0.19.2",
|
||||
"typescript": "^5.0.0"
|
||||
},
|
||||
"description": "The Visual Studio Code extension for Bun.",
|
||||
"displayName": "Bun for Visual Studio Code",
|
||||
"engines": {
|
||||
"vscode": "^1.60.0"
|
||||
},
|
||||
"extensionKind": [
|
||||
"workspace"
|
||||
],
|
||||
"galleryBanner": {
|
||||
"color": "#3B3738",
|
||||
"theme": "dark"
|
||||
},
|
||||
"homepage": "https://bun.sh/",
|
||||
"icon": "assets/icon.png",
|
||||
"keywords": [
|
||||
"bun",
|
||||
"node.js",
|
||||
"javascript",
|
||||
"typescript",
|
||||
"vscode"
|
||||
],
|
||||
"license": "MIT",
|
||||
"publisher": "oven",
|
||||
"scripts": {
|
||||
"build": "node scripts/build.mjs",
|
||||
"pretest": "bun run build",
|
||||
"test": "node scripts/test.mjs",
|
||||
"dev": "vscode-test --config scripts/dev.mjs",
|
||||
"prepublish": "npm version patch && bun run build",
|
||||
"publish": "cd extension && bunx vsce publish"
|
||||
},
|
||||
"workspaceTrust": {
|
||||
"request": "never"
|
||||
},
|
||||
"workspaces": [
|
||||
"../bun-debug-adapter-protocol",
|
||||
"../bun-inspector-protocol"
|
||||
],
|
||||
"activationEvents": [
|
||||
"onStartupFinished"
|
||||
],
|
||||
"browser": "dist/web-extension.js",
|
||||
"bugs": {
|
||||
"url": "https://github.com/oven-sh/bun/issues"
|
||||
},
|
||||
@@ -84,6 +45,18 @@
|
||||
"scope": "window",
|
||||
"default": null
|
||||
},
|
||||
"bun.diagnosticsSocket.enabled": {
|
||||
"type": "boolean",
|
||||
"description": "If Bun extension should communicate with Bun over a socket to show errors in editor.",
|
||||
"scope": "window",
|
||||
"default": true
|
||||
},
|
||||
"bun.bunlockb.enabled": {
|
||||
"type": "boolean",
|
||||
"description": "If visual lockfile viewer (`bun.lockb`) should be enabled ",
|
||||
"scope": "window",
|
||||
"default": true
|
||||
},
|
||||
"bun.debugTerminal.enabled": {
|
||||
"type": "boolean",
|
||||
"description": "If Bun should be added to the JavaScript Debug Terminal.",
|
||||
@@ -95,6 +68,21 @@
|
||||
"description": "If the debugger should stop on the first line when used in the JavaScript Debug Terminal.",
|
||||
"scope": "window",
|
||||
"default": false
|
||||
},
|
||||
"bun.test.filePattern": {
|
||||
"type": "string",
|
||||
"default": "**/*{.test.,.spec.,_test_,_spec_}{js,ts,tsx,jsx,mts,cts}",
|
||||
"description": "Glob pattern to find test files"
|
||||
},
|
||||
"bun.test.customFlag": {
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"description": "Custom flag added to the end of test command"
|
||||
},
|
||||
"bun.test.customScript": {
|
||||
"type": "string",
|
||||
"default": "",
|
||||
"description": "Custom script to use instead of `bun test`, for example script from `package.json`"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -122,6 +110,20 @@
|
||||
"category": "Bun",
|
||||
"enablement": "!inDebugMode && resourceLangId =~ /^(javascript|typescript|javascriptreact|typescriptreact)$/ && !isInDiffEditor && resourceScheme == 'untitled'",
|
||||
"icon": "$(play-circle)"
|
||||
},
|
||||
{
|
||||
"command": "extension.bun.runTest",
|
||||
"title": "Run all tests",
|
||||
"shortTitle": "Run Test",
|
||||
"category": "Bun",
|
||||
"icon": "$(play)"
|
||||
},
|
||||
{
|
||||
"command": "extension.bun.watchTest",
|
||||
"title": "Run all tests in watch mode",
|
||||
"shortTitle": "Run Test Watch",
|
||||
"category": "Bun",
|
||||
"icon": "$(sync)"
|
||||
}
|
||||
],
|
||||
"menus": {
|
||||
@@ -328,5 +330,43 @@
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": "The Visual Studio Code extension for Bun.",
|
||||
"displayName": "Bun",
|
||||
"engines": {
|
||||
"vscode": "^1.60.0"
|
||||
},
|
||||
"extensionKind": [
|
||||
"workspace"
|
||||
],
|
||||
"galleryBanner": {
|
||||
"color": "#3B3738",
|
||||
"theme": "dark"
|
||||
},
|
||||
"homepage": "https://bun.sh/",
|
||||
"icon": "assets/icon.png",
|
||||
"keywords": [
|
||||
"bun",
|
||||
"node.js",
|
||||
"javascript",
|
||||
"typescript",
|
||||
"vscode"
|
||||
],
|
||||
"license": "MIT",
|
||||
"publisher": "oven",
|
||||
"scripts": {
|
||||
"build": "node scripts/build.mjs",
|
||||
"pretest": "bun run build",
|
||||
"test": "node scripts/test.mjs",
|
||||
"dev": "vscode-test --config scripts/dev.mjs",
|
||||
"prepublish": "npm version patch && bun run build",
|
||||
"publish": "cd extension && bunx vsce publish"
|
||||
},
|
||||
"workspaceTrust": {
|
||||
"request": "never"
|
||||
},
|
||||
"workspaces": [
|
||||
"../bun-debug-adapter-protocol",
|
||||
"../bun-inspector-protocol"
|
||||
]
|
||||
}
|
||||
@@ -1,8 +1,10 @@
|
||||
import * as vscode from "vscode";
|
||||
import { registerDebugger, debugCommand } from "./features/debug";
|
||||
import { registerDebugger } from "./features/debug";
|
||||
import { registerDiagnosticsSocket } from "./features/diagnostics/diagnostics";
|
||||
import { registerBunlockEditor } from "./features/lockfile";
|
||||
import { registerPackageJsonProviders } from "./features/tasks/package.json";
|
||||
import { registerTaskProvider } from "./features/tasks/tasks";
|
||||
import { registerTestCodeLens, registerTestRunner } from "./features/tests";
|
||||
|
||||
async function runUnsavedCode() {
|
||||
const editor = vscode.window.activeTextEditor;
|
||||
@@ -44,9 +46,14 @@ export function activate(context: vscode.ExtensionContext) {
|
||||
registerDebugger(context);
|
||||
registerTaskProvider(context);
|
||||
registerPackageJsonProviders(context);
|
||||
registerDiagnosticsSocket(context);
|
||||
registerTestRunner(context);
|
||||
registerTestCodeLens(context);
|
||||
|
||||
// Only register for text editors
|
||||
context.subscriptions.push(vscode.commands.registerTextEditorCommand("extension.bun.runUnsavedCode", runUnsavedCode));
|
||||
}
|
||||
|
||||
export function deactivate() {}
|
||||
export function getConfig<T>(path: string, scope?: vscode.ConfigurationScope) {
|
||||
return vscode.workspace.getConfiguration("bun", scope).get<T>(path);
|
||||
}
|
||||
|
||||
@@ -4,12 +4,13 @@ import { join } from "node:path";
|
||||
import * as vscode from "vscode";
|
||||
import {
|
||||
type DAP,
|
||||
DebugAdapter,
|
||||
getAvailablePort,
|
||||
getRandomId,
|
||||
TCPSocketSignal,
|
||||
UnixSignal,
|
||||
WebSocketDebugAdapter,
|
||||
} from "../../../bun-debug-adapter-protocol";
|
||||
import { getConfig } from "../extension";
|
||||
|
||||
export const DEBUG_CONFIGURATION: vscode.DebugConfiguration = {
|
||||
type: "bun",
|
||||
@@ -101,16 +102,18 @@ async function injectDebugTerminal(terminal: vscode.Terminal): Promise<void> {
|
||||
}
|
||||
|
||||
const { env } = creationOptions as vscode.TerminalOptions;
|
||||
if (env["BUN_INSPECT"]) {
|
||||
if (env && env["BUN_INSPECT"]) {
|
||||
return;
|
||||
}
|
||||
|
||||
const session = new TerminalDebugSession();
|
||||
await session.initialize();
|
||||
|
||||
const { adapter, signal } = session;
|
||||
|
||||
const stopOnEntry = getConfig("debugTerminal.stopOnEntry") === true;
|
||||
const query = stopOnEntry ? "break=1" : "wait=1";
|
||||
|
||||
const debugSession = new TerminalDebugSession();
|
||||
await debugSession.initialize();
|
||||
const { adapter, signal } = debugSession;
|
||||
const debug = vscode.window.createTerminal({
|
||||
...creationOptions,
|
||||
name: "JavaScript Debug Terminal",
|
||||
@@ -118,6 +121,7 @@ async function injectDebugTerminal(terminal: vscode.Terminal): Promise<void> {
|
||||
...env,
|
||||
"BUN_INSPECT": `${adapter.url}?${query}`,
|
||||
"BUN_INSPECT_NOTIFY": signal.url,
|
||||
BUN_INSPECT_CONNECT_TO: "",
|
||||
},
|
||||
});
|
||||
|
||||
@@ -234,7 +238,10 @@ interface RuntimeExceptionThrownEvent {
|
||||
}
|
||||
|
||||
class FileDebugSession extends DebugSession {
|
||||
adapter: DebugAdapter;
|
||||
// If these classes are moved/published, we should make sure
|
||||
// we remove these non-null assertions so consumers of
|
||||
// this lib are not running into these hard
|
||||
adapter!: WebSocketDebugAdapter;
|
||||
sessionId?: string;
|
||||
untitledDocPath?: string;
|
||||
bunEvalPath?: string;
|
||||
@@ -258,7 +265,7 @@ class FileDebugSession extends DebugSession {
|
||||
: `ws+unix://${tmpdir()}/${uniqueId}.sock`;
|
||||
|
||||
const { untitledDocPath, bunEvalPath } = this;
|
||||
this.adapter = new DebugAdapter(url, untitledDocPath, bunEvalPath);
|
||||
this.adapter = new WebSocketDebugAdapter(url, untitledDocPath, bunEvalPath);
|
||||
|
||||
if (untitledDocPath) {
|
||||
this.adapter.on("Adapter.response", (response: DebugProtocolResponse) => {
|
||||
@@ -319,7 +326,7 @@ class FileDebugSession extends DebugSession {
|
||||
}
|
||||
|
||||
class TerminalDebugSession extends FileDebugSession {
|
||||
signal: TCPSocketSignal | UnixSignal;
|
||||
signal!: TCPSocketSignal | UnixSignal;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
@@ -346,6 +353,7 @@ class TerminalDebugSession extends FileDebugSession {
|
||||
env: {
|
||||
"BUN_INSPECT": `${this.adapter.url}?wait=1`,
|
||||
"BUN_INSPECT_NOTIFY": this.signal.url,
|
||||
BUN_INSPECT_CONNECT_TO: "",
|
||||
},
|
||||
isTransient: true,
|
||||
iconPath: new vscode.ThemeIcon("debug-console"),
|
||||
@@ -365,10 +373,6 @@ function getRuntime(scope?: vscode.ConfigurationScope): string {
|
||||
return "bun";
|
||||
}
|
||||
|
||||
function getConfig<T>(path: string, scope?: vscode.ConfigurationScope) {
|
||||
return vscode.workspace.getConfiguration("bun", scope).get<T>(path);
|
||||
}
|
||||
|
||||
export async function runUnsavedCode() {
|
||||
const editor = vscode.window.activeTextEditor;
|
||||
if (!editor || !editor.document.isUntitled) return;
|
||||
|
||||
279
packages/bun-vscode/src/features/diagnostics/diagnostics.ts
Normal file
279
packages/bun-vscode/src/features/diagnostics/diagnostics.ts
Normal file
@@ -0,0 +1,279 @@
|
||||
import * as fs from "node:fs/promises";
|
||||
import { Socket } from "node:net";
|
||||
import * as os from "node:os";
|
||||
import { inspect } from "node:util";
|
||||
import * as vscode from "vscode";
|
||||
import {
|
||||
getAvailablePort,
|
||||
NodeSocketDebugAdapter,
|
||||
TCPSocketSignal,
|
||||
UnixSignal,
|
||||
} from "../../../../bun-debug-adapter-protocol";
|
||||
import type { JSC } from "../../../../bun-inspector-protocol";
|
||||
import { typedGlobalState } from "../../global-state";
|
||||
import { getConfig } from "../../extension";
|
||||
|
||||
const output = vscode.window.createOutputChannel("Bun - Diagnostics");
|
||||
|
||||
const ansiRegex = (() => {
|
||||
const ST = "(?:\\u0007|\\u001B\\u005C|\\u009C)";
|
||||
const pattern = [
|
||||
`[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?${ST})`,
|
||||
"(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))",
|
||||
].join("|");
|
||||
|
||||
return new RegExp(pattern, "g");
|
||||
})();
|
||||
|
||||
function stripAnsi(str: string) {
|
||||
return str.replace(ansiRegex, "");
|
||||
}
|
||||
|
||||
class EditorStateManager {
|
||||
private diagnosticCollection: vscode.DiagnosticCollection;
|
||||
private disposables: vscode.Disposable[] = [];
|
||||
|
||||
public constructor() {
|
||||
this.diagnosticCollection = vscode.languages.createDiagnosticCollection("BunDiagnostics");
|
||||
}
|
||||
|
||||
getVisibleEditorsWithErrors() {
|
||||
return vscode.window.visibleTextEditors.filter(editor => {
|
||||
const diagnostics = this.diagnosticCollection.get(editor.document.uri);
|
||||
|
||||
return diagnostics && diagnostics.length > 0;
|
||||
});
|
||||
}
|
||||
|
||||
clearInFile(uri: vscode.Uri) {
|
||||
if (this.diagnosticCollection.has(uri)) {
|
||||
output.appendLine(`Clearing diagnostics for ${uri.toString()}`);
|
||||
this.diagnosticCollection.delete(uri);
|
||||
}
|
||||
}
|
||||
|
||||
clearAll(reason: string) {
|
||||
output.appendLine("Clearing all because: " + reason);
|
||||
this.diagnosticCollection.clear();
|
||||
}
|
||||
|
||||
set(uri: vscode.Uri, diagnostic: vscode.Diagnostic) {
|
||||
this.diagnosticCollection.set(uri, [diagnostic]);
|
||||
}
|
||||
|
||||
dispose() {
|
||||
this.clearAll("Editor state was disposed");
|
||||
this.disposables.forEach(d => d.dispose());
|
||||
}
|
||||
}
|
||||
|
||||
class BunDiagnosticsManager {
|
||||
private readonly editorState: EditorStateManager;
|
||||
private readonly signal: UnixSignal | TCPSocketSignal;
|
||||
private readonly context: vscode.ExtensionContext;
|
||||
|
||||
public get signalUrl() {
|
||||
return this.signal.url;
|
||||
}
|
||||
|
||||
private static async getOrRecreateSignal(context: vscode.ExtensionContext) {
|
||||
const globalState = typedGlobalState(context.globalState);
|
||||
const existing = globalState.get("BUN_INSPECT_CONNECT_TO");
|
||||
|
||||
const isWin = os.platform() === "win32";
|
||||
|
||||
if (existing) {
|
||||
if (existing.type === "unix") {
|
||||
output.appendLine(`Reusing existing unix socket: ${existing.url}`);
|
||||
|
||||
if ("url" in existing) {
|
||||
await fs.unlink(existing.url).catch(() => {
|
||||
// ? lol
|
||||
});
|
||||
}
|
||||
|
||||
return new UnixSignal(existing.url);
|
||||
} else {
|
||||
output.appendLine(`Reusing existing tcp socket on: ${existing.port}`);
|
||||
return new TCPSocketSignal(existing.port);
|
||||
}
|
||||
}
|
||||
|
||||
if (isWin) {
|
||||
const port = await getAvailablePort();
|
||||
|
||||
await globalState.update("BUN_INSPECT_CONNECT_TO", {
|
||||
type: "tcp",
|
||||
port,
|
||||
});
|
||||
|
||||
output.appendLine(`Created new tcp socket on: ${port}`);
|
||||
|
||||
return new TCPSocketSignal(port);
|
||||
} else {
|
||||
const signal = new UnixSignal();
|
||||
|
||||
await globalState.update("BUN_INSPECT_CONNECT_TO", {
|
||||
type: "unix",
|
||||
url: signal.url,
|
||||
});
|
||||
|
||||
output.appendLine(`Created new unix socket: ${signal.url}`);
|
||||
|
||||
return signal;
|
||||
}
|
||||
}
|
||||
|
||||
// private static getOrCreateOldVersionInspectURL = createGlobalStateGenerationFn(
|
||||
// "DIAGNOSTICS_BUN_INSPECT",
|
||||
// async () => {
|
||||
// const url =
|
||||
// process.platform === "win32"
|
||||
// ? `ws://127.0.0.1:${await getAvailablePort()}/${getRandomId()}`
|
||||
// : `ws+unix://${os.tmpdir()}/${getRandomId()}.sock`;
|
||||
|
||||
// return url;
|
||||
// },
|
||||
// );
|
||||
|
||||
public static async initialize(context: vscode.ExtensionContext) {
|
||||
const signal = await BunDiagnosticsManager.getOrRecreateSignal(context);
|
||||
|
||||
return new BunDiagnosticsManager(context, signal);
|
||||
}
|
||||
|
||||
/**
|
||||
* Called when Bun pings BUN_INSPECT_NOTIFY (indicating a program has started).
|
||||
*/
|
||||
private async handleSocketConnection(socket: Socket) {
|
||||
const debugAdapter = new NodeSocketDebugAdapter(socket);
|
||||
|
||||
this.editorState.clearAll("A new socket connected");
|
||||
|
||||
debugAdapter.on("LifecycleReporter.reload", async () => {
|
||||
this.editorState.clearAll("LifecycleReporter reported a reload event");
|
||||
});
|
||||
|
||||
debugAdapter.on("Inspector.event", e => {
|
||||
output.appendLine(`Received inspector event: ${e.method}`);
|
||||
});
|
||||
|
||||
debugAdapter.on("Inspector.error", e => {
|
||||
output.appendLine(inspect(e, true, null));
|
||||
});
|
||||
|
||||
debugAdapter.on("LifecycleReporter.error", event => this.handleLifecycleError(event));
|
||||
|
||||
const ok = await debugAdapter.start();
|
||||
|
||||
if (!ok) {
|
||||
await vscode.window.showErrorMessage("Failed to start debug adapter");
|
||||
debugAdapter.removeAllListeners();
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
debugAdapter.initialize({
|
||||
adapterID: "bun-vsc-terminal-debug-adapter",
|
||||
enableControlFlowProfiler: false,
|
||||
enableLifecycleAgentReporter: true,
|
||||
sendImmediatePreventExit: false,
|
||||
enableDebugger: false, // Performance overhead when debugger is enabled
|
||||
});
|
||||
}
|
||||
|
||||
private handleLifecycleError(event: JSC.LifecycleReporter.ErrorEvent) {
|
||||
const message = stripAnsi(event.message).trim() || event.name || "Error";
|
||||
|
||||
output.appendLine(
|
||||
`Received error event: '{name:${event.name}} ${message.split("\n")[0].trim().substring(0, 100)}'`,
|
||||
);
|
||||
|
||||
const [url = null] = event.urls;
|
||||
const [line = null, col = null] = event.lineColumns;
|
||||
|
||||
if (url === null || url.length === 0 || line === null || col === null) {
|
||||
output.appendLine("No valid url or line/column found in error event");
|
||||
output.appendLine(JSON.stringify(event));
|
||||
return;
|
||||
}
|
||||
|
||||
const uri = vscode.Uri.file(url);
|
||||
|
||||
// range is really just 1 character here..
|
||||
const range = new vscode.Range(new vscode.Position(line - 1, col - 1), new vscode.Position(line - 1, col));
|
||||
|
||||
const document = vscode.workspace.textDocuments.find(doc => doc.uri.toString() === uri.toString());
|
||||
|
||||
// ...but we want to highlight the entire word after(inclusive) the character
|
||||
const rangeOfWord = document?.getWordRangeAtPosition(range.start) ?? range; // Fallback to just the character if no editor or no word range is found
|
||||
|
||||
const diagnostic = new vscode.Diagnostic(rangeOfWord, message, vscode.DiagnosticSeverity.Error);
|
||||
|
||||
diagnostic.source = "Bun";
|
||||
|
||||
const relatedInformation = event.urls.flatMap((url, i) => {
|
||||
if (i === 0 || url === "") {
|
||||
return [];
|
||||
}
|
||||
|
||||
const [line = null, col = null] = event.lineColumns.slice(i * 2, i * 2 + 2);
|
||||
|
||||
if (line === null || col === null) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return [
|
||||
new vscode.DiagnosticRelatedInformation(
|
||||
new vscode.Location(vscode.Uri.file(url), new vscode.Position(line - 1, col - 1)),
|
||||
message,
|
||||
),
|
||||
];
|
||||
});
|
||||
|
||||
diagnostic.relatedInformation = relatedInformation;
|
||||
|
||||
this.editorState.set(uri, diagnostic);
|
||||
}
|
||||
|
||||
public dispose() {
|
||||
return vscode.Disposable.from(this.editorState, {
|
||||
dispose: () => {
|
||||
this.signal.close();
|
||||
this.signal.removeAllListeners();
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
private constructor(context: vscode.ExtensionContext, signal: UnixSignal | TCPSocketSignal) {
|
||||
this.editorState = new EditorStateManager();
|
||||
this.signal = signal;
|
||||
this.context = context;
|
||||
|
||||
this.context.subscriptions.push(
|
||||
// on did type
|
||||
vscode.workspace.onDidChangeTextDocument(e => {
|
||||
this.editorState.clearInFile(e.document.uri);
|
||||
}),
|
||||
);
|
||||
|
||||
this.signal.on("Signal.Socket.connect", this.handleSocketConnection.bind(this));
|
||||
}
|
||||
}
|
||||
|
||||
const description = new vscode.MarkdownString(
|
||||
"Bun's VSCode extension communicates with Bun over a socket. We set the url in your terminal with the `BUN_INSPECT_NOTIFY` environment variable",
|
||||
);
|
||||
|
||||
export async function registerDiagnosticsSocket(context: vscode.ExtensionContext) {
|
||||
context.environmentVariableCollection.clear();
|
||||
context.environmentVariableCollection.description = description;
|
||||
|
||||
if (!getConfig("diagnosticsSocket.enabled")) return;
|
||||
|
||||
const manager = await BunDiagnosticsManager.initialize(context);
|
||||
|
||||
context.environmentVariableCollection.replace("BUN_INSPECT_CONNECT_TO", manager.signalUrl);
|
||||
|
||||
context.subscriptions.push(manager);
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
import { spawn } from "node:child_process";
|
||||
import * as vscode from "vscode";
|
||||
import { styleLockfile } from "./lockfile.style";
|
||||
import { getConfig } from "../../extension";
|
||||
|
||||
export type BunLockfile = vscode.CustomDocument & {
|
||||
readonly preview: string;
|
||||
@@ -36,6 +37,11 @@ export class BunLockfileEditorProvider implements vscode.CustomReadonlyEditorPro
|
||||
}
|
||||
|
||||
function renderLockfile({ webview }: vscode.WebviewPanel, preview: string, extensionUri: vscode.Uri): void {
|
||||
if (!getConfig("bunlockb.enabled")) {
|
||||
webview.html = "<code>bun.bunlockb</code> config option is disabled."
|
||||
return
|
||||
}
|
||||
|
||||
const styleVSCodeUri = webview.asWebviewUri(vscode.Uri.joinPath(extensionUri, "assets", "vscode.css"));
|
||||
const lockfileContent = styleLockfile(preview);
|
||||
|
||||
@@ -49,7 +55,7 @@ function renderLockfile({ webview }: vscode.WebviewPanel, preview: string, exten
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta http-equiv="Content-Security-Policy" content="default-src 'none'; style-src ${webview.cspSource};">
|
||||
<meta http-equiv="Content-Security-Policy" content="default-src 'none'; style-src ${webview.cspSource};">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<link href="${styleVSCodeUri}" rel="stylesheet" />
|
||||
</head>
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user