Add BUN_DEBUG flag to control where debug logs go (#9019)

* Add `BUN_DEBUG` flag to control where debug logs go (see the usage sketch after the commit metadata below)

* Update all the actions

* Configure temp directories (`TMPDIR`) for CI test runs

* Use `spawn` instead of `rmSync` for temp-directory cleanup

* Use CLOSE_RANGE_CLOEXEC

* Make some tests more reproducible

* Update hot.test.ts

* Detect file descriptor leaks and wait for stdout

* Update runner.node.mjs

* Update preload.ts

---------

Co-authored-by: Jarred Sumner <709451+Jarred-Sumner@users.noreply.github.com>
Author: Jarred Sumner
Date: 2024-02-21 14:13:43 -08:00 (committed by GitHub)
Commit: 6184542682 (parent: a0be3cb2ff)
20 changed files with 521 additions and 317 deletions
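
Not part of the original commit message: a usage sketch of the new flag, inferred from the `scopedWriter()` change to `src/output.zig` later in this diff. `BUN_DEBUG` is honored only by debug builds; an empty value, `"0"`, or `"false"` leaves scoped logs on stderr, and any other value is treated as a file path (parent directories are created, the file is truncated on open). The `bun-debug` binary name and `app.ts` are placeholders:

```ts
import { spawnSync } from "bun";

// Hypothetical debug-build invocation: route scoped debug logs to a file
// instead of interleaving them with the program's own stderr.
const logPath = "/tmp/bun-debug/scoped.log"; // parent dir is created, file is truncated

const { exitCode } = spawnSync({
  cmd: ["bun-debug", "run", "app.ts"], // assumes a debug build of bun on PATH
  env: { ...process.env, BUN_DEBUG: logPath }, // "", "0", or "false" would disable this
  stdout: "inherit",
  stderr: "inherit", // now carries only the program's own output
});
console.log(`exited ${exitCode}; scoped debug logs are in ${logPath}`);
```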


@@ -51,14 +51,14 @@ jobs:
          runner: linux-arm64
          build_machine_arch: aarch64
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          submodules: false
          ref: ${{github.sha}}
          clean: true
      - run: |
          bash ./scripts/update-submodules.sh
-      - uses: docker/setup-buildx-action@v2
+      - uses: docker/setup-buildx-action@v3
        id: buildx
        with:
          install: true
@@ -66,7 +66,7 @@ jobs:
        run: |
          rm -rf ${{runner.temp}}/release
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
@@ -74,7 +74,7 @@ jobs:
      - run: |
          mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
      - name: Build and push
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v5
        with:
          context: .
          push: false
@@ -113,11 +113,11 @@ jobs:
          zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
          zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: bun-${{matrix.tag}}-profile
          path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: bun-${{matrix.tag}}
          path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip


@@ -86,7 +86,7 @@ jobs:
          submodules: recursive
          ref: ${{github.sha}}
          clean: true
-      - uses: docker/setup-buildx-action@v2
+      - uses: docker/setup-buildx-action@v3
        id: buildx
        with:
          install: true
@@ -94,7 +94,7 @@ jobs:
        run: |
          rm -rf ${{runner.temp}}/release
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
@@ -102,7 +102,7 @@ jobs:
      - run: |
          mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
      - name: Build and push
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v5
        with:
          context: .
          push: false
@@ -154,19 +154,19 @@ jobs:
          zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
          zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: bun-${{matrix.tag}}-profile
          path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: bun-${{matrix.tag}}
          path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: bun-obj-${{matrix.tag}}
          path: ${{runner.temp}}/release/bun-obj
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: ${{matrix.tag}}-dependencies
          path: ${{runner.temp}}/release/bun-dependencies
@@ -234,7 +234,7 @@ jobs:
          clean: true
      - id: download
        name: Download
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: bun-${{matrix.tag}}
          path: ${{runner.temp}}/release
@@ -275,6 +275,7 @@ jobs:
        name: Test (node runner)
        env:
          SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
+          TMPDIR: ${{runner.temp}}
          TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
          TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
        # if: ${{github.event.inputs.use_bun == 'false'}}
@@ -283,7 +284,7 @@ jobs:
          ulimit -c
          node packages/bun-internal-test/src/runner.node.mjs || true
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        if: steps.test.outputs.failing_tests != ''
        with:
          name: cores


@@ -51,20 +51,20 @@ jobs:
      # run: git submodule update --init --recursive --depth=1 --progress --force
      - name: Setup Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
        id: buildx
        with:
          install: true
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Compile Zig Object
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v5
        if: runner.arch == 'X64'
        with:
          context: .
@@ -84,7 +84,7 @@ jobs:
          outputs: type=local,dest=${{runner.temp}}/release
      - name: Upload Zig Object
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.tag }}
          path: ${{runner.temp}}/release/bun-zig.o
@@ -141,7 +141,7 @@ jobs:
      - name: Cache submodule dependencies
        id: cache-deps-restore
-        uses: actions/cache/restore@v3
+        uses: actions/cache/restore@v4
        with:
          path: ${{runner.temp}}/bun-deps
          key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
@@ -159,13 +159,13 @@ jobs:
      - name: Cache submodule dependencies
        if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
        id: cache-deps-save
-        uses: actions/cache/save@v3
+        uses: actions/cache/save@v4
        with:
          path: ${{runner.temp}}/bun-deps
          key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
      - name: Upload submodule dependencies
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.tag }}-deps
          path: ${{runner.temp}}/bun-deps
@@ -235,7 +235,7 @@ jobs:
          bash compile-cpp-only.sh -v
      - name: Upload C++
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.tag }}-cpp
          path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
@@ -285,19 +285,19 @@ jobs:
          echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
      - name: Download C++
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ matrix.tag }}-cpp
          path: ${{ runner.temp }}/bun-cpp-obj
      - name: Download Zig Object
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ matrix.obj }}
          path: ${{ runner.temp }}/release
      - name: Downloaded submodule dependencies
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ matrix.tag }}-deps
          path: ${{runner.temp}}/bun-deps
@@ -330,11 +330,11 @@ jobs:
          zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
          zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: ${{matrix.tag}}-profile
          path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: ${{matrix.tag}}
          path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
@@ -394,12 +394,12 @@ jobs:
    steps:
      - id: checkout
        name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        with:
          submodules: false
      - id: download
        name: Download
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{matrix.tag}}
          path: ${{runner.temp}}/release
@@ -426,6 +426,7 @@ jobs:
        name: Test (node runner)
        env:
          SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
+          TMPDIR: ${{runner.temp}}
          TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
          TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
        # if: ${{github.event.inputs.use_bun == 'false'}}


@@ -55,20 +55,20 @@ jobs:
      - uses: actions/checkout@v4
      - name: Setup Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
        id: buildx
        with:
          install: true
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Compile Zig Object
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v5
        with:
          context: .
          push: false
@@ -97,7 +97,7 @@ jobs:
          outputs: type=local,dest=${{runner.temp}}/release
      - name: Upload Zig Object
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.tag }}
          path: ${{runner.temp}}/release/bun-zig.o
@@ -146,7 +146,7 @@ jobs:
      - name: Cache submodule dependencies
        id: cache-deps-restore
-        uses: actions/cache/restore@v3
+        uses: actions/cache/restore@v4
        with:
          path: ${{runner.temp}}/bun-deps
          key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
@@ -164,13 +164,13 @@ jobs:
      - name: Cache submodule dependencies
        if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
        id: cache-deps-save
-        uses: actions/cache/save@v3
+        uses: actions/cache/save@v4
        with:
          path: ${{runner.temp}}/bun-deps
          key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
      - name: Upload submodule dependencies
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.tag }}-deps
          path: ${{runner.temp}}/bun-deps
@@ -240,7 +240,7 @@ jobs:
          bash compile-cpp-only.sh -v
      - name: Upload C++
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.tag }}-cpp
          path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
@@ -262,7 +262,7 @@ jobs:
          runner: macos-12-large
          artifact: bun-obj-darwin-x64-baseline
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Checkout submodules
        run: git submodule update --init --recursive --depth=1 --progress --force
@@ -286,19 +286,19 @@ jobs:
          echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
      - name: Download C++
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ matrix.tag }}-cpp
          path: ${{ runner.temp }}/bun-cpp-obj
      - name: Download Zig Object
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ matrix.obj }}
          path: ${{ runner.temp }}/release
      - name: Downloaded submodule dependencies
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ matrix.tag }}-deps
          path: ${{runner.temp}}/bun-deps
@@ -331,11 +331,11 @@ jobs:
          zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
          zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: ${{matrix.tag}}-profile
          path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: ${{matrix.tag}}
          path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
@@ -396,12 +396,12 @@ jobs:
    steps:
      - id: checkout
        name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        with:
          submodules: false
      - id: download
        name: Download
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{matrix.tag}}
          path: ${{runner.temp}}/release
@@ -428,6 +428,7 @@ jobs:
        name: Test (node runner)
        env:
          SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
+          TMPDIR: ${{runner.temp}}
          TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
          TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
        # if: ${{github.event.inputs.use_bun == 'false'}}


@@ -52,20 +52,20 @@ jobs:
      - uses: actions/checkout@v4
      - name: Setup Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
        id: buildx
        with:
          install: true
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Compile Zig Object
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v5
        with:
          context: .
          push: false
@@ -94,7 +94,7 @@ jobs:
          outputs: type=local,dest=${{runner.temp}}/release
      - name: Upload Zig Object
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.tag }}
          path: ${{runner.temp}}/release/bun-zig.o
@@ -144,7 +144,7 @@ jobs:
      - name: Cache submodule dependencies
        id: cache-deps-restore
-        uses: actions/cache/restore@v3
+        uses: actions/cache/restore@v4
        with:
          path: ${{runner.temp}}/bun-deps
          key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
@@ -162,13 +162,13 @@ jobs:
      - name: Cache submodule dependencies
        if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
        id: cache-deps-save
-        uses: actions/cache/save@v3
+        uses: actions/cache/save@v4
        with:
          path: ${{runner.temp}}/bun-deps
          key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
      - name: Upload submodule dependencies
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.tag }}-deps
          path: ${{runner.temp}}/bun-deps
@@ -238,7 +238,7 @@ jobs:
          bash compile-cpp-only.sh -v
      - name: Upload C++
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.tag }}-cpp
          path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
@@ -260,7 +260,7 @@ jobs:
          runner: macos-12-large
          artifact: bun-obj-darwin-x64
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Checkout submodules
        run: git submodule update --init --recursive --depth=1 --progress --force
@@ -284,19 +284,19 @@ jobs:
          echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
      - name: Download C++
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ matrix.tag }}-cpp
          path: ${{ runner.temp }}/bun-cpp-obj
      - name: Download Zig Object
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ matrix.obj }}
          path: ${{ runner.temp }}/release
      - name: Downloaded submodule dependencies
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ matrix.tag }}-deps
          path: ${{runner.temp}}/bun-deps
@@ -329,11 +329,11 @@ jobs:
          zip -r ${{matrix.tag}}-profile.zip ${{matrix.tag}}-profile
          zip -r ${{matrix.tag}}.zip ${{matrix.tag}}
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: ${{matrix.tag}}-profile
          path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: ${{matrix.tag}}
          path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
@@ -393,12 +393,12 @@ jobs:
    steps:
      - id: checkout
        name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        with:
          submodules: false
      - id: download
        name: Download
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{matrix.tag}}
          path: ${{runner.temp}}/release
@@ -426,6 +426,7 @@ jobs:
        env:
          SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
          TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
+          TMPDIR: ${{runner.temp}}
          TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
        # if: ${{github.event.inputs.use_bun == 'false'}}
        run: |


@@ -51,7 +51,7 @@ jobs:
      working-directory: packages/bun-release
    steps:
      - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
      - name: Setup GPG
        uses: crazy-max/ghaction-import-gpg@v5
        with:
@@ -81,7 +81,7 @@ jobs:
      working-directory: packages/bun-release
    steps:
      - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
      - name: Setup Bun
        uses: oven-sh/setup-bun@v1
        with:
@@ -105,7 +105,7 @@ jobs:
      working-directory: packages/bun-types
    steps:
      - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
      - name: Setup Node.js
        uses: actions/setup-node@v3
        with:
@@ -170,12 +170,12 @@ jobs:
          suffix: -distroless
    steps:
      - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
      - name: Setup Docker emulator
        uses: docker/setup-qemu-action@v2
      - id: buildx
        name: Setup Docker buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
        with:
          platforms: linux/amd64,linux/arm64
      - id: metadata
@@ -192,12 +192,12 @@ jobs:
            type=match,pattern=(bun-v)?(canary|\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
            type=match,pattern=(bun-v)?(canary|\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
      - name: Login to Docker
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Push to Docker
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v5
        with:
          context: ./dockerhub/${{ matrix.dir || matrix.variant }}
          platforms: linux/amd64,linux/arm64
@@ -216,7 +216,7 @@ jobs:
    if: ${{ github.event_name == 'release' || github.event.inputs.use-homebrew == 'true' }}
    steps:
      - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        with:
          repository: oven-sh/homebrew-bun
          token: ${{ secrets.ROBOBUN_TOKEN }}
@@ -252,7 +252,7 @@ jobs:
      working-directory: packages/bun-release
    steps:
      - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
      - name: Setup Bun
        uses: oven-sh/setup-bun@v1
        with:


@@ -18,7 +18,7 @@ jobs:
    steps:
      - name: Checkout repo
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
      - name: Install bun
        uses: oven-sh/setup-bun@v1


@@ -60,13 +60,13 @@ jobs:
      - run: git config --global core.autocrlf false && git config --global core.eol lf
      - uses: actions/checkout@v4
      - name: Setup Docker Buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v3
        id: buildx
        with:
          install: true
      - name: Login to GitHub Container Registry
-        uses: docker/login-action@v2
+        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
@@ -79,7 +79,7 @@ jobs:
          echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" >> $GITHUB_OUTPUT
      - name: Compile Zig Object
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v5
        if: runner.arch == 'X64'
        with:
          context: .
@@ -102,7 +102,7 @@ jobs:
          outputs: type=local,dest=${{runner.temp}}/release
      - name: Upload Zig Object
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
          path: ${{runner.temp}}/release/bun-zig.o
@@ -138,7 +138,7 @@ jobs:
      - name: Try fetch dependencies
        id: cache-deps-restore
-        uses: actions/cache/restore@v3
+        uses: actions/cache/restore@v4
        with:
          path: bun-deps
          key: bun-deps-${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-${{ steps.submodule-versions.outputs.sha }}
@@ -165,7 +165,7 @@ jobs:
          .\scripts\all-dependencies.ps1
      - name: Upload Dependencies
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        with:
          name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
          path: bun-deps/
@@ -173,7 +173,7 @@ jobs:
      - name: Cache Dependencies
        if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
        id: cache-deps-save
-        uses: actions/cache/save@v3
+        uses: actions/cache/save@v4
        with:
          path: bun-deps
          key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
@@ -204,7 +204,7 @@ jobs:
        if: ${{ env.canary == 'true' }}
        run: |
          echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
          path: build-codegen-win32-x64/
@@ -228,7 +228,7 @@ jobs:
          version: ${{ env.LLVM_VERSION }}
      - run: choco install -y ninja
      - name: Download Codegen
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
          path: build
@@ -263,7 +263,7 @@ jobs:
          if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
          .\compile-cpp-only.ps1 -v
          if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
          path: build/bun-cpp-objects.a
@@ -288,22 +288,22 @@ jobs:
          version: ${{ env.LLVM_VERSION }}
      - run: choco install -y ninja
      - name: Download Codegen
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-codegen
          path: build
      - name: Download Dependencies
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-deps${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
          path: bun-deps
      - name: Download Zig Object
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-zig${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
          path: bun-zig
      - name: Download C++ Objects
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}-cpp${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
          path: bun-cpp
@@ -336,11 +336,11 @@ jobs:
          cp -r build\bun.pdb "$Dist\bun.pdb"
          Compress-Archive "$Dist" "$Dist.zip"
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}
          path: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}.zip
-      - uses: actions/upload-artifact@v3
+      - uses: actions/upload-artifact@v4
        with:
          name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile
          path: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile.zip
@@ -398,12 +398,12 @@ jobs:
      - run: git config --global core.autocrlf false && git config --global core.eol lf
      - id: checkout
        name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        with:
          submodules: false
      - id: download
        name: Download Release
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
        with:
          name: ${{ env.tag }}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile
          path: ${{runner.temp}}/release
@@ -431,6 +431,7 @@ jobs:
        name: Run tests
        env:
          SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
+          TMPDIR: ${{runner.temp}}
          TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
          TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
        run: |


@@ -6,3 +6,4 @@
#
# Instead, we can only scan the test directory for Bun's runtime tests
root = "test"
+preload = "./test/preload.ts"


@@ -195,7 +195,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
      - name: Install bun
        uses: oven-sh/setup-bun@v1
      - name: Install dependencies


@@ -12,7 +12,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      # ...
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
    + - uses: oven-sh/setup-bun@v1
      # run any `bun` or `bunx` command


@@ -1,10 +1,11 @@
import * as action from "@actions/core";
import { spawn, spawnSync } from "child_process";
-import { rmSync, writeFileSync, readFileSync } from "fs";
+import { rmSync, writeFileSync, readFileSync, mkdirSync, openSync, close, closeSync } from "fs";
import { readFile } from "fs/promises";
import { readdirSync } from "node:fs";
import { resolve, basename } from "node:path";
-import { cpus, hostname, totalmem, userInfo } from "os";
+import { cpus, hostname, tmpdir, totalmem, userInfo } from "os";
+import { join } from "path";
import { fileURLToPath } from "url";

const run_start = new Date();
@@ -24,6 +25,20 @@ process.chdir(cwd);
const ci = !!process.env["GITHUB_ACTIONS"];
const enableProgressBar = !ci;

+var prevTmpdir = "";
+function maketemp() {
+  if (prevTmpdir && !windows) {
+    spawn("rm", ["-rf", prevTmpdir], { stdio: "inherit", detached: true }).unref();
+  }
+
+  prevTmpdir = join(
+    tmpdir(),
+    "bun-test-tmp-" + (Date.now() | 0).toString() + "_" + ((Math.random() * 100_000_0) | 0).toString(36),
+  );
+  mkdirSync(prevTmpdir, { recursive: true });
+  return prevTmpdir;
+}
+
function defaultConcurrency() {
  // Concurrency causes more flaky tests, only enable it by default on windows
  // See https://github.com/oven-sh/bun/issues/8071
@@ -40,10 +55,19 @@ const extensions = [".js", ".ts", ".jsx", ".tsx"];
const git_sha =
  process.env["GITHUB_SHA"] ?? spawnSync("git", ["rev-parse", "HEAD"], { encoding: "utf-8" }).stdout.trim();

+const TEST_FILTER = process.env.BUN_TEST_FILTER;
+
function isTest(path) {
  if (!basename(path).includes(".test.") || !extensions.some(ext => path.endsWith(ext))) {
    return false;
  }
+
+  if (TEST_FILTER) {
+    if (!path.includes(TEST_FILTER)) {
+      return false;
+    }
+  }
+
  return true;
}
@@ -100,6 +124,33 @@ const failing_tests = [];
const passing_tests = [];
const fixes = [];
const regressions = [];
+let maxFd = -1;
+function getMaxFileDescriptor(path) {
+  if (process.platform === "win32") {
+    return -1;
+  }
+
+  hasInitialMaxFD = true;
+
+  if (process.platform === "linux") {
+    try {
+      readdirSync("/proc/self/fd").forEach(name => {
+        const fd = parseInt(name.trim(), 10);
+        if (Number.isSafeInteger(fd) && fd >= 0) {
+          maxFd = Math.max(maxFd, fd);
+        }
+      });
+      return maxFd;
+    } catch {}
+  }
+
+  const devnullfd = openSync("/dev/null", "r");
+  closeSync(devnullfd);
+  maxFd = devnullfd + 1;
+  return maxFd;
+}
+let hasInitialMaxFD = false;

async function runTest(path) {
  const name = path.replace(cwd, "").slice(1);
@@ -107,14 +158,16 @@ async function runTest(path) {
  const expected_crash_reason = windows
    ? await readFile(resolve(path), "utf-8").then(data => {
        const match = data.match(/@known-failing-on-windows:(.*)\n/);
        return match ? match[1].trim() : null;
      })
    : null;

  const start = Date.now();
-  await new Promise((done, reject) => {
-    const chunks = [];
+  await new Promise((finish, reject) => {
    const proc = spawn(bunExe, ["test", resolve(path)], {
      stdio: ["ignore", "pipe", "pipe"],
      timeout: 1000 * 60 * 3,
@@ -127,10 +180,26 @@ async function runTest(path) {
        // reproduce CI results locally
        GITHUB_ACTIONS: process.env.GITHUB_ACTIONS ?? "true",
        BUN_DEBUG_QUIET_LOGS: "1",
+        TMPDIR: maketemp(),
      },
    });
+    proc.stdout.once("end", () => {
+      done();
+    });
+
+    let doneCalls = 0;
+    let done = () => {
+      // TODO: wait for stderr as well
+      // spawn.test currently causes it to hang
+      if (doneCalls++ == 1) {
+        actuallyDone();
+      }
+    };
+    function actuallyDone() {
+      output = Buffer.concat(chunks).toString();
+      finish();
+    }
+    const chunks = [];
    proc.stdout.on("data", chunk => {
      chunks.push(chunk);
      if (run_concurrency === 1) process.stdout.write(chunk);
@@ -140,18 +209,32 @@ async function runTest(path) {
      if (run_concurrency === 1) process.stderr.write(chunk);
    });

-    proc.on("exit", (code_, signal_) => {
+    proc.once("exit", (code_, signal_) => {
      exitCode = code_;
      signal = signal_;
-      output = Buffer.concat(chunks).toString();
      done();
    });

-    proc.on("error", err_ => {
+    proc.once("error", err_ => {
      err = err_;
-      done();
+      done = () => {};
+      actuallyDone();
    });
  });

+  if (!hasInitialMaxFD) {
+    getMaxFileDescriptor();
+  } else if (maxFd > 0) {
+    const prevMaxFd = maxFd;
+    maxFd = getMaxFileDescriptor();
+    if (maxFd > prevMaxFd) {
+      process.stderr.write(
+        `\n\x1b[31mewarn\x1b[0;2m:\x1b[0m file descriptor leak in ${name}, delta: ${
+          maxFd - prevMaxFd
+        }, current: ${maxFd}, previous: ${prevMaxFd}\n`,
+      );
+    }
+  }
+
  const passed = exitCode === 0 && !err && !signal;
  let reason = "";
@@ -195,7 +278,8 @@ async function runTest(path) {
  }

  console.log(
-    `\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${passed ? "\x1b[32m✔" : expected_crash_reason ? "\x1b[33m⚠" : "\x1b[31m✖"
+    `\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${
+      passed ? "\x1b[32m✔" : expected_crash_reason ? "\x1b[33m⚠" : "\x1b[31m✖"
    } ${name}\x1b[0m${reason ? ` (${reason})` : ""}`,
  );
@@ -319,9 +403,10 @@ console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n"
console.log(header);
console.log("\n" + "-".repeat(Math.min(process.stdout.columns || 40, 80)) + "\n");

-let report = `# bun test on ${process.env["GITHUB_REF"] ??
+let report = `# bun test on ${
+  process.env["GITHUB_REF"] ??
  spawnSync("git", ["rev-parse", "--abbrev-ref", "HEAD"], { encoding: "utf-8" }).stdout.trim()
}
\`\`\`
${header}
@@ -345,7 +430,8 @@ if (regressions.length > 0) {
  report += regressions
    .map(
      ({ path, reason, expected_crash_reason }) =>
-        `- [\`${path}\`](${sectionLink(path)}) ${reason}${expected_crash_reason ? ` (expected: ${expected_crash_reason})` : ""
+        `- [\`${path}\`](${sectionLink(path)}) ${reason}${
+          expected_crash_reason ? ` (expected: ${expected_crash_reason})` : ""
        }`,
    )
    .join("\n");
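
A note on the two knobs this runner change introduces (my summary, not part of the diff): `BUN_TEST_FILTER` narrows the discovered test files to paths containing a substring, and each spawned test gets a fresh `TMPDIR` from `maketemp()`, with the previous one reaped by a detached `rm`. A minimal sketch of invoking it, assuming the repository root as the working directory:

```ts
// Hypothetical wrapper: run only tests whose path contains "hot".
// Fd-leak warnings ("file descriptor leak in <test>") land on stderr.
import { spawnSync } from "child_process";

spawnSync("node", ["packages/bun-internal-test/src/runner.node.mjs"], {
  stdio: "inherit",
  env: { ...process.env, BUN_TEST_FILTER: "hot" },
});
```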


@@ -27,7 +27,7 @@ jobs:
    steps:
      - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL


@@ -20,7 +20,7 @@ jobs:
        language: c++
        fuzz-seconds: 600
      - name: Upload crash
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
        if: failure() && steps.build.outcome == 'success'
        with:
          name: artifacts


@@ -12,13 +12,14 @@
#include <sys/syscall.h>
#include <sys/resource.h>

-static int close_range(unsigned int first)
-{
-    return syscall(__NR_close_range, first, ~0U, 0);
-}
-
extern char** environ;

+#ifndef CLOSE_RANGE_CLOEXEC
+#define CLOSE_RANGE_CLOEXEC (1U << 2)
+#endif
+
+extern "C" ssize_t bun_close_range(unsigned int start, unsigned int end, unsigned int flags);
+
enum FileActionType : uint8_t {
    None,
    Close,
@@ -70,7 +71,7 @@ extern "C" ssize_t posix_spawn_bun(
    const auto childFailed = [&]() -> ssize_t {
        res = errno;
        status = res;
-        close_range(0);
+        bun_close_range(0, ~0U, 0);
        _exit(127);

        // should never be reached
@@ -151,7 +152,9 @@ extern "C" ssize_t posix_spawn_bun(
    if (!envp)
        envp = environ;

-    close_range(current_max_fd + 1);
+    if (bun_close_range(current_max_fd + 1, ~0U, CLOSE_RANGE_CLOEXEC) != 0) {
+        bun_close_range(current_max_fd + 1, ~0U, 0);
+    }
    execve(path, argv, envp);
    _exit(127);


@@ -171,7 +171,7 @@ extern "C" int clock_gettime_monotonic(int64_t* tv_sec, int64_t* tv_nsec)
#endif

// close_range is glibc > 2.33, which is very new
-static ssize_t bun_close_range(unsigned int start, unsigned int end, unsigned int flags)
+extern "C" ssize_t bun_close_range(unsigned int start, unsigned int end, unsigned int flags)
{
    return syscall(__NR_close_range, start, end, flags);
}


@@ -486,7 +486,7 @@ pub fn scoped(comptime tag: @Type(.EnumLiteral), comptime disabled: bool) _log_f
        if (!out_set) {
            buffered_writer = .{
-                .unbuffered_writer = writer(),
+                .unbuffered_writer = scopedWriter(),
            };
            out = buffered_writer.writer();
            out_set = true;
@@ -495,7 +495,7 @@ pub fn scoped(comptime tag: @Type(.EnumLiteral), comptime disabled: bool) _log_f
        lock.lock();
        defer lock.unlock();

-        if (Output.enable_ansi_colors_stderr) {
+        if (Output.enable_ansi_colors_stdout and buffered_writer.unbuffered_writer.context.handle == writer().context.handle) {
            out.print(comptime prettyFmt("<r><d>[" ++ @tagName(tag) ++ "]<r> " ++ fmt, true), args) catch {
                really_disable = true;
                return;
@@ -804,6 +804,56 @@ pub inline fn err(error_name: anytype, comptime fmt: []const u8, args: anytype)
    }
}

+fn scopedWriter() std.fs.File.Writer {
+    if (comptime !Environment.isDebug) {
+        @compileError("scopedWriter() should only be called in debug mode");
+    }
+
+    const Scoped = struct {
+        pub var loaded_env: ?bool = null;
+        pub var scoped_file_writer: std.fs.File.Writer = undefined;
+        pub var scoped_file_writer_lock: bun.Lock = bun.Lock.init();
+    };
+    std.debug.assert(source_set);
+    Scoped.scoped_file_writer_lock.lock();
+    defer Scoped.scoped_file_writer_lock.unlock();
+    const use_env = Scoped.loaded_env orelse brk: {
+        if (bun.getenvZ("BUN_DEBUG")) |path| {
+            if (path.len > 0 and !strings.eql(path, "0") and !strings.eql(path, "false")) {
+                if (std.fs.path.dirname(path)) |dir| {
+                    std.fs.cwd().makePath(dir) catch {};
+                }
+
+                // do not use libuv through this code path, since it might not be initialized yet.
+                const fd = std.os.openat(
+                    std.fs.cwd().fd,
+                    path,
+                    std.os.O.TRUNC | std.os.O.CREAT | std.os.O.WRONLY,
+                    0o644,
+                ) catch |err_| {
+                    // Ensure we don't panic inside panic
+                    Scoped.loaded_env = false;
+                    Scoped.scoped_file_writer_lock.unlock();
+                    Output.panic("Failed to open file for debug output: {s} ({s})", .{ @errorName(err_), path });
+                };
+                Scoped.scoped_file_writer = bun.toFD(fd).asFile().writer();
+                Scoped.loaded_env = true;
+                break :brk true;
+            }
+        }
+
+        Scoped.loaded_env = false;
+
+        break :brk false;
+    };
+
+    if (use_env) {
+        return Scoped.scoped_file_writer;
+    }
+
+    return source.stream.writer();
+}
+
/// Print a red error message with "error: " as the prefix. For custom prefixes see `err()`
pub inline fn errGeneric(comptime fmt: []const u8, args: anytype) void {
    prettyErrorln("<red>error<r><d>:<r> " ++ fmt, args);


@@ -1,258 +1,307 @@
import { spawn } from "bun";
import { beforeAll, beforeEach, expect, it } from "bun:test";
import { bunExe, bunEnv, tempDirWithFiles, bunRun, bunRunAsScript } from "harness";
import { cpSync, readFileSync, renameSync, rmSync, unlinkSync, writeFileSync, copyFileSync } from "fs";
import { join } from "path";
import { tmpdir } from "os";

let hotRunnerRoot: string = "",
  cwd = "";
beforeEach(() => {
  const hotPath = join(tmpdir(), "bun-hot-test-" + (Date.now() | 0) + "_" + Math.random().toString(36).slice(2));
  hotRunnerRoot = join(hotPath, "hot-runner-root.js");
  rmSync(hotPath, { recursive: true, force: true });
  cpSync(import.meta.dir, hotPath, { recursive: true, force: true });
  cwd = hotPath;
});

it("should hot reload when file is overwritten", async () => {
  const root = hotRunnerRoot;
  try {
    var runner = spawn({
      cmd: [bunExe(), "--hot", "run", root],
      env: bunEnv,
      cwd,
      stdout: "pipe",
      stderr: "inherit",
      stdin: "ignore",
    });

    var reloadCounter = 0;

    async function onReload() {
      writeFileSync(root, readFileSync(root, "utf-8"));
    }

    for await (const line of runner.stdout) {
      var str = new TextDecoder().decode(line);
      var any = false;
      for (let line of str.split("\n")) {
        if (!line.includes("[#!root]")) continue;
        reloadCounter++;

        if (reloadCounter === 3) {
          runner.unref();
          runner.kill();
          break;
        }

        expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
        any = true;
      }

      if (any) await onReload();
    }

    expect(reloadCounter).toBe(3);
  } finally {
    // @ts-ignore
    runner?.unref?.();
    // @ts-ignore
    runner?.kill?.(9);
  }
});

it("should recover from errors", async () => {
  const root = hotRunnerRoot;
  try {
    var runner = spawn({
      cmd: [bunExe(), "--hot", "run", root],
      env: bunEnv,
      cwd,
      stdout: "pipe",
      stderr: "pipe",
      stdin: "ignore",
    });

    let reloadCounter = 0;
    const input = readFileSync(root, "utf-8");
    function onReloadGood() {
      writeFileSync(root, input);
    }

    function onReloadError() {
      writeFileSync(root, "throw new Error('error');\n");
    }

    var queue = [onReloadError, onReloadGood, onReloadError, onReloadGood];
    var errors: string[] = [];
    var onError: (...args: any[]) => void;
    (async () => {
      for await (let line of runner.stderr) {
        var str = new TextDecoder().decode(line);
        errors.push(str);
        // @ts-ignore
        onError && onError(str);
      }
    })();

    for await (const line of runner.stdout) {
      var str = new TextDecoder().decode(line);
      var any = false;
      for (let line of str.split("\n")) {
        if (!line.includes("[#!root]")) continue;
        reloadCounter++;

        if (reloadCounter === 3) {
          runner.unref();
          runner.kill();
          break;
        }

        expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
        any = true;
      }

      if (any) {
        queue.shift()!();
        await new Promise<void>((resolve, reject) => {
          if (errors.length > 0) {
            errors.length = 0;
            resolve();
            return;
          }

          onError = resolve;
        });
        queue.shift()!();
      }
    }

    expect(reloadCounter).toBe(3);
  } finally {
    // @ts-ignore
    runner?.unref?.();
    // @ts-ignore
    runner?.kill?.(9);
  }
});

it("should not hot reload when a random file is written", async () => {
  const root = hotRunnerRoot;
  try {
    var runner = spawn({
      cmd: [bunExe(), "--hot", "run", root],
      env: bunEnv,
      cwd,
      stdout: "pipe",
      stderr: "inherit",
      stdin: "ignore",
    });

    let reloadCounter = 0;
    const code = readFileSync(root, "utf-8");

    async function onReload() {
      writeFileSync(root + ".another.yet.js", code);
      unlinkSync(root + ".another.yet.js");
    }

    var waiter = new Promise<void>((resolve, reject) => {
      setTimeout(async () => {
        resolve();
      }, 50);
    });
    var finished = false;
    await Promise.race([
      waiter,
      (async () => {
        if (finished) {
          return;
        }
        for await (const line of runner.stdout) {
          if (finished) {
            return;
          }

          var str = new TextDecoder().decode(line);
          for (let line of str.split("\n")) {
            if (!line.includes("[#!root]")) continue;
            if (finished) {
              return;
            }
            await onReload();

            reloadCounter++;
            expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
          }
        }
      })(),
    ]);
    finished = true;
    runner.kill(0);
    runner.unref();

    expect(reloadCounter).toBe(1);
  } finally {
    // @ts-ignore
    runner?.unref?.();
    // @ts-ignore
    runner?.kill?.(9);
  }
});

it("should hot reload when a file is deleted and rewritten", async () => {
  try {
    const root = hotRunnerRoot + ".tmp.js";
    copyFileSync(hotRunnerRoot, root);
    var runner = spawn({
      cmd: [bunExe(), "--hot", "run", root],
      env: bunEnv,
      cwd,
      stdout: "pipe",
      stderr: "inherit",
      stdin: "ignore",
    });

    var reloadCounter = 0;

    async function onReload() {
      const contents = readFileSync(root, "utf-8");
      rmSync(root);
      writeFileSync(root, contents);
    }

    for await (const line of runner.stdout) {
      var str = new TextDecoder().decode(line);
      var any = false;
      for (let line of str.split("\n")) {
        if (!line.includes("[#!root]")) continue;
        reloadCounter++;

        if (reloadCounter === 3) {
          runner.unref();
          runner.kill();
          break;
        }

        expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
        any = true;
      }
      if (any) await onReload();
    }
    rmSync(root);
    expect(reloadCounter).toBe(3);
  } finally {
    // @ts-ignore
    runner?.unref?.();
    // @ts-ignore
    runner?.kill?.(9);
  }
});

it("should hot reload when a file is renamed() into place", async () => {
  const root = hotRunnerRoot + ".tmp.js";
  copyFileSync(hotRunnerRoot, root);
  try {
    var runner = spawn({
      cmd: [bunExe(), "--hot", "run", root],
      env: bunEnv,
      cwd,
      stdout: "pipe",
      stderr: "inherit",
      stdin: "ignore",
    });

    var reloadCounter = 0;

    async function onReload() {
      const contents = readFileSync(root, "utf-8");
      rmSync(root + ".tmpfile", { force: true });
      await 1;
      writeFileSync(root + ".tmpfile", contents);
      await 1;
      rmSync(root);
      await 1;
      renameSync(root + ".tmpfile", root);
      await 1;
    }

    for await (const line of runner.stdout) {
      var str = new TextDecoder().decode(line);
      var any = false;
      for (let line of str.split("\n")) {
        if (!line.includes("[#!root]")) continue;
        reloadCounter++;

        if (reloadCounter === 3) {
          runner.unref();
          runner.kill();
          break;
        }

        expect(line).toContain(`[#!root] Reloaded: ${reloadCounter}`);
        any = true;
      }
      if (any) await onReload();
    }
    rmSync(root);
    expect(reloadCounter).toBe(3);
  } finally {
    // @ts-ignore
    runner?.unref?.();
    // @ts-ignore
    runner?.kill?.(9);
  }
});


@@ -7,7 +7,7 @@ import { describe, expect, test } from "bun:test";
// before it is actually watching, we need to repeat the operation to avoid
// a race condition.
function repeat(fn: any) {
-  const interval = setInterval(fn, 20);
+  const interval = setInterval(fn, 20).unref();
  return interval;
}

const encodingFileName = `新建文夹件.txt`;

test/preload.ts (new file, 10 lines)

@@ -0,0 +1,10 @@
+import * as harness from "./harness";
+
+// We make Bun.env read-only
+// so process.env = {} causes them to be out of sync and we assume Bun.env is
+for (let key in process.env) {
+  if (key === "TZ") continue;
+  delete process.env[key];
+}
+
+Bun.$.env(Object.assign(process.env, harness.bunEnv));