Mirror of https://github.com/oven-sh/bun, synced 2026-02-17 06:12:08 +00:00
Compare commits: bun-v1.0.3 ... bun-v1.1.3 (186 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 5baa2fbb87 |  |
|  | 2615dc742e |  |
|  | a4e8534779 |  |
|  | 9380e99e2b |  |
|  | 9898e0a731 |  |
|  | ad6aadf7b2 |  |
|  | ee05bae2be |  |
|  | d615c11a57 |  |
|  | 3679f69b70 |  |
|  | 0b0bf353fa |  |
|  | c4847f464e |  |
|  | c8d072c2a9 |  |
|  | f014f35531 |  |
|  | fd3cd05647 |  |
|  | 20085d8ddc |  |
|  | 5735feac5d |  |
|  | 4ba993be7e |  |
|  | 0b2bb1fdc1 |  |
|  | b29cf75a24 |  |
|  | 05fb044577 |  |
|  | 8825b29529 |  |
|  | 182b90896f |  |
|  | 40e33da4b4 |  |
|  | f393f8a065 |  |
|  | a09c421f2a |  |
|  | ca1dbb4eb2 |  |
|  | 8a3b6f0439 |  |
|  | e7d8abb263 |  |
|  | 013bc79f62 |  |
|  | 8326235ecc |  |
|  | 7543bf936a |  |
|  | 06ec233ebe |  |
|  | 0cdad4bebb |  |
|  | 14c23cc429 |  |
|  | 0dfbdc711a |  |
|  | 3cde2365ea |  |
|  | 3cfb2816ac |  |
|  | c8f5c9f29c |  |
|  | 00f27fbeec |  |
|  | 76795af695 |  |
|  | a4b151962a |  |
|  | 1cde9bcdac |  |
|  | 0bd7265e8f |  |
|  | c831dd8db8 |  |
|  | 390441327f |  |
|  | 2e0e9f135b |  |
|  | 36f1bd3694 |  |
|  | 289d23b377 |  |
|  | bb483e8479 |  |
|  | 268f13765c |  |
|  | 801e475c72 |  |
|  | a073c85fdb |  |
|  | 8cb9f59753 |  |
|  | 5903a61410 |  |
|  | b4941cdb0c |  |
|  | 58417217d6 |  |
|  | 2d57f25637 |  |
|  | 83a99bf190 |  |
|  | e2ffa66bf7 |  |
|  | 8980dc026d |  |
|  | 4192728592 |  |
|  | bdfbcb1898 |  |
|  | 6e07f9477c |  |
|  | 2dd2fc6ed0 |  |
|  | 9e6e8b0234 |  |
|  | d53e6d6323 |  |
|  | 1edacc6e49 |  |
|  | 81badbac4c |  |
|  | 7531bfbfe0 |  |
|  | 1a989c9ad2 |  |
|  | ab7825cca5 |  |
|  | f02752577b |  |
|  | c177e054f5 |  |
|  | a01b01ae72 |  |
|  | 456a32344e |  |
|  | 6164fac256 |  |
|  | 4bbcc39d2f |  |
|  | 62c8c97e24 |  |
|  | eb708d34ae |  |
|  | c3ba60eef5 |  |
|  | 7f71f10ad1 |  |
|  | 9939049b85 |  |
|  | a5c5b5dc61 |  |
|  | a2835ef098 |  |
|  | 31c4c59740 |  |
|  | 0248e3c2b7 |  |
|  | d869fcee21 |  |
|  | 55f8ae5aea |  |
|  | e414d107e6 |  |
|  | 0103e2df73 |  |
|  | 02ad501f9e |  |
|  | d433a1ada0 |  |
|  | d712254128 |  |
|  | a500c69728 |  |
|  | d30b53591f |  |
|  | b8389f32ce |  |
|  | 7172013a72 |  |
|  | 8ff7ee03d2 |  |
|  | 5296c26dab |  |
|  | da6826e2b7 |  |
|  | a637b4c880 |  |
|  | d9074dfa5d |  |
|  | ba9834d746 |  |
|  | 4869ebff24 |  |
|  | a9804a3a11 |  |
|  | 6bedc23992 |  |
|  | 093e9c2499 |  |
|  | 3047c9005e |  |
|  | e80e61c9a3 |  |
|  | e3bf906127 |  |
|  | 4e7ed173ef |  |
|  | 31befad163 |  |
|  | 94b01b2f45 |  |
|  | 9ecb691380 |  |
|  | fb8a299765 |  |
|  | 40f61ebb91 |  |
|  | 4512a04820 |  |
|  | 1ad6a3dfb9 |  |
|  | 1ae9f998f4 |  |
|  | d113803777 |  |
|  | aaef6d350a |  |
|  | 0751581e86 |  |
|  | c510daac55 |  |
|  | ec66b07720 |  |
|  | b53147ad97 |  |
|  | dea877f19b |  |
|  | d66ace959d |  |
|  | db1283e982 |  |
|  | 79ced2767a |  |
|  | 081c7fff00 |  |
|  | 8a12c2992b |  |
|  | 306b87e929 |  |
|  | fcd7b01dba |  |
|  | 9f20d40678 |  |
|  | d030cce8bb |  |
|  | aee8eeaf45 |  |
|  | a9ad303fe2 |  |
|  | e808cdb725 |  |
|  | 1675349667 |  |
|  | 264b4be44a |  |
|  | 20fce1a1be |  |
|  | fa145b2218 |  |
|  | c8cc134edb |  |
|  | d9b7b45080 |  |
|  | 43ab5313da |  |
|  | c7289b2cd6 |  |
|  | e39a7851c8 |  |
|  | ccaacdc56c |  |
|  | c57cd5b6cd |  |
|  | c71325b52e |  |
|  | 2765347b65 |  |
|  | 39d1287f03 |  |
|  | d8a09d8517 |  |
|  | e8bb3389ef |  |
|  | 73fc225c4c |  |
|  | 0eb638c899 |  |
|  | ee5fd51e88 |  |
|  | 73a55cf075 |  |
|  | 32174a2a44 |  |
|  | 71113182c2 |  |
|  | 940448d6b6 |  |
|  | be4b47d49d |  |
|  | f90bb7719b |  |
|  | 7276ff9935 |  |
|  | 2d61c865fc |  |
|  | 7e3e7d2ed4 |  |
|  | b24d3ba5be |  |
|  | c6f22de360 |  |
|  | e124f08caf |  |
|  | 0948727243 |  |
|  | 650dc552d4 |  |
|  | 3e86b38f32 |  |
|  | cc4479096a |  |
|  | d14c99510c |  |
|  | e1e3e41e0f |  |
|  | d9496c3802 |  |
|  | 931e04b019 |  |
|  | cee6be12cc |  |
|  | a54264177b |  |
|  | 5ed609f96b |  |
|  | f15e7f733d |  |
|  | 3df320d0ed |  |
|  | bf0e5a82f7 |  |
|  | 06f04b584c |  |
|  | 76ced7c6ed |  |
|  | e1593ce2e5 |  |
21 .github/workflows/bun-linux-build.yml (vendored)
@@ -233,16 +233,18 @@ jobs:
|
||||
run: |
|
||||
# If this hangs, it means something is seriously wrong with the build
|
||||
bun --version
|
||||
- id: install-dependnecies
|
||||
name: Install dependencies
|
||||
run: |
|
||||
sudo apt-get update && sudo apt-get install -y openssl
|
||||
bun install --verbose
|
||||
bun install --cwd=test --verbose
|
||||
bun install --cwd=packages/bun-internal-test --verbose
|
||||
|
||||
bun install --cwd=test/js/third_party/prisma --verbose
|
||||
|
||||
# Split these into multiple steps to make it clear which one fails
|
||||
- name: Install dependencies (apt-get)
|
||||
run: sudo apt-get update && sudo apt-get install -y openssl
|
||||
- name: Install dependencies (root)
|
||||
run: bun install --verbose
|
||||
- name: Install dependencies (test)
|
||||
run: bun install --cwd=test --verbose
|
||||
- name: Install dependencies (runner)
|
||||
run: bun install --cwd=packages/bun-internal-test --verbose
|
||||
- name: Install dependencies (prisma)
|
||||
run: bun install --cwd=test/js/third_party/prisma --verbose
|
||||
# This is disabled because the cores are ~5.5gb each
|
||||
# so it is easy to hit 50gb coredump downloads. Only enable if you need to retrive one
|
||||
|
||||
@@ -260,6 +262,7 @@ jobs:
|
||||
TMPDIR: ${{runner.temp}}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
|
||||
18 .github/workflows/bun-mac-aarch64.yml (vendored)
@@ -47,8 +47,8 @@ jobs:
|
||||
tag: bun-obj-darwin-aarch64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
# - name: Checkout submodules
|
||||
# run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
@@ -416,12 +416,13 @@ jobs:
|
||||
run: |
|
||||
# If this hangs, it means something is seriously wrong with the build
|
||||
bun --version
|
||||
- id: install
|
||||
name: Install dependencies
|
||||
run: |
|
||||
bun install --verbose
|
||||
bun install --cwd=test --verbose
|
||||
bun install --cwd=packages/bun-internal-test --verbose
|
||||
# Split these into multiple steps to make it clear which one fails
|
||||
- name: "Install dependencies (root)"
|
||||
run: bun install --verbose
|
||||
- name: "Install dependencies (test)"
|
||||
run: bun install --cwd=test --verbose
|
||||
- name: "Install dependencies (runner)"
|
||||
run: bun install --cwd=packages/bun-internal-test --verbose
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
@@ -429,6 +430,7 @@ jobs:
|
||||
TMPDIR: ${{runner.temp}}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
|
||||
3 .github/workflows/bun-mac-x64-baseline.yml (vendored)
@@ -53,6 +53,8 @@ jobs:
|
||||
# tag: bun-obj-darwin-aarch64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
@@ -416,6 +418,7 @@ jobs:
|
||||
TMPDIR: ${{runner.temp}}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
|
||||
16 .github/workflows/bun-mac-x64.yml (vendored)
@@ -50,6 +50,8 @@ jobs:
|
||||
tag: bun-obj-darwin-x64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
@@ -400,12 +402,13 @@ jobs:
|
||||
run: |
|
||||
# If this hangs, it means something is seriously wrong with the build
|
||||
bun --version
|
||||
- id: install
|
||||
name: Install dependencies
|
||||
run: |
|
||||
bun install --verbose
|
||||
bun install --cwd=test --verbose
|
||||
bun install --cwd=packages/bun-internal-test --verbose
|
||||
# Split these into multiple steps to make it clear which one fails
|
||||
- name: "Install dependencies (root)"
|
||||
run: bun install --verbose
|
||||
- name: "Install dependencies (test)"
|
||||
run: bun install --cwd=test --verbose
|
||||
- name: "Install dependencies (runner)"
|
||||
run: bun install --cwd=packages/bun-internal-test --verbose
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
@@ -413,6 +416,7 @@ jobs:
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TMPDIR: ${{runner.temp}}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
|
||||
44 .github/workflows/bun-release-test.yml (vendored, new file)
@@ -0,0 +1,44 @@
|
||||
# This workflow tests bun-release's code and the packages to ensure that npm,
|
||||
# yarn, and pnpm can install bun on all platforms. This does not test that bun
|
||||
# itself works as it hardcodes 1.1.0 as the version to package.
|
||||
name: bun-release-test
|
||||
concurrency: release-test
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "packages/bun-release/**"
|
||||
- ".github/workflows/bun-release-test.yml"
|
||||
|
||||
jobs:
|
||||
test-release-script:
|
||||
name: Test Release Script
|
||||
strategy:
|
||||
matrix:
|
||||
machine: [namespace-profile-bun-linux-x64, linux-arm64, macos-arm64, macos-12-large, windows-latest]
|
||||
fail-fast: false
|
||||
runs-on: ${{ matrix.machine }}
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: "1.1.0"
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: Install Dependencies
|
||||
run: bun install && npm i -g pnpm yarn npm
|
||||
|
||||
- name: Release
|
||||
run: bun upload-npm -- 1.1.0 test
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
57 .github/workflows/bun-windows.yml (vendored)
@@ -18,6 +18,7 @@ on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- ".github/workflows/*.yml"
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
@@ -29,6 +30,7 @@ on:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths:
|
||||
- ".github/workflows/*.yml"
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
@@ -59,6 +61,8 @@ jobs:
|
||||
steps:
|
||||
- run: git config --global core.autocrlf false && git config --global core.eol lf
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
@@ -88,7 +92,7 @@ jobs:
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{ matrix.arch }}-windows-msvc
|
||||
GIT_SHA=${{ github.sha }}
|
||||
CANARY=${{ env.canary == 'true' && steps.canary.outputs.canary_revision || '0' }}
|
||||
CANARY=0
|
||||
ZIG_OPTIMIZE=ReleaseSafe
|
||||
# TODO(@paperdave): enable ASSERTIONS=1
|
||||
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
@@ -245,7 +249,8 @@ jobs:
|
||||
# $env:AWS_SECRET_ACCESS_KEY="${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}"
|
||||
# $SCCACHE="$PWD/${sczip}/${sczip}/sccache.exe"
|
||||
|
||||
$CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
$CANARY_REVISION = 0
|
||||
|
||||
.\scripts\env.ps1 ${{ matrix.cpu == 'nehalem' && '-Baseline' || '' }}
|
||||
.\scripts\update-submodules.ps1
|
||||
@@ -310,7 +315,8 @@ jobs:
|
||||
.\scripts\update-submodules.ps1
|
||||
.\scripts\env.ps1 ${{ matrix.cpu == 'nehalem' && '-Baseline' || '' }}
|
||||
Set-Location build
|
||||
$CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
$CANARY_REVISION = 0
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
|
||||
-DNO_CODEGEN=1 `
|
||||
-DNO_CONFIGURE_DEPENDS=1 `
|
||||
@@ -417,19 +423,16 @@ jobs:
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- uses: secondlife/setup-cygwin@v1
|
||||
- uses: secondlife/setup-cygwin@v3
|
||||
with:
|
||||
packages: bash
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
# bun install --verbose
|
||||
# bun install --cwd=test --verbose
|
||||
# bun install --cwd=packages/bun-internal-test --verbose
|
||||
|
||||
npm install
|
||||
cd test && npm install
|
||||
cd ../packages/bun-internal-test && npm install
|
||||
cd ../..
|
||||
# Split these into multiple steps to make it clear which one fails
|
||||
- name: Install dependencies (root)
|
||||
run: bun install --verbose
|
||||
- name: Install dependencies (test)
|
||||
run: bun install --cwd=test --verbose
|
||||
- name: Install dependencies (runner)
|
||||
run: bun install --cwd=packages/bun-internal-test --verbose
|
||||
- id: test
|
||||
name: Run tests
|
||||
env:
|
||||
@@ -437,6 +440,7 @@ jobs:
|
||||
TMPDIR: ${{runner.temp}}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
SHELLOPTS: igncr
|
||||
BUN_PATH_BASE: ${{runner.temp}}
|
||||
BUN_PATH: release/${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile/bun.exe
|
||||
@@ -460,7 +464,7 @@ jobs:
|
||||
|
||||
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: always() && steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
|
||||
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
with:
|
||||
title: ""
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
@@ -468,26 +472,29 @@ jobs:
|
||||
noprefix: true
|
||||
nocontext: true
|
||||
description: |
|
||||
### ❌🪟 [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
|
||||
Pull Request
|
||||
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
|
||||
|
||||
@${{ github.actor }}, there are **${{ steps.test.outputs.regressing_test_count }} test regressions** on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }}
|
||||
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on bun-windows-${{ matrix.arch }}-${{ matrix.cpu }}
|
||||
|
||||
${{ steps.test.outputs.regressing_tests }}
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
- name: Comment on PR
|
||||
if: always() && steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
|
||||
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-windows-${{ matrix.arch }}-${{ matrix.cpu }}
|
||||
message: |
|
||||
### ❌🪟 @${{ github.actor }}, there are **${{ steps.test.outputs.regressing_test_count }} test regressions** on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }}
|
||||
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on bun-windows-${{ matrix.arch }}-${{ matrix.cpu }}
|
||||
|
||||
${{ steps.test.outputs.regressing_tests }}
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Uncomment on PR
|
||||
if: steps.test.outputs.regressing_tests == '' && github.event_name == 'pull_request'
|
||||
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-windows-${{ matrix.arch }}-${{ matrix.cpu }}
|
||||
@@ -497,5 +504,5 @@ jobs:
|
||||
✅🪟 Test regressions on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }} have been resolved.
|
||||
- id: fail
|
||||
name: Fail the build
|
||||
if: steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
|
||||
if: steps.test.outputs.failing_tests != ''
|
||||
run: exit 1
|
||||
|
||||
312 .gitignore (vendored)
@@ -1,169 +1,143 @@
|
||||
.DS_Store
|
||||
zig-cache
|
||||
packages/*/*.wasm
|
||||
*.o
|
||||
*.a
|
||||
profile.json
|
||||
|
||||
.env
|
||||
node_modules
|
||||
.envrc
|
||||
.swcrc
|
||||
yarn.lock
|
||||
dist
|
||||
*.tmp
|
||||
*.log
|
||||
*.out.js
|
||||
*.out.refresh.js
|
||||
**/package-lock.json
|
||||
build
|
||||
*.wat
|
||||
zig-out
|
||||
pnpm-lock.yaml
|
||||
README.md.template
|
||||
src/deps/zig-clap/example
|
||||
src/deps/zig-clap/README.md
|
||||
src/deps/zig-clap/.github
|
||||
src/deps/zig-clap/.gitattributes
|
||||
out
|
||||
outdir
|
||||
|
||||
.trace
|
||||
cover
|
||||
coverage
|
||||
coverv
|
||||
*.trace
|
||||
github
|
||||
out.*
|
||||
out
|
||||
.parcel-cache
|
||||
esbuilddir
|
||||
*.bun
|
||||
parceldist
|
||||
esbuilddir
|
||||
outdir/
|
||||
outcss
|
||||
.next
|
||||
txt.js
|
||||
.idea
|
||||
.vscode/cpp*
|
||||
.vscode/clang*
|
||||
|
||||
node_modules_*
|
||||
*.jsb
|
||||
*.zip
|
||||
bun-zigld
|
||||
bun-singlehtreaded
|
||||
bun-nomimalloc
|
||||
bun-mimalloc
|
||||
examples/lotta-modules/bun-yday
|
||||
examples/lotta-modules/bun-old
|
||||
examples/lotta-modules/bun-nofscache
|
||||
|
||||
src/node-fallbacks/out/*
|
||||
src/node-fallbacks/node_modules
|
||||
sign.json
|
||||
release/
|
||||
*.dmg
|
||||
sign.*.json
|
||||
packages/debug-*
|
||||
packages/bun-cli/postinstall.js
|
||||
packages/bun-*/bun
|
||||
packages/bun-*/bun-profile
|
||||
packages/bun-*/debug-bun
|
||||
packages/bun-*/*.o
|
||||
packages/bun-cli/postinstall.js
|
||||
|
||||
packages/bun-cli/bin/*
|
||||
bun-test-scratch
|
||||
misctools/fetch
|
||||
|
||||
src/deps/libiconv
|
||||
src/deps/openssl
|
||||
src/tests.zig
|
||||
*.blob
|
||||
src/deps/s2n-tls
|
||||
.npm
|
||||
.npm.gz
|
||||
|
||||
bun-binary
|
||||
|
||||
src/deps/PLCrashReporter/
|
||||
|
||||
*.dSYM
|
||||
*.crash
|
||||
misctools/sha
|
||||
packages/bun-wasm/*.mjs
|
||||
packages/bun-wasm/*.cjs
|
||||
packages/bun-wasm/*.map
|
||||
packages/bun-wasm/*.js
|
||||
packages/bun-wasm/*.d.ts
|
||||
packages/bun-wasm/*.d.cts
|
||||
packages/bun-wasm/*.d.mts
|
||||
*.bc
|
||||
|
||||
src/fallback.version
|
||||
src/runtime.version
|
||||
*.sqlite
|
||||
*.database
|
||||
*.db
|
||||
misctools/machbench
|
||||
*.big
|
||||
.eslintcache
|
||||
|
||||
/bun-webkit
|
||||
|
||||
src/deps/c-ares/build
|
||||
src/bun.js/bindings-obj
|
||||
src/bun.js/debug-bindings-obj
|
||||
|
||||
failing-tests.txt
|
||||
test.txt
|
||||
myscript.sh
|
||||
|
||||
cold-jsc-start
|
||||
cold-jsc-start.d
|
||||
|
||||
/testdir
|
||||
/test.ts
|
||||
/test.js
|
||||
|
||||
src/js/out/modules*
|
||||
src/js/out/functions*
|
||||
src/js/out/tmp
|
||||
src/js/out/DebugPath.h
|
||||
|
||||
make-dev-stats.csv
|
||||
|
||||
.uuid
|
||||
tsconfig.tsbuildinfo
|
||||
|
||||
test/js/bun/glob/fixtures
|
||||
*.lib
|
||||
*.pdb
|
||||
CMakeFiles
|
||||
build.ninja
|
||||
.ninja_deps
|
||||
.ninja_log
|
||||
CMakeCache.txt
|
||||
cmake_install.cmake
|
||||
compile_commands.json
|
||||
|
||||
*.lib
|
||||
x64
|
||||
**/*.vcxproj*
|
||||
**/*.sln*
|
||||
**/*.dir
|
||||
**/*.pdb
|
||||
|
||||
/.webkit-cache
|
||||
/.cache
|
||||
/src/deps/libuv
|
||||
/build-*/
|
||||
/kcov-out
|
||||
|
||||
.vs
|
||||
|
||||
**/.verdaccio-db.json
|
||||
/test-report.md
|
||||
/test-report.json
|
||||
.DS_Store
|
||||
.env
|
||||
.envrc
|
||||
.eslintcache
|
||||
.idea
|
||||
.next
|
||||
.ninja_deps
|
||||
.ninja_log
|
||||
.npm
|
||||
.npm.gz
|
||||
.parcel-cache
|
||||
.swcrc
|
||||
.trace
|
||||
.uuid
|
||||
.vs
|
||||
.vscode/clang*
|
||||
.vscode/cpp*
|
||||
*.a
|
||||
*.bc
|
||||
*.big
|
||||
*.blob
|
||||
*.bun
|
||||
*.crash
|
||||
*.database
|
||||
*.db
|
||||
*.dmg
|
||||
*.dSYM
|
||||
*.jsb
|
||||
*.lib
|
||||
*.log
|
||||
*.o
|
||||
*.out.js
|
||||
*.out.refresh.js
|
||||
*.pdb
|
||||
*.sqlite
|
||||
*.tmp
|
||||
*.trace
|
||||
*.wat
|
||||
*.zip
|
||||
**/.verdaccio-db.json
|
||||
**/*.dir
|
||||
**/*.pdb
|
||||
**/*.sln*
|
||||
**/*.vcxproj*
|
||||
**/package-lock.json
|
||||
/.cache
|
||||
/.webkit-cache
|
||||
/build-*/
|
||||
/bun-webkit
|
||||
/kcov-out
|
||||
/src/deps/libuv
|
||||
/test-report.json
|
||||
/test-report.md
|
||||
/test.js
|
||||
/test.ts
|
||||
/testdir
|
||||
build
|
||||
build.ninja
|
||||
bun-binary
|
||||
bun-mimalloc
|
||||
bun-nomimalloc
|
||||
bun-singlehtreaded
|
||||
bun-test-scratch
|
||||
bun-zigld
|
||||
cmake_install.cmake
|
||||
CMakeCache.txt
|
||||
CMakeFiles
|
||||
cold-jsc-start
|
||||
cold-jsc-start.d
|
||||
compile_commands.json
|
||||
cover
|
||||
coverage
|
||||
coverv
|
||||
dist
|
||||
esbuilddir
|
||||
examples/lotta-modules/bun-nofscache
|
||||
examples/lotta-modules/bun-old
|
||||
examples/lotta-modules/bun-yday
|
||||
failing-tests.txt
|
||||
github
|
||||
make-dev-stats.csv
|
||||
misctools/fetch
|
||||
misctools/machbench
|
||||
misctools/sha
|
||||
myscript.sh
|
||||
node_modules
|
||||
node_modules_*
|
||||
out
|
||||
out.*
|
||||
outcss
|
||||
outdir
|
||||
outdir/
|
||||
packages/*/*.wasm
|
||||
packages/bun-*/*.o
|
||||
packages/bun-*/bun
|
||||
packages/bun-*/bun-profile
|
||||
packages/bun-*/debug-bun
|
||||
packages/bun-cli/bin/*
|
||||
packages/bun-cli/postinstall.js
|
||||
packages/bun-wasm/*.cjs
|
||||
packages/bun-wasm/*.d.cts
|
||||
packages/bun-wasm/*.d.mts
|
||||
packages/bun-wasm/*.d.ts
|
||||
packages/bun-wasm/*.js
|
||||
packages/bun-wasm/*.map
|
||||
packages/bun-wasm/*.mjs
|
||||
packages/debug-*
|
||||
parceldist
|
||||
pnpm-lock.yaml
|
||||
profile.json
|
||||
README.md.template
|
||||
release/
|
||||
sign.*.json
|
||||
sign.json
|
||||
src/bun.js/bindings-obj
|
||||
src/bun.js/bindings/GeneratedJS2Native.zig
|
||||
src/bun.js/debug-bindings-obj
|
||||
src/deps/c-ares/build
|
||||
src/deps/libiconv
|
||||
src/deps/openssl
|
||||
src/deps/PLCrashReporter/
|
||||
src/deps/s2n-tls
|
||||
src/deps/zig-clap/.gitattributes
|
||||
src/deps/zig-clap/.github
|
||||
src/deps/zig-clap/example
|
||||
src/deps/zig-clap/README.md
|
||||
src/fallback.version
|
||||
src/js/out/DebugPath.h
|
||||
src/js/out/functions*
|
||||
src/js/out/modules*
|
||||
src/js/out/tmp
|
||||
src/node-fallbacks/node_modules
|
||||
src/node-fallbacks/out/*
|
||||
src/runtime.version
|
||||
src/tests.zig
|
||||
test.txt
|
||||
test/js/bun/glob/fixtures
|
||||
tsconfig.tsbuildinfo
|
||||
txt.js
|
||||
x64
|
||||
yarn.lock
|
||||
zig-cache
|
||||
zig-out
|
||||
7 .gitmodules (vendored)
@@ -83,3 +83,10 @@ ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "zig"]
|
||||
path = src/deps/zig
|
||||
url = https://github.com/oven-sh/zig
|
||||
branch = bun
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
|
||||
@@ -3,3 +3,4 @@ src/deps
|
||||
test/snapshots
|
||||
test/js/deno
|
||||
src/react-refresh.js
|
||||
*.min.js
|
||||
|
||||
1 .vscode/c_cpp_properties.json (vendored)
@@ -55,6 +55,7 @@
|
||||
"name": "BunWithJSCDebug",
|
||||
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
|
||||
"includePath": [
|
||||
"${workspaceFolder}/build/codegen",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/",
|
||||
|
||||
61 .vscode/launch.json (generated, vendored)
@@ -355,6 +355,21 @@
|
||||
"action": "openExternally"
|
||||
}
|
||||
},
|
||||
// bun exec [...]
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun exec [...]",
|
||||
"program": "${workspaceFolder}/build/bun-debug",
|
||||
"args": ["exec", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
// bun test [*]
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -404,6 +419,20 @@
|
||||
"action": "openExternally"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun install [folder]",
|
||||
"program": "${workspaceFolder}/build/bun-debug",
|
||||
"args": ["install"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
@@ -611,19 +640,16 @@
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"request": "launch",
|
||||
"name": "Windows: bun run [file] (fast)",
|
||||
"name": "Windows: bun install",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
"args": ["run", "${fileBasename}"],
|
||||
"args": ["install"],
|
||||
"cwd": "${fileDirname}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1"
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1"
|
||||
},
|
||||
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "0"
|
||||
@@ -887,6 +913,29 @@
|
||||
"action": "openExternally"
|
||||
}
|
||||
},
|
||||
// Windows: bun exec [...]
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"request": "launch",
|
||||
"name": "Windows: bun exec [...]",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
"args": ["exec", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1"
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1"
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "2"
|
||||
}
|
||||
]
|
||||
},
|
||||
// Windows: bun test [*]
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
|
||||
21 .vscode/settings.json (vendored)
@@ -40,7 +40,7 @@
|
||||
// C++
|
||||
"lldb.verboseLogging": false,
|
||||
"cmake.configureOnOpen": false,
|
||||
"C_Cpp.errorSquiggles": "enabled",
|
||||
"C_Cpp.errorSquiggles": "enabled",
|
||||
"[cpp]": {
|
||||
"editor.defaultFormatter": "xaver.clang-format"
|
||||
},
|
||||
@@ -55,7 +55,7 @@
|
||||
"prettier.enable": true,
|
||||
"eslint.workingDirectories": ["${workspaceFolder}/packages/bun-types"],
|
||||
"[javascript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[javascriptreact]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
@@ -72,12 +72,12 @@
|
||||
|
||||
// JSON
|
||||
"[json]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[jsonc]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
|
||||
|
||||
// Markdown
|
||||
"[markdown]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
@@ -94,12 +94,17 @@
|
||||
|
||||
// TOML
|
||||
"[toml]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
|
||||
// YAML
|
||||
"[yaml]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
|
||||
// Docker
|
||||
"[dockerfile]": {
|
||||
"editor.formatOnSave": false
|
||||
},
|
||||
|
||||
// Files
|
||||
@@ -148,5 +153,5 @@
|
||||
"WebKit/WebDriver": true,
|
||||
"WebKit/WebKitBuild": true,
|
||||
"WebKit/WebInspectorUI": true
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
120 CMakeLists.txt
@@ -2,39 +2,8 @@ cmake_minimum_required(VERSION 3.22)
|
||||
cmake_policy(SET CMP0091 NEW)
|
||||
cmake_policy(SET CMP0067 NEW)
|
||||
|
||||
set(Bun_VERSION "1.0.33")
|
||||
set(WEBKIT_TAG 089023cc9078b3aa173869fd6685f3e7bed2a994)
|
||||
|
||||
if(APPLE AND DEFINED ENV{CI})
|
||||
if(ARCH STREQUAL "x86_64")
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET "10.14")
|
||||
else()
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET "11.0")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(APPLE AND NOT CMAKE_OSX_DEPLOYMENT_TARGET)
|
||||
execute_process(COMMAND xcrun --show-sdk-path OUTPUT_VARIABLE SDKROOT)
|
||||
string(STRIP ${SDKROOT} SDKROOT)
|
||||
message(STATUS "Using SDKROOT: ${SDKROOT}")
|
||||
SET(CMAKE_OSX_SYSROOT ${SDKROOT})
|
||||
|
||||
execute_process(COMMAND xcrun --sdk macosx --show-sdk-version OUTPUT_VARIABLE MACOSX_DEPLOYMENT_TARGET)
|
||||
string(STRIP ${MACOSX_DEPLOYMENT_TARGET} MACOSX_DEPLOYMENT_TARGET)
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET ${MACOSX_DEPLOYMENT_TARGET})
|
||||
|
||||
# Check if current version of macOS is less than the deployment target and if so, raise an error
|
||||
execute_process(COMMAND sw_vers -productVersion OUTPUT_VARIABLE MACOS_VERSION)
|
||||
string(STRIP ${MACOS_VERSION} MACOS_VERSION)
|
||||
|
||||
if(MACOS_VERSION VERSION_LESS ${MACOSX_DEPLOYMENT_TARGET})
|
||||
message(FATAL_ERROR "\nThe current version of macOS (${MACOS_VERSION}) is less than the deployment target (${MACOSX_DEPLOYMENT_TARGET}).\nThis makes icucore fail to run at start.\nTo fix this, please either:\n- Upgrade to the latest version of macOS\n- Use `xcode-select` to switch to an SDK version <= ${MACOS_VERSION}")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(APPLE)
|
||||
message(STATUS "Building for macOS v${CMAKE_OSX_DEPLOYMENT_TARGET}")
|
||||
endif()
|
||||
set(Bun_VERSION "1.1.3")
|
||||
set(WEBKIT_TAG e3a2d89a0b1644cc8d5c245bd2ffee4d4bd6c1d5)
|
||||
|
||||
set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
|
||||
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")
|
||||
@@ -80,6 +49,45 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# --- MacOS SDK ---
|
||||
if(APPLE AND DEFINED ENV{CI})
|
||||
if(ARCH STREQUAL "x86_64")
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET "10.14")
|
||||
else()
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET "11.0")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(APPLE AND NOT CMAKE_OSX_DEPLOYMENT_TARGET)
|
||||
execute_process(COMMAND xcrun --show-sdk-path OUTPUT_VARIABLE SDKROOT)
|
||||
string(STRIP ${SDKROOT} SDKROOT)
|
||||
message(STATUS "MacOS SDK path: ${SDKROOT}")
|
||||
SET(CMAKE_OSX_SYSROOT ${SDKROOT})
|
||||
|
||||
execute_process(COMMAND xcrun --sdk macosx --show-sdk-version OUTPUT_VARIABLE MACOSX_DEPLOYMENT_TARGET)
|
||||
string(STRIP ${MACOSX_DEPLOYMENT_TARGET} MACOSX_DEPLOYMENT_TARGET)
|
||||
set(CMAKE_OSX_DEPLOYMENT_TARGET ${MACOSX_DEPLOYMENT_TARGET})
|
||||
|
||||
# Check if current version of macOS is less than the deployment target and if so, raise an error
|
||||
execute_process(COMMAND sw_vers -productVersion OUTPUT_VARIABLE MACOS_VERSION)
|
||||
string(STRIP ${MACOS_VERSION} MACOS_VERSION)
|
||||
|
||||
if(MACOS_VERSION VERSION_LESS ${MACOSX_DEPLOYMENT_TARGET})
|
||||
message(WARNING
|
||||
"The current version of macOS (${MACOS_VERSION}) is less than the deployment target (${MACOSX_DEPLOYMENT_TARGET}).\n"
|
||||
"The build will be incompatible with your current device due to mismatches in `icucore` versions.\n"
|
||||
"To fix this, please either:\n"
|
||||
" - Upgrade to at least macOS ${MACOSX_DEPLOYMENT_TARGET}\n"
|
||||
" - Use `xcode-select` to switch to an SDK version <= ${MACOS_VERSION}\n"
|
||||
" - Set CMAKE_OSX_DEPLOYMENT_TARGET=${MACOS_VERSION} (make sure to build all dependencies with this variable set too)"
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(APPLE)
|
||||
message(STATUS "Building for macOS v${CMAKE_OSX_DEPLOYMENT_TARGET}")
|
||||
endif()
|
||||
|
||||
# --- LLVM ---
|
||||
# This detection is a little overkill, but it ensures that the set LLVM_VERSION matches under
|
||||
# any case possible. Sorry for the complexity...
|
||||
@@ -307,6 +315,10 @@ option(USE_STATIC_LIBATOMIC "Statically link libatomic, requires the presence of
|
||||
|
||||
option(USE_LTO "Enable Link-Time Optimization" ${DEFAULT_LTO})
|
||||
|
||||
if(NOT ZIG_LIB_DIR)
|
||||
cmake_path(SET ZIG_LIB_DIR NORMALIZE "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/zig/lib")
|
||||
endif()
|
||||
|
||||
if(USE_VALGRIND)
|
||||
# Disable SIMD
|
||||
set(USE_BASELINE_BUILD ON)
|
||||
@@ -543,6 +555,7 @@ else()
|
||||
add_compile_definitions("BUN_DEBUG=1")
|
||||
set(ASSERT_ENABLED "1")
|
||||
endif()
|
||||
message(STATUS "Using WebKit from ${WEBKIT_DIR}")
|
||||
else()
|
||||
if(NOT EXISTS "${WEBKIT_DIR}/lib/${libWTF}.${STATIC_LIB_EXT}" OR NOT EXISTS "${WEBKIT_DIR}/lib/${libJavaScriptCore}.${STATIC_LIB_EXT}")
|
||||
if(WEBKIT_DIR MATCHES "src/bun.js/WebKit$")
|
||||
@@ -737,19 +750,26 @@ if(NOT NO_CODEGEN)
|
||||
file(GLOB BUN_TS_MODULES ${CONFIGURE_DEPENDS}
|
||||
"${BUN_SRC}/js/node/*.ts"
|
||||
"${BUN_SRC}/js/node/*.js"
|
||||
"${BUN_SRC}/js/bun/*.js"
|
||||
"${BUN_SRC}/js/bun/*.ts"
|
||||
"${BUN_SRC}/js/bun/*.js"
|
||||
"${BUN_SRC}/js/builtins/*.ts"
|
||||
"${BUN_SRC}/js/builtins/*.js"
|
||||
"${BUN_SRC}/js/thirdparty/*.js"
|
||||
"${BUN_SRC}/js/thirdparty/*.ts"
|
||||
"${BUN_SRC}/js/internal/*.js"
|
||||
"${BUN_SRC}/js/internal/*.ts"
|
||||
"${BUN_SRC}/js/node/*.js"
|
||||
"${BUN_SRC}/js/node/*.ts"
|
||||
"${BUN_SRC}/js/thirdparty/*.js"
|
||||
"${BUN_SRC}/js/thirdparty/*.ts"
|
||||
)
|
||||
file(GLOB BUN_TS_FUNCTIONS ${CONFIGURE_DEPENDS} "${BUN_SRC}/js/builtins/*.ts")
|
||||
|
||||
file(GLOB CODEGEN_FILES ${CONFIGURE_DEPENDS} "${BUN_CODEGEN_SRC}/*.ts")
|
||||
|
||||
add_custom_command(
|
||||
OUTPUT
|
||||
"${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp"
|
||||
"${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.h"
|
||||
"${BUN_WORKDIR}/codegen/InternalModuleRegistryConstants.h"
|
||||
"${BUN_WORKDIR}/codegen/InternalModuleRegistry+createInternalModuleById.h"
|
||||
"${BUN_WORKDIR}/codegen/InternalModuleRegistry+enum.h"
|
||||
@@ -757,10 +777,12 @@ if(NOT NO_CODEGEN)
|
||||
"${BUN_WORKDIR}/codegen/NativeModuleImpl.h"
|
||||
"${BUN_WORKDIR}/codegen/ResolvedSourceTag.zig"
|
||||
"${BUN_WORKDIR}/codegen/SyntheticModuleType.h"
|
||||
"${BUN_WORKDIR}/codegen/GeneratedJS2Native.h"
|
||||
"${BUN_SRC}/bun.js/bindings/GeneratedJS2Native.zig"
|
||||
COMMAND ${BUN_EXECUTABLE} run "${BUN_SRC}/codegen/bundle-modules.ts" "--debug=${DEBUG}" "${BUN_WORKDIR}"
|
||||
DEPENDS ${BUN_TS_MODULES} ${CODEGEN_FILES}
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
COMMENT "Bundling JS modules"
|
||||
COMMENT "Bundling JS"
|
||||
)
|
||||
endif()
|
||||
|
||||
@@ -768,15 +790,6 @@ WEBKIT_ADD_SOURCE_DEPENDENCIES(
|
||||
"${BUN_SRC}/bun.js/bindings/InternalModuleRegistry.cpp"
|
||||
"${BUN_WORKDIR}/codegen/InternalModuleRegistryConstants.h"
|
||||
)
|
||||
|
||||
add_custom_command(
|
||||
OUTPUT "${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp"
|
||||
"${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.h"
|
||||
COMMAND ${BUN_EXECUTABLE} run "${BUN_SRC}/codegen/bundle-functions.ts" "--debug=${DEBUG}" "${BUN_WORKDIR}"
|
||||
DEPENDS ${BUN_TS_FUNCTIONS} ${CODEGEN_FILES}
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
COMMENT "Bundling JS builtin functions"
|
||||
)
|
||||
list(APPEND BUN_RAW_SOURCES "${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp")
|
||||
|
||||
# --- Peechy API ---
|
||||
@@ -844,8 +857,10 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
|
||||
OUTPUT "${BUN_ZIG_OBJ}"
|
||||
COMMAND
|
||||
"${ZIG_COMPILER}" "build" "obj"
|
||||
"--zig-lib-dir" "${ZIG_LIB_DIR}"
|
||||
"-Doutput-file=${BUN_ZIG_OBJ}"
|
||||
"-Dgenerated-code=${BUN_WORKDIR}/codegen"
|
||||
"-freference-trace=10"
|
||||
"-Dversion=${Bun_VERSION}"
|
||||
"-Dcanary=${CANARY}"
|
||||
"-Doptimize=${ZIG_OPTIMIZE}"
|
||||
@@ -858,6 +873,7 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
|
||||
"${BUN_WORKDIR}/codegen/ResolvedSourceTag.zig"
|
||||
"${BUN_IDENTIFIER_CACHE_OUT}"
|
||||
"${BUN_SRC}/api/schema.zig"
|
||||
"${BUN_SRC}/bun.js/bindings/GeneratedJS2Native.zig"
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
COMMENT "Building zig code"
|
||||
VERBATIM
|
||||
@@ -1002,6 +1018,12 @@ if(CMAKE_BUILD_TYPE STREQUAL "Debug")
|
||||
-Werror=return-stack-address
|
||||
-Werror=implicit-function-declaration
|
||||
-Werror=uninitialized
|
||||
-Werror=conditional-uninitialized
|
||||
-Werror=suspicious-memaccess
|
||||
-Werror=move
|
||||
-Werror=sometimes-uninitialized
|
||||
-Werror=unused
|
||||
-Wno-unused-function
|
||||
-Werror
|
||||
)
|
||||
else()
|
||||
@@ -1017,18 +1039,24 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm")
|
||||
endif()
|
||||
|
||||
# Leave -Werror=unused off in release builds so we avoid errors from being used in ASSERT
|
||||
target_compile_options(${bun} PUBLIC -O3 ${LTO_FLAG} -g1
|
||||
-Werror=return-type
|
||||
-Werror=return-stack-address
|
||||
-Werror=implicit-function-declaration
|
||||
-Werror=uninitialized
|
||||
-Werror=conditional-uninitialized
|
||||
-Werror=suspicious-memaccess
|
||||
-Werror=move
|
||||
-Werror=sometimes-uninitialized
|
||||
-Werror
|
||||
)
|
||||
else()
|
||||
set(LTO_LINK_FLAG "")
|
||||
|
||||
if(USE_LTO)
|
||||
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm")
|
||||
# -emit-llvm seems to not be supported or under a different name on Windows.
|
||||
list(APPEND LTO_FLAG "-flto=full")
|
||||
list(APPEND LTO_LINK_FLAG "/LTCG")
|
||||
endif()
|
||||
|
||||
|
||||
@@ -116,7 +116,7 @@ RUN apt-get update -y \
|
||||
&& case "${arch##*-}" in \
|
||||
amd64) variant="x64";; \
|
||||
arm64) variant="aarch64";; \
|
||||
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
|
||||
*) echo "unsupported architecture: $arch"; exit 1 ;; \
|
||||
esac \
|
||||
&& wget "${BUN_DOWNLOAD_URL_BASE}/bun-linux-${variant}.zip" \
|
||||
&& unzip bun-linux-${variant}.zip \
|
||||
@@ -414,7 +414,7 @@ COPY --from=bun-codegen-for-zig ${BUN_DIR}/packages/bun-error/dist ${BUN_DIR}/pa
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN mkdir -p build \
|
||||
&& bun run $BUN_DIR/src/codegen/bundle-modules-fast.ts $BUN_DIR/build \
|
||||
&& bun run $BUN_DIR/src/codegen/bundle-modules.ts --debug=OFF $BUN_DIR/build \
|
||||
&& cd build \
|
||||
&& cmake .. \
|
||||
-G Ninja \
|
||||
@@ -429,6 +429,7 @@ RUN mkdir -p build \
|
||||
-DBUN_ZIG_OBJ="/tmp/bun-zig.o" \
|
||||
-DCANARY="${CANARY}" \
|
||||
-DZIG_COMPILER=system \
|
||||
-DZIG_LIB_DIR=$BUN_DIR/src/deps/zig/lib \
|
||||
&& ONLY_ZIG=1 ninja "/tmp/bun-zig.o" -v
|
||||
|
||||
FROM scratch as build_release_obj
|
||||
|
||||
@@ -45,16 +45,17 @@ bunx cowsay 'Hello, world!' # execute a package
|
||||
|
||||
## Install
|
||||
|
||||
Bun supports Linux (x64 & arm64) and macOS (x64 & Apple Silicon).
|
||||
Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64).
|
||||
|
||||
> **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
|
||||
>
|
||||
> **Windows users** — Bun does not currently provide a native Windows build. We're working on this; progress can be tracked at [this issue](https://github.com/oven-sh/bun/issues/43). In the meantime, use one of the installation methods below for Windows Subsystem for Linux.
|
||||
|
||||
```sh
|
||||
# with install script (recommended)
|
||||
curl -fsSL https://bun.sh/install | bash
|
||||
|
||||
# on windows
|
||||
powershell -c "irm bun.sh/install.ps1 | iex"
|
||||
|
||||
# with npm
|
||||
npm install -g bun
|
||||
|
||||
|
||||
BIN bench/bun.lockb (binary file not shown)
@@ -1,9 +1,10 @@
|
||||
import { mkdirSync, writeFileSync } from "fs";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { mkdirSync, rmSync, writeFileSync } from "fs";
|
||||
import { cp } from "fs/promises";
|
||||
import { join } from "path";
|
||||
import { tmpdir } from "os";
|
||||
import { join, resolve } from "path";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
|
||||
import { fileURLToPath } from "url";
|
||||
const hugeDirectory = (() => {
|
||||
const root = join(tmpdir(), "huge");
|
||||
const base = join(root, "directory", "for", "benchmarks", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10");
|
||||
@@ -18,14 +19,21 @@ const hugeDirectory = (() => {
|
||||
const hugeFilePath = join(tmpdir(), "huge-file-0.txt");
|
||||
const hugeText = "Hello, world!".repeat(1000000);
|
||||
writeFileSync(hugeFilePath, hugeText);
|
||||
let base = process.argv.at(-1);
|
||||
if (resolve(base) === fileURLToPath(import.meta.url)) {
|
||||
base = tmpdir();
|
||||
} else {
|
||||
rmSync(base, { recursive: true, force: true });
|
||||
mkdirSync(base, { recursive: true });
|
||||
}
|
||||
|
||||
var hugeCopyI = 0;
|
||||
bench("cp -r (1000 files)", async b => {
|
||||
await cp(hugeDirectory, join(tmpdir(), "huge-copy" + hugeCopyI++), { recursive: true });
|
||||
await cp(hugeDirectory, join(base, "huge-copy" + hugeCopyI++), { recursive: true });
|
||||
});
|
||||
|
||||
bench("cp 1 " + ((hugeText.length / 1024) | 0) + " KB file", async b => {
|
||||
await cp(hugeFilePath, join(tmpdir(), "huge-file" + hugeCopyI++));
|
||||
await cp(hugeFilePath, join(base, "huge-file" + hugeCopyI++));
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Database } from "https://deno.land/x/sqlite3@0.9.1/mod.ts";
|
||||
import { Database } from "https://deno.land/x/sqlite3@0.11.1/mod.ts";
|
||||
import { run, bench } from "../node_modules/mitata/src/cli.mjs";
|
||||
|
||||
const db = new Database("./src/northwind.sqlite");
|
||||
|
||||
@@ -629,3 +629,5 @@ pub fn configureObjectStep(b: *std.build.Builder, obj: *CompileStep, obj_step: *
|
||||
obj.link_function_sections = true;
|
||||
}
|
||||
}
|
||||
|
||||
// !
|
||||
@@ -96,6 +96,10 @@ FROM alpine:3.18
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
# Ensure `bun install -g` works
|
||||
ARG BUN_INSTALL_BIN=/usr/local/bin
|
||||
ENV BUN_INSTALL_BIN=${BUN_INSTALL_BIN}
|
||||
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/
|
||||
COPY docker-entrypoint.sh /usr/local/bin/
|
||||
|
||||
|
||||
@@ -62,6 +62,10 @@ FROM debian:bullseye-slim
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
# Ensure `bun install -g` works
|
||||
ARG BUN_INSTALL_BIN=/usr/local/bin
|
||||
ENV BUN_INSTALL_BIN=${BUN_INSTALL_BIN}
|
||||
|
||||
COPY docker-entrypoint.sh /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
|
||||
|
||||
|
||||
@@ -3,6 +3,8 @@ FROM debian:bullseye-slim AS build
|
||||
# https://github.com/oven-sh/bun/releases
|
||||
ARG BUN_VERSION=latest
|
||||
|
||||
# Node.js includes python3 for node-gyp, see https://github.com/oven-sh/bun/issues/9807
|
||||
# Though, not on slim and alpine images.
|
||||
RUN apt-get update -qq \
|
||||
&& apt-get install -qq --no-install-recommends \
|
||||
ca-certificates \
|
||||
@@ -11,6 +13,7 @@ RUN apt-get update -qq \
|
||||
gpg \
|
||||
gpg-agent \
|
||||
unzip \
|
||||
python3 \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& arch="$(dpkg --print-architecture)" \
|
||||
@@ -63,6 +66,10 @@ COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
# Ensure `bun install -g` works
|
||||
ARG BUN_INSTALL_BIN=/usr/local/bin
|
||||
ENV BUN_INSTALL_BIN=${BUN_INSTALL_BIN}
|
||||
|
||||
RUN groupadd bun \
|
||||
--gid 1000 \
|
||||
&& useradd bun \
|
||||
|
||||
@@ -62,6 +62,10 @@ FROM gcr.io/distroless/base-nossl-debian11
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
# Ensure `bun install -g` works
|
||||
ARG BUN_INSTALL_BIN=/usr/local/bin
|
||||
ENV BUN_INSTALL_BIN=${BUN_INSTALL_BIN}
|
||||
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/
|
||||
|
||||
# Temporarily use the `build`-stage image binaries to create a symlink:
|
||||
|
||||
@@ -58,17 +58,18 @@ Pass a path to the shared library and a map of symbols to import into `dlopen`:
|
||||
|
||||
```ts
|
||||
import { dlopen, FFIType, suffix } from "bun:ffi";
|
||||
const { i32 } = FFIType;
|
||||
|
||||
const path = `libadd.${suffix}`;
|
||||
|
||||
const lib = dlopen(path, {
|
||||
add: {
|
||||
args: [FFIType.i32, FFIType.i32],
|
||||
returns: FFIType.i32,
|
||||
args: [i32, i32],
|
||||
returns: i32,
|
||||
},
|
||||
});
|
||||
|
||||
lib.symbols.add(1, 2);
|
||||
console.log(lib.symbols.add(1, 2));
|
||||
```
|
||||
|
||||
### Rust
|
||||
@@ -76,7 +77,7 @@ lib.symbols.add(1, 2);
|
||||
```rust
|
||||
// add.rs
|
||||
#[no_mangle]
|
||||
pub extern "C" fn add(a: isize, b: isize) -> isize {
|
||||
pub extern "C" fn add(a: i32, b: i32) -> i32 {
|
||||
a + b
|
||||
}
|
||||
```
|
||||
@@ -87,6 +88,22 @@ To compile:
|
||||
$ rustc --crate-type cdylib add.rs
|
||||
```
|
||||
|
||||
### C++
|
||||
|
||||
```c
|
||||
#include <cstdint>
|
||||
|
||||
extern "C" int32_t add(int32_t a, int32_t b) {
|
||||
return a + b;
|
||||
}
|
||||
```
|
||||
|
||||
To compile:
|
||||
|
||||
```bash
|
||||
$ zig build-lib add.cpp -dynamic -lc -lc++
|
||||
```
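Loading the resulting library works the same way as the TypeScript example earlier in this section; a minimal sketch, assuming the compiled library ends up named `libadd.${suffix}` on your platform:

```ts
// Sketch: call the C++ `add` built above through bun:ffi.
import { dlopen, FFIType, suffix } from "bun:ffi";

const lib = dlopen(`libadd.${suffix}`, {
  add: { args: [FFIType.i32, FFIType.i32], returns: FFIType.i32 },
});

console.log(lib.symbols.add(1, 2)); // 3
```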
|
||||
|
||||
## FFI types
|
||||
|
||||
The following `FFIType` values are supported.
|
||||
|
||||
@@ -6,12 +6,12 @@ Bun implements the following properties.
|
||||
import.meta.dir; // => "/path/to/project"
|
||||
import.meta.file; // => "file.ts"
|
||||
import.meta.path; // => "/path/to/project/file.ts"
|
||||
import.meta.url; // => "file:///path/to/project/file.ts"
|
||||
|
||||
import.meta.main; // `true` if this file is directly executed by `bun run`
|
||||
// `false` otherwise
|
||||
|
||||
import.meta.resolveSync("zod")
|
||||
// resolve an import specifier relative to the directory
|
||||
import.meta.resolve("zod"); // => "file:///path/to/project/node_modules/zod/index.js"
|
||||
```
|
||||
|
||||
{% table %}
|
||||
@@ -28,13 +28,18 @@ import.meta.resolveSync("zod")
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.env`
|
||||
- An alias to `process.env`.
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.file`
|
||||
- The name of the current file, e.g. `index.tsx`
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.path`
|
||||
- Absolute path to the current file, e.g. `/path/to/project/index.tx`. Equivalent to `__filename` in CommonJS modules (and Node.js)
|
||||
- Absolute path to the current file, e.g. `/path/to/project/index.ts`. Equivalent to `__filename` in CommonJS modules (and Node.js)
|
||||
|
||||
---
|
||||
|
||||
@@ -43,30 +48,22 @@ import.meta.resolveSync("zod")
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.url`
|
||||
- A string url to the current file, e.g. `file:///path/to/project/index.tx`
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.main`
|
||||
- `boolean` Indicates whether the current file is the entrypoint to the current `bun` process. Is the file being directly executed by `bun run` or is it being imported?
|
||||
- Indicates whether the current file is the entrypoint to the current `bun` process. Is the file being directly executed by `bun run` or is it being imported?
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.env`
|
||||
- An alias to `process.env`.
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.resolve{Sync}`
|
||||
- Resolve a module specifier (e.g. `"zod"` or `"./file.tsx"`) to an absolute path. While file would be imported if the specifier were imported from this file?
|
||||
- `import.meta.resolve`
|
||||
- Resolve a module specifier (e.g. `"zod"` or `"./file.tsx"`) to a url. Equivalent to [`import.meta.resolve` in browsers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/import.meta#resolve)
|
||||
|
||||
```ts
|
||||
import.meta.resolveSync("zod");
|
||||
// => "/path/to/project/node_modules/zod/index.ts"
|
||||
|
||||
import.meta.resolveSync("./file.tsx");
|
||||
// => "/path/to/project/file.tsx"
|
||||
import.meta.resolve("zod");
|
||||
// => "file:///path/to/project/node_modules/zod/index.ts"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.url`
|
||||
- A `string` url to the current file, e.g. `file:///path/to/project/index.ts`. Equivalent to [`import.meta.url` in browsers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/import.meta#url)
|
||||
|
||||
{% /table %}
|
||||
|
||||
@@ -186,6 +186,7 @@ proc.unref();
|
||||
## Inter-process communication (IPC)
|
||||
|
||||
Bun supports direct inter-process communication channel between two `bun` processes. To receive messages from a spawned Bun subprocess, specify an `ipc` handler.
|
||||
|
||||
{%callout%}
|
||||
**Note** — This API is only compatible with other `bun` processes. Use `process.execPath` to get a path to the currently running `bun` executable.
|
||||
{%/callout%}
|
||||
@@ -227,8 +228,6 @@ process.on("message", (message) => {
|
||||
});
|
||||
```
|
||||
|
||||
All messages are serialized using the JSC `serialize` API, which allows for the same set of [transferrable types](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects) supported by `postMessage` and `structuredClone`, including strings, typed arrays, streams, and objects.
|
||||
|
||||
```ts#child.ts
|
||||
// send a string
|
||||
process.send("Hello from child as string");
|
||||
@@ -237,6 +236,11 @@ process.send("Hello from child as string");
|
||||
process.send({ message: "Hello from child as object" });
|
||||
```
|
||||
|
||||
The `ipcMode` option controls the underlying communication format between the two processes:
|
||||
|
||||
- `advanced`: (default) Messages are serialized using the JSC `serialize` API, which supports cloning [everything `structuredClone` supports](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm). This does not support transferring ownership of objects.
|
||||
- `json`: Messages are serialized using `JSON.stringify` and `JSON.parse`, which does not support as many object types as `advanced` does.
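A short sketch of opting into the `json` format via the `ipcMode` option described above (the child script is hypothetical):

```ts
// Sketch: JSON-serialized IPC — only values JSON.stringify can round-trip
// survive the channel. "./child.ts" is a hypothetical script.
const child = Bun.spawn([process.execPath, "child.ts"], {
  ipcMode: "json",
  ipc(message) {
    console.log("received:", message);
  },
});

child.send({ ok: true }); // plain JSON-compatible data only
```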
|
||||
|
||||
## Blocking API (`Bun.spawnSync()`)
|
||||
|
||||
Bun provides a synchronous equivalent of `Bun.spawn` called `Bun.spawnSync`. This is a blocking API that supports the same inputs and parameters as `Bun.spawn`. It returns a `SyncSubprocess` object, which differs from `Subprocess` in a few ways.
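For illustration, a minimal `Bun.spawnSync` call might look like this; the call blocks until the process exits and returns the collected output:

```ts
// Sketch: Bun.spawnSync blocks until the subprocess exits and returns
// the collected output instead of streaming it.
const result = Bun.spawnSync(["echo", "hello"]);

console.log(result.success);           // true when the exit code is 0
console.log(result.exitCode);          // 0
console.log(result.stdout.toString()); // "hello\n"
```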
|
||||
|
||||
@@ -115,7 +115,7 @@ Use `Bun.connect` to connect to a TCP server. Specify the server to connect to w
|
||||
|
||||
```ts
|
||||
// The client
|
||||
const socket = Bun.connect({
|
||||
const socket = await Bun.connect({
|
||||
hostname: "localhost",
|
||||
port: 8080,
|
||||
|
||||
@@ -138,7 +138,7 @@ To require TLS, specify `tls: true`.
|
||||
|
||||
```ts
|
||||
// The client
|
||||
const socket = Bun.connect({
|
||||
const socket = await Bun.connect({
|
||||
// ... config
|
||||
tls: true,
|
||||
});
|
||||
@@ -164,7 +164,7 @@ server.reload({
|
||||
```
|
||||
|
||||
```ts#Client
|
||||
const socket = Bun.connect({ /* config */ })
|
||||
const socket = await Bun.connect({ /* config */ })
|
||||
socket.reload({
|
||||
data(){
|
||||
// new 'data' handler
|
||||
|
||||
@@ -635,7 +635,7 @@ Bun.resolveSync("zod", "/path/to/project");
|
||||
// => "/path/to/project/node_modules/zod/index.ts"
|
||||
```
|
||||
|
||||
To resolve relative to the current working directory, pass `process.cwd` or `"."` as the root.
|
||||
To resolve relative to the current working directory, pass `process.cwd()` or `"."` as the root.
|
||||
|
||||
```ts
|
||||
Bun.resolveSync("./foo.ts", process.cwd());
|
||||
|
||||
@@ -26,7 +26,6 @@ All imported files and packages are bundled into the executable, along with a co
|
||||
**Note** — Currently, the `--compile` flag can only accept a single entrypoint at a time and does not support the following flags:
|
||||
|
||||
- `--outdir` — use `outfile` instead.
|
||||
- `--external`
|
||||
- `--splitting`
|
||||
- `--public-path`
|
||||
|
||||
|
||||
@@ -156,7 +156,7 @@ Like the Bun runtime, the bundler supports an array of file types out of the box
|
||||
---
|
||||
|
||||
- `.js` `.jsx`, `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx`
|
||||
- Uses Bun's built-in transpiler to parse the file and transpile TypeScript/JSX syntax to vanilla JavaScript. The bundler executes a set of default transforms, including dead code elimination, tree shaking, and environment variable inlining. At the moment Bun does not attempt to down-convert syntax; if you use recently ECMAScript syntax, that will be reflected in the bundled code.
|
||||
- Uses Bun's built-in transpiler to parse the file and transpile TypeScript/JSX syntax to vanilla JavaScript. The bundler executes a set of default transforms including dead code elimination and tree shaking. At the moment Bun does not attempt to down-convert syntax; if you use recently ECMAScript syntax, that will be reflected in the bundled code.
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ Bun uses the file extension to determine which built-in _loader_ should be used
|
||||
|
||||
**JavaScript**. Default for `.cjs` and `.mjs`.
|
||||
|
||||
Parses the code and applies a set of default transforms, like dead-code elimination, tree shaking, and environment variable inlining. Note that Bun does not attempt to down-convert syntax at the moment.
|
||||
Parses the code and applies a set of default transforms like dead-code elimination and tree shaking. Note that Bun does not attempt to down-convert syntax at the moment.
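When bundling with `Bun.build`, the extension-based default can also be overridden per extension; a hedged sketch, where the entrypoint and the `.data` extension are placeholder names:

```ts
// Sketch: map an extension to a specific loader instead of relying on the
// default chosen from the file extension. "./index.ts" and ".data" are
// hypothetical names for illustration.
const result = await Bun.build({
  entrypoints: ["./index.ts"],
  outdir: "./out",
  loader: {
    ".data": "file", // imports ending in .data go through the file loader
  },
});

console.log(result.success);
```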
|
||||
|
||||
### `jsx`
|
||||
|
||||
@@ -178,7 +178,7 @@ In the bundler, `.node` files are handled using the [`file`](#file) loader.
|
||||
In the runtime and bundler, SQLite databases can be directly imported. This will load the database using [`bun:sqlite`](/docs/api/sqlite.md).
|
||||
|
||||
```ts
|
||||
import db from "./my.db" with {type: "sqlite"};
|
||||
import db from "./my.db" with { type: "sqlite" };
|
||||
```
|
||||
|
||||
This is only supported when the `target` is `bun`.
|
||||
@@ -189,21 +189,21 @@ You can change this behavior with the `"embed"` attribute:
|
||||
|
||||
```ts
|
||||
// embed the database into the bundle
|
||||
import db from "./my.db" with {type: "sqlite", embed: "true"};
|
||||
import db from "./my.db" with { type: "sqlite", embed: "true" };
|
||||
```
|
||||
|
||||
When using a [standalone executable](/docs/bundler/executables), the database is embedded into the single-file executable.
|
||||
|
||||
Otherwise, the database to embed is copied into the `outdir` with a hashed filename.
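As a sketch of the bundling step this refers to (file paths here are placeholders), a `target: "bun"` build with an `outdir` emits the bundle there, and a database imported with `embed: "true"` is copied alongside it under a hashed name:

```ts
// Bundle a script that imports ./my.db; target "bun" is required for sqlite imports.
await Bun.build({
  entrypoints: ["./index.ts"],
  outdir: "./out",
  target: "bun",
});
```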
|
||||
|
||||
### `bunshell` loader
|
||||
### `sh` loader
|
||||
|
||||
**Bun Shell loader**. Default for `.bun.sh` files
|
||||
**Bun Shell loader**. Default for `.sh` files
|
||||
|
||||
This loader is used to parse [Bun Shell](/docs/runtime/shell) scripts. It's only supported when starting bun itself, so it's not available in the bundler or in the runtime.
|
||||
This loader is used to parse [Bun Shell](/docs/runtime/shell) scripts. It's only supported when starting Bun itself, so it's not available in the bundler or in the runtime.
|
||||
|
||||
```sh
|
||||
$ bun run ./script.bun.sh
|
||||
$ bun run ./script.sh
|
||||
```
|
||||
|
||||
### `file`
|
||||
|
||||
@@ -7,7 +7,7 @@ There are a few behavioral differences to note.
|
||||
|
||||
## Performance
|
||||
|
||||
With an performance-minded API coupled with the extensively optimized Zig-based JS/TS parser, Bun's bundler is 1.75x faster than esbuild on esbuild's [three.js benchmark](https://github.com/oven-sh/bun/tree/main/bench/bundle).
|
||||
With a performance-minded API coupled with the extensively optimized Zig-based JS/TS parser, Bun's bundler is 1.75x faster than esbuild on esbuild's [three.js benchmark](https://github.com/oven-sh/bun/tree/main/bench/bundle).
|
||||
|
||||
{% image src="/images/bundler-speed.png" caption="Bundling 10 copies of three.js from scratch, with sourcemaps and minification" /%}
|
||||
|
||||
|
||||
@@ -77,4 +77,4 @@ Bun automatically loads environment variables from `.env` files before running a
|
||||
2. `NODE_ENV` === `"production"` ? `.env.production` : `.env.development`
|
||||
3. `.env`
|
||||
|
||||
To debug environment variables, run `bun run env` to view a list of resolved environment variables. -->
|
||||
To debug environment variables, run `bun --print process.env` to view a list of resolved environment variables. -->
|
||||
|
||||
@@ -165,7 +165,7 @@ You can also use `bun run -` to redirect files into Bun. For example, to run a `
|
||||
```bash
|
||||
$ echo "console.log!('This is TypeScript!' as any)" > secretly-typescript.js
|
||||
$ bun run - < secretly-typescript.js
|
||||
Hello
|
||||
This is TypeScript!
|
||||
```
|
||||
|
||||
For convenience, all code is treated as TypeScript with JSX support when using `bun run -`.
|
||||
|
||||
@@ -18,10 +18,10 @@ Bun.env.API_TOKEN; // => "secret"
|
||||
|
||||
---
|
||||
|
||||
To print all currently-set environment variables to the command line, run `bun run env`. This is useful for debugging.
|
||||
To print all currently-set environment variables to the command line, run `bun --print process.env`. This is useful for debugging.
|
||||
|
||||
```sh
|
||||
$ bun run env
|
||||
$ bun --print process.env
|
||||
BAZ=stuff
|
||||
FOOBAR=aaaaaa
|
||||
<lots more lines>
|
||||
|
||||
@@ -37,4 +37,4 @@ for await (const line of $`ls -l`.lines()) {
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Shell](/api/shell) for complete documentation.
|
||||
See [Docs > API > Shell](/docs/runtime/shell) for complete documentation.
|
||||
|
||||
@@ -16,7 +16,7 @@ Below is the full set of recommended `compilerOptions` for a Bun project. With t
|
||||
{
|
||||
"compilerOptions": {
|
||||
// Enable latest features
|
||||
"lib": ["ESNext"],
|
||||
"lib": ["ESNext","DOM"],
|
||||
"target": "ESNext",
|
||||
"module": "ESNext",
|
||||
"moduleDetection": "force",
|
||||
|
||||
@@ -4,7 +4,7 @@ name: Migrate from Jest to Bun's test runner
|
||||
|
||||
In many cases, Bun's test runner can run Jest test suites with no code changes. Just run `bun test` instead of `npx jest`, `yarn test`, etc.
|
||||
|
||||
```sh-diff
|
||||
```sh
|
||||
- $ npx jest
|
||||
- $ yarn test
|
||||
+ $ bun test
|
||||
@@ -57,7 +57,7 @@ Replace `bail` in your Jest config with the `--bail` CLI flag.
|
||||
- };
|
||||
``` -->
|
||||
|
||||
```sh-diff
|
||||
```sh
|
||||
$ bun test --bail 3
|
||||
```
|
||||
|
||||
|
||||
@@ -12,4 +12,4 @@ Bun.which("bun"); // => "/home/user/.bun/bin/bun"
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > Utils](/api/utils#bun-which) for complete documentation.
|
||||
See [Docs > API > Utils](/docs/api/utils#bun-which) for complete documentation.
|
||||
|
||||
@@ -42,21 +42,20 @@ $ proto install bun
|
||||
Bun requires a minimum of Windows 10 version 1809
|
||||
{% /callout %}
|
||||
|
||||
Bun provides a _limited, experimental_ native build for Windows. It is recommended to use Bun within [Windows Subsystem for Linux](https://learn.microsoft.com/en-us/windows/wsl/install) and follow the above instructions. To help catch bugs, the experimental build enables many debugging assertions, which make the binary slower than the eventual stable release.
|
||||
|
||||
To install, paste this into a terminal:
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```powershell#PowerShell/cmd.exe
|
||||
# WARNING: No stability is guaranteed on the experimental Windows builds
|
||||
powershell -c "irm bun.sh/install.ps1|iex"
|
||||
> powershell -c "irm bun.sh/install.ps1|iex"
|
||||
```
|
||||
|
||||
```powershell#npm
|
||||
> npm install -g bun # the last `npm` command you'll ever need
|
||||
```
|
||||
|
||||
```powershell#Scoop
|
||||
# WARNING: No stability is guaranteed on the experimental Windows builds
|
||||
scoop bucket add versions
|
||||
scoop install bun-canary
|
||||
> scoop install bun
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
@@ -145,6 +144,8 @@ $ bun upgrade
|
||||
{% callout %}
|
||||
**Homebrew users** — To avoid conflicts with Homebrew, use `brew upgrade bun` instead.
|
||||
|
||||
**Scoop users** — To avoid conflicts with Scoop, use `scoop upgrade bun` instead.
|
||||
|
||||
**proto users** - Use `proto install bun --pin` instead.
|
||||
{% /callout %}
|
||||
|
||||
@@ -232,11 +233,15 @@ If you need to remove Bun from your system, use the following commands.
|
||||
$ rm -rf ~/.bun # for macOS, Linux, and WSL
|
||||
```
|
||||
|
||||
```bash#Windows
|
||||
$ Remove-Item ~\.bun -Recurse
|
||||
```powershell#Windows
|
||||
> powershell -c ~\.bun\uninstall.ps1
|
||||
```
|
||||
|
||||
```bash#NPM
|
||||
```powershell#Scoop
|
||||
> scoop uninstall bun
|
||||
```
|
||||
|
||||
```bash#npm
|
||||
$ npm uninstall -g bun
|
||||
```
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ I recommend using VSCode through SSH instead of Tunnels or the Tailscale extensi
|
||||
By default, running unverified scripts is blocked.
|
||||
|
||||
```ps1
|
||||
Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted
|
||||
> Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted
|
||||
```
|
||||
|
||||
### System Dependencies
|
||||
@@ -47,7 +47,7 @@ Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted
|
||||
- Bun 1.1 or later. We use Bun to run its own code generators.
|
||||
|
||||
```ps1
|
||||
irm bun.sh/install.ps1 | iex
|
||||
> irm bun.sh/install.ps1 | iex
|
||||
```
|
||||
|
||||
- [Visual Studio](https://visualstudio.microsoft.com) with the "Desktop Development with C++" workload.
|
||||
@@ -70,28 +70,28 @@ The Zig compiler is automatically downloaded, installed, and updated by the buil
|
||||
[Scoop](https://scoop.sh) can be used to install these remaining tools easily:
|
||||
|
||||
```ps1
|
||||
irm https://get.scoop.sh | iex
|
||||
|
||||
scoop install nodejs-lts go rust nasm ruby perl
|
||||
scoop llvm@16.0.4 # scoop bug if you install llvm and the rest at the same time
|
||||
> irm https://get.scoop.sh | iex
|
||||
> scoop install nodejs-lts go rust nasm ruby perl
|
||||
# scoop seems to be buggy if you install llvm and the rest at the same time
|
||||
> scoop install llvm@16.0.4
|
||||
```
|
||||
|
||||
If you intend on building WebKit locally (optional), you should install these packages:
|
||||
|
||||
```ps1
|
||||
scoop install make cygwin python
|
||||
> scoop install make cygwin python
|
||||
```
|
||||
|
||||
From here on out, it is **expected you use a PowerShell Terminal with `.\scripts\env.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it:
|
||||
|
||||
```ps1
|
||||
.\scripts\env.ps1
|
||||
> .\scripts\env.ps1
|
||||
```
|
||||
|
||||
To verify, you can check for an MSVC-only command line such as `mt.exe`
|
||||
|
||||
```ps1
|
||||
Get-Command mt
|
||||
> Get-Command mt
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
@@ -101,24 +101,24 @@ It is not recommended to install `ninja` / `cmake` into your global path, becaus
|
||||
## Building
|
||||
|
||||
```ps1
|
||||
bun install
|
||||
> bun install
|
||||
|
||||
.\scripts\env.ps1
|
||||
.\scripts\update-submodules.ps1 # this syncs git submodule state
|
||||
.\scripts\all-dependencies.ps1 # this builds all dependencies
|
||||
.\scripts\make-old-js.ps1 # runs some old code generators
|
||||
> .\scripts\env.ps1
|
||||
> .\scripts\update-submodules.ps1 # this syncs git submodule state
|
||||
> .\scripts\all-dependencies.ps1 # this builds all dependencies
|
||||
> .\scripts\make-old-js.ps1 # runs some old code generators
|
||||
|
||||
# Configure build environment
|
||||
cmake -Bbuild -GNinja -DCMAKE_BUILD_TYPE=Debug
|
||||
> cmake -Bbuild -GNinja -DCMAKE_BUILD_TYPE=Debug
|
||||
|
||||
# Build bun
|
||||
ninja -Cbuild
|
||||
> ninja -Cbuild
|
||||
```
|
||||
|
||||
If this was successful, you should have a `bun-debug.exe` in the `build` folder.
|
||||
|
||||
```ps1
|
||||
.\build\bun-debug.exe --revision
|
||||
> .\build\bun-debug.exe --revision
|
||||
```
|
||||
|
||||
You should add this to `$Env:PATH`. The simplest way to do so is to open the start menu, type "Path", and then navigate the environment variables menu to add `C:\.....\bun\build` to the user environment variable `PATH`. You should then restart your editor (if it still does not update, log out and log back in).
|
||||
@@ -134,15 +134,15 @@ You can run the test suite either using `bun test`, or by using the wrapper scri
|
||||
|
||||
```ps1
|
||||
# Setup
|
||||
bun i --cwd packages\bun-internal-test
|
||||
> bun i --cwd packages\bun-internal-test
|
||||
|
||||
# Run the entire test suite with reporter
|
||||
# the package.json script "test" uses "build/bun-debug.exe" by default
|
||||
bun run test
|
||||
> bun run test
|
||||
|
||||
# Run an individual test file:
|
||||
bun-debug test node\fs
|
||||
bun-debug test "C:\bun\test\js\bun\resolve\import-meta.test.js"
|
||||
> bun-debug test node\fs
|
||||
> bun-debug test "C:\bun\test\js\bun\resolve\import-meta.test.js"
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
@@ -30,32 +30,25 @@ $ sudo zypper install go cmake ninja automake git rustup && rustup toolchain ins
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
> **Note**: The Zig compiler is automatically installed and updated by the build scripts. Manual installation is not required.
|
||||
|
||||
Before starting, you will need to already have a release build of Bun installed, as we use our bundler to transpile and minify our code, as well as for code generation scripts.
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```bash#Native
|
||||
$ curl -fsSL https://bun.sh/install | bash # for macOS, Linux, and WSL
|
||||
$ curl -fsSL https://bun.sh/install | bash
|
||||
```
|
||||
|
||||
```bash#npm
|
||||
$ npm install -g bun # the last `npm` command you'll ever need
|
||||
$ npm install -g bun
|
||||
```
|
||||
|
||||
```bash#Homebrew
|
||||
$ brew tap oven-sh/bun # for macOS and Linux
|
||||
$ brew tap oven-sh/bun
|
||||
$ brew install bun
|
||||
```
|
||||
|
||||
```bash#Docker
|
||||
$ docker pull oven/bun
|
||||
$ docker run --rm --init --ulimit memlock=-1:-1 oven/bun
|
||||
```
|
||||
|
||||
```bash#proto
|
||||
$ proto install bun
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
## Install LLVM
|
||||
@@ -144,12 +137,14 @@ $ cmake -S . -B build -G Ninja -DCMAKE_BUILD_TYPE=Debug
|
||||
$ ninja -C build # 'bun run build' runs just this
|
||||
```
|
||||
|
||||
Advanced uses can pass CMake flags to customize the build.
|
||||
Advanced users can pass CMake flags to customize the build.
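For example, overriding the build type is one such customization; the flag below is purely illustrative:

```bash
$ cmake -S . -B build -G Ninja -DCMAKE_BUILD_TYPE=Release
$ ninja -C build
```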
|
||||
|
||||
## VSCode
|
||||
|
||||
VSCode is the recommended IDE for working on Bun, as the repository ships with a configuration for it. Once the folder is open, you can run `Extensions: Show Recommended Extensions` to install the recommended extensions for Zig and C++. ZLS is automatically configured.
|
||||
|
||||
If you use a different editor, make sure that you tell ZLS to use the automatically installed Zig compiler, which is located at `./.cache/zig/zig` (`zig.exe` on Windows).
|
||||
|
||||
## Code generation scripts
|
||||
|
||||
{% callout %}
|
||||
@@ -312,8 +307,7 @@ $ xcode-select --install
|
||||
Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:
|
||||
|
||||
```bash
|
||||
$ cmake -Bbuild -GNinja -DUSE_STATIC_LIBATOMIC=ON
|
||||
$ ninja -Cbuild
|
||||
$ bun setup -DUSE_STATIC_LIBATOMIC=OFF
|
||||
```
|
||||
|
||||
The built version of Bun may not work on other systems if compiled this way.
|
||||
|
||||
@@ -98,10 +98,10 @@ Bun.env.API_TOKEN; // => "secret"
|
||||
import.meta.env.API_TOKEN; // => "secret"
|
||||
```
|
||||
|
||||
To print all currently-set environment variables to the command line, run `bun run env`. This is useful for debugging.
|
||||
To print all currently-set environment variables to the command line, run `bun --print process.env`. This is useful for debugging.
|
||||
|
||||
```sh
|
||||
$ bun run env
|
||||
$ bun --print process.env
|
||||
BAZ=stuff
|
||||
FOOBAR=aaaaaa
|
||||
<lots more lines>
|
||||
|
||||
@@ -18,7 +18,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
### [`node:child_process`](https://nodejs.org/api/child_process.html)
|
||||
|
||||
🟡 Missing `Stream` stdio, `proc.gid` `proc.uid`. IPC has partial support and only current only works with other `bun` processes.
|
||||
🟡 Missing `Stream` stdio, `proc.gid` `proc.uid`. IPC cannot send socket handles and only works with other `bun` processes.
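As a rough sketch of the Bun-to-Bun IPC that is supported (the file names are placeholders, and only serializable messages can be exchanged, not socket handles):

```ts
// parent.ts — spawn another bun process and exchange plain messages over IPC.
const child = Bun.spawn(["bun", "child.ts"], {
  ipc(message) {
    console.log("parent received:", message);
  },
});
child.send("ping");

// child.ts would reply with something like:
// process.on("message", message => process.send(`pong: ${message}`));
```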
|
||||
|
||||
### [`node:cluster`](https://nodejs.org/api/cluster.html)
|
||||
|
||||
@@ -56,7 +56,7 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:fs`](https://nodejs.org/api/fs.html)
|
||||
|
||||
🟡 Missing `Dir` `openAsBlob` `opendir` `opendirSync` `statfs` `statfsSync`
|
||||
🟡 Missing `statfs` `statfsSync`, `opendirSync`. `Dir` is partially implemented.
|
||||
|
||||
### [`node:http`](https://nodejs.org/api/http.html)
|
||||
|
||||
|
||||
@@ -63,7 +63,7 @@ Plugins are primarily used to extend Bun with loaders for additional file types.
|
||||
```ts#yamlPlugin.ts
|
||||
import { plugin } from "bun";
|
||||
|
||||
plugin({
|
||||
await plugin({
|
||||
name: "YAML",
|
||||
async setup(build) {
|
||||
const { load } = await import("js-yaml");
|
||||
@@ -179,7 +179,7 @@ Loading a YAML file is useful, but plugins support more than just data loading.
|
||||
```ts#sveltePlugin.ts
|
||||
import { plugin } from "bun";
|
||||
|
||||
plugin({
|
||||
await plugin({
|
||||
name: "svelte loader",
|
||||
async setup(build) {
|
||||
const { compile } = await import("svelte/compiler");
|
||||
|
||||
@@ -1,9 +1,5 @@
|
||||
Bun Shell makes shell scripting with JavaScript & TypeScript fun. It's a cross-platform bash-like shell with seamless JavaScript interop.
|
||||
|
||||
{% callout type="note" %}
|
||||
**Alpha-quality software**: Bun Shell is an unstable API still under development. If you have feature requests or run into bugs, please open an issue. There may be breaking changes in the future.
|
||||
{% /callout %}
|
||||
|
||||
Quickstart:
|
||||
|
||||
```js
|
||||
@@ -12,7 +8,7 @@ import { $ } from "bun";
|
||||
const response = await fetch("https://example.com");
|
||||
|
||||
// Use Response as stdin.
|
||||
await $`echo < ${response} > wc -c`; // 120
|
||||
await $`cat < ${response} | wc -c`; // 1256
|
||||
```
|
||||
|
||||
## Features:
|
||||
@@ -23,6 +19,8 @@ await $`echo < ${response} > wc -c`; // 120
|
||||
- **Template literals**: Template literals are used to execute shell commands. This allows for easy interpolation of variables and expressions.
|
||||
- **Safety**: Bun Shell escapes all strings by default, preventing shell injection attacks.
|
||||
- **JavaScript interop**: Use `Response`, `ArrayBuffer`, `Blob`, `Bun.file(path)` and other JavaScript objects as stdin, stdout, and stderr.
|
||||
- **Shell scripting**: Bun Shell can be used to run shell scripts (`.bun.sh` files).
|
||||
- **Custom interpreter**: Bun Shell is written in Zig, along with its lexer, parser, and interpreter. Bun Shell is a small programming language.
|
||||
|
||||
## Getting started
|
||||
|
||||
@@ -53,30 +51,81 @@ const welcome = await $`echo "Hello World!"`.text();
|
||||
console.log(welcome); // Hello World!\n
|
||||
```
|
||||
|
||||
To get stdout, stderr, and the exit code, use await or `.run`:
|
||||
By default, `await`ing will return stdout and stderr as `Buffer`s.
|
||||
|
||||
```js
|
||||
import { $ } from "bun";
|
||||
|
||||
const { stdout, stderr, exitCode } = await $`echo "Hello World!"`.quiet();
|
||||
const { stdout, stderr } = await $`echo "Hello World!"`.quiet();
|
||||
|
||||
console.log(stdout); // Buffer(6) [ 72, 101, 108, 108, 111, 32 ]
|
||||
console.log(stderr); // Buffer(0) []
|
||||
console.log(exitCode); // 0
|
||||
```
|
||||
|
||||
## Error handling
|
||||
|
||||
By default, non-zero exit codes will throw an error. This `ShellError` contains information about the command run.
|
||||
|
||||
```js
|
||||
import { $ } from "bun";
|
||||
|
||||
try {
|
||||
const output = await $`something-that-may-fail`.text();
|
||||
console.log(output);
|
||||
} catch (err) {
|
||||
console.log(`Failed with code ${err.exitCode}`);
|
||||
console.log(err.stdout.toString());
|
||||
console.log(err.stderr.toString());
|
||||
}
|
||||
```
|
||||
|
||||
Throwing can be disabled with `.nothrow()`. The result's `exitCode` will need to be checked manually.
|
||||
|
||||
```js
|
||||
import { $ } from "bun";
|
||||
|
||||
const { stdout, stderr, exitCode } = await $`something-that-may-fail`
|
||||
.nothrow()
|
||||
.quiet();
|
||||
|
||||
if (exitCode !== 0) {
|
||||
console.log(`Non-zero exit code ${exitCode}`);
|
||||
}
|
||||
|
||||
console.log(stdout);
|
||||
console.log(stderr);
|
||||
```
|
||||
|
||||
The default handling of non-zero exit codes can be configured by calling `.nothrow()` or `.throws(boolean)` on the `$` function itself.
|
||||
|
||||
```js
|
||||
import { $ } from "bun";
|
||||
// shell promises will not throw, meaning you will have to
|
||||
// check for `exitCode` manually on every shell command.
|
||||
$.nothrow(); // equivalent to $.throws(false)
|
||||
|
||||
// default behavior, non-zero exit codes will throw an error
|
||||
$.throws(true);
|
||||
|
||||
// alias for $.nothrow()
|
||||
$.throws(false);
|
||||
|
||||
await $`something-that-may-fail`; // No exception thrown
|
||||
```
|
||||
|
||||
## Redirection
|
||||
|
||||
A command's _input_ or _output_ may be _redirected_ using the typical Bash operators:
|
||||
- `<` redirect stdin
|
||||
- `>` or `1>` redirect stdout
|
||||
- `2>` redirect stderr
|
||||
- `&>` redirect both stdout and stderr
|
||||
|
||||
- `<` redirect stdin
|
||||
- `>` or `1>` redirect stdout
|
||||
- `2>` redirect stderr
|
||||
- `&>` redirect both stdout and stderr
|
||||
- `>>` or `1>>` redirect stdout, _appending_ to the destination, instead of overwriting
|
||||
- `2>>` redirect stderr, _appending_ to the destination, instead of overwriting
|
||||
- `&>>` redirect both stdout and stderr, _appending_ to the destination, instead of overwriting
|
||||
- `1>&2` redirect stdout to stderr (all writes to stdout will instead be in stderr)
|
||||
- `2>&1` redirect stderr to stdout (all writes to stderr will instead be in stdout)
|
||||
- `2>>` redirect stderr, _appending_ to the destination, instead of overwriting
|
||||
- `&>>` redirect both stdout and stderr, _appending_ to the destination, instead of overwriting
|
||||
- `1>&2` redirect stdout to stderr (all writes to stdout will instead be in stderr)
|
||||
- `2>&1` redirect stderr to stdout (all writes to stderr will instead be in stdout)
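For instance, the append forms behave like their Bash counterparts; a small sketch where the file name is a placeholder:

```js
import { $ } from "bun";

// ">>" appends to app.log instead of overwriting it.
await $`echo "first run" >> app.log`;
await $`echo "second run" >> app.log`;
```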
|
||||
|
||||
Bun Shell also supports redirecting from and to JavaScript objects.
|
||||
|
||||
@@ -88,9 +137,8 @@ To redirect stdout to a JavaScript object, use the `>` operator:
|
||||
import { $ } from "bun";
|
||||
|
||||
const buffer = Buffer.alloc(100);
|
||||
const result = await $`echo "Hello World!" > ${buffer}`;
|
||||
await $`echo "Hello World!" > ${buffer}`;
|
||||
|
||||
console.log(result.exitCode); // 0
|
||||
console.log(buffer.toString()); // Hello World!\n
|
||||
```
|
||||
|
||||
@@ -104,7 +152,7 @@ The following JavaScript objects are supported for redirection to:
|
||||
To redirect the output from JavaScript objects to stdin, use the `<` operator:
|
||||
|
||||
```js
|
||||
import { $, file } from "bun";
|
||||
import { $ } from "bun";
|
||||
|
||||
const response = new Response("hello i am a response body");
|
||||
|
||||
@@ -122,46 +170,45 @@ The following JavaScript objects are supported for redirection from:
|
||||
### Example: Redirect stdin -> file
|
||||
|
||||
```js
|
||||
import { $ } from "bun"
|
||||
import { $ } from "bun";
|
||||
|
||||
await $`cat < myfile.txt`
|
||||
await $`cat < myfile.txt`;
|
||||
```
|
||||
|
||||
### Example: Redirect stdout -> file
|
||||
|
||||
```js
|
||||
import { $ } from "bun"
|
||||
import { $ } from "bun";
|
||||
|
||||
await $`echo bun! > greeting.txt`
|
||||
await $`echo bun! > greeting.txt`;
|
||||
```
|
||||
|
||||
### Example: Redirect stderr -> file
|
||||
|
||||
```js
|
||||
import { $ } from "bun"
|
||||
import { $ } from "bun";
|
||||
|
||||
await $`bun run index.ts 2> errors.txt`
|
||||
await $`bun run index.ts 2> errors.txt`;
|
||||
```
|
||||
|
||||
|
||||
### Example: Redirect stdout -> stderr
|
||||
|
||||
```js
|
||||
import { $ } from "bun"
|
||||
import { $ } from "bun";
|
||||
|
||||
// redirects stderr to stdout, so all output
|
||||
// will be available on stdout
|
||||
await $`bun run ./index.ts 2>&1`
|
||||
await $`bun run ./index.ts 2>&1`;
|
||||
```
|
||||
|
||||
### Example: Redirect stderr -> stdout
|
||||
|
||||
```js
|
||||
import { $ } from "bun"
|
||||
import { $ } from "bun";
|
||||
|
||||
// redirects stdout to stderr, so all output
|
||||
// will be available on stderr
|
||||
await $`bun run ./index.ts 1>&2`
|
||||
await $`bun run ./index.ts 1>&2`;
|
||||
```
|
||||
|
||||
## Piping (`|`)
|
||||
@@ -352,6 +399,18 @@ For cross-platform compatibility, Bun Shell implements a set of builtin commands
|
||||
- `echo`: print text
|
||||
- `pwd`: print the working directory
|
||||
- `bun`: run bun in bun
|
||||
- `cat`
|
||||
- `touch`
|
||||
- `mkdir`
|
||||
- `which`
|
||||
- `mv`
|
||||
- `exit`
|
||||
- `true`
|
||||
- `false`
|
||||
- `yes`
|
||||
- `seq`
|
||||
- `dirname`
|
||||
- `basename`
|
||||
|
||||
**Partially** implemented:
|
||||
|
||||
@@ -359,9 +418,7 @@ For cross-platform compatibility, Bun Shell implements a set of builtin commands
|
||||
|
||||
**Not** implemented yet, but planned:
|
||||
|
||||
- `mkdir`: create directories
|
||||
- `cp`: copy files and directories
|
||||
- `cat`: concatenate files
|
||||
- See https://github.com/oven-sh/bun/issues/9716 for the full list.
|
||||
|
||||
## Utilities
|
||||
|
||||
@@ -385,7 +442,7 @@ Exposes Bun Shell's escaping logic as a function:
|
||||
```js
|
||||
import { $ } from "bun";
|
||||
|
||||
console.log($.escape('$(foo) `bar` "baz"'))
|
||||
console.log($.escape('$(foo) `bar` "baz"'));
|
||||
// => \$(foo) \`bar\` \"baz\"
|
||||
```
|
||||
|
||||
@@ -394,7 +451,7 @@ If you do not want your string to be escaped, wrap it in a `{ raw: 'str' }` obje
|
||||
```js
|
||||
import { $ } from "bun";
|
||||
|
||||
await $`echo ${{ raw: '$(foo) `bar` "baz"' }}`
|
||||
await $`echo ${{ raw: '$(foo) `bar` "baz"' }}`;
|
||||
// => bun: command not found: foo
|
||||
// => bun: command not found: bar
|
||||
// => baz
|
||||
@@ -417,11 +474,15 @@ Hello World! pwd=/home/demo
|
||||
|
||||
Scripts with Bun Shell are cross platform, which means they work on Windows:
|
||||
|
||||
```
|
||||
PS C:\Users\Demo> bun .\script.sh
|
||||
```powershell
|
||||
> bun .\script.sh
|
||||
Hello World! pwd=C:\Users\Demo
|
||||
```
|
||||
|
||||
## Implementation notes
|
||||
|
||||
Bun Shell is a small programming language in Bun that is implemented in Zig. It includes a handwritten lexer, parser, and interpreter. Unlike bash, zsh, and other shells, Bun Shell runs operations concurrently.
|
||||
|
||||
## Credits
|
||||
|
||||
Large parts of this API were inspired by [zx](https://github.com/google/zx), [dax](https://github.com/dsherret/dax), and [bnx](https://github.com/wobsoriano/bnx). Thank you to the authors of those projects.
|
||||
|
||||
@@ -57,7 +57,7 @@ coverageThreshold = { lines = 0.9, functions = 0.9 }
|
||||
|
||||
### Sourcemaps
|
||||
|
||||
Internally, Bun transpiles all files by default, so Bun automatically generates an internal [source map](https://web.dev/source-maps/) that maps lines of your original source code onto Bun's internal representation. If for any reason you want to disable this, set `test.coverageIgnoreSourcemaps` to `false`; this will rarely be desirable outside of advanced use cases.
|
||||
Internally, Bun transpiles all files by default, so Bun automatically generates an internal [source map](https://web.dev/source-maps/) that maps lines of your original source code onto Bun's internal representation. If for any reason you want to disable this, set `test.coverageIgnoreSourcemaps` to `true`; this will rarely be desirable outside of advanced use cases.
|
||||
|
||||
```toml
|
||||
[test]
|
||||
|
||||
@@ -156,6 +156,8 @@ test.if(macOS)("runs on macOS", () => {
|
||||
});
|
||||
```
|
||||
|
||||
## `test.skipIf`
|
||||
|
||||
To instead skip a test based on some condition, use `test.skipIf()` or `describe.skipIf()`.
|
||||
|
||||
```ts
|
||||
@@ -166,16 +168,32 @@ test.skipIf(macOS)("runs on non-macOS", () => {
|
||||
});
|
||||
```
|
||||
|
||||
## `test.todoIf`
|
||||
|
||||
If instead you want to mark the test as TODO, use `test.todoIf()` or `describe.todoIf()`. Choosing carefully between `skipIf` and `todoIf` communicates intent, for example "invalid for this target" versus "planned but not implemented yet."
|
||||
|
||||
```ts
|
||||
const macOS = process.platform === "darwin";
|
||||
|
||||
// TODO: we've only implemented this for Linux so far.
|
||||
test.todoIf(macOS)("runs on posix", () => {
|
||||
// runs if *not* macOS
|
||||
});
|
||||
```
|
||||
|
||||
## `test.each`
|
||||
|
||||
To run the same test across multiple cases in a table of data, use `test.each`.
|
||||
|
||||
```ts
|
||||
const cases = [[1, 2, 3], [3, 4, 5]];
|
||||
const cases = [
|
||||
[1, 2, 3],
|
||||
[3, 4, 5],
|
||||
];
|
||||
|
||||
test.each(cases)("%p + %p should be %p", (a, b, expected) => {
|
||||
// runs once for each test case provided
|
||||
})
|
||||
// runs once for each test case provided
|
||||
});
|
||||
```
|
||||
|
||||
There are a number of options available for formatting the case label depending on its type.
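A short sketch of one such format, using printf-style placeholders (`%i` for integers here; the values are arbitrary):

```ts
import { test, expect } from "bun:test";

test.each([
  [1, 2, 3],
  [3, 4, 7],
])("%i + %i should equal %i", (a, b, expected) => {
  expect(a + b).toBe(expected);
});
```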
|
||||
|
||||
@@ -32,7 +32,6 @@
|
||||
"lint": "eslint './**/*.d.ts' --cache",
|
||||
"lint:fix": "eslint './**/*.d.ts' --cache --fix",
|
||||
"test": "node packages/bun-internal-test/src/runner.node.mjs ./build/bun-debug",
|
||||
"test:release": "node packages/bun-internal-test/src/runner.node.mjs ./build-release/bun",
|
||||
"update-known-failures": "node packages/bun-internal-test/src/update-known-windows-failures.mjs"
|
||||
"test:release": "node packages/bun-internal-test/src/runner.node.mjs ./build-release/bun"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,6 +12,7 @@ import PQueue from "p-queue";
|
||||
const run_start = new Date();
|
||||
const TIMEOUT_DURATION = 1000 * 60 * 5;
|
||||
const SHORT_TIMEOUT_DURATION = Math.ceil(TIMEOUT_DURATION / 5);
|
||||
|
||||
function defaultConcurrency() {
|
||||
// This causes instability due to the number of open file descriptors / sockets in some tests
|
||||
// Windows has higher limits
|
||||
@@ -30,6 +31,22 @@ let force_ram_size = Number(BigInt(nativeMemory) >> BigInt(2)) + "";
|
||||
if (!(Number.isSafeInteger(force_ram_size_input) && force_ram_size_input > 0)) {
|
||||
force_ram_size = force_ram_size_input + "";
|
||||
}
|
||||
function uncygwinTempDir() {
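// Rewrite Cygwin/MSYS-style temp paths (e.g. "/c/Users/...") back into native
// Windows paths ("C:\Users\...") so spawned processes resolve the temp dir correctly.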
|
||||
if (process.platform === "win32") {
|
||||
for (let key of ["TMPDIR", "TEMP", "TEMPDIR", "TMP"]) {
|
||||
let TMPDIR = process.env[key] || "";
|
||||
if (!/^\/[a-zA-Z]\//.test(TMPDIR)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const driveLetter = TMPDIR[1];
|
||||
TMPDIR = path.win32.normalize(`${driveLetter.toUpperCase()}:` + TMPDIR.substring(2));
|
||||
process.env[key] = TMPDIR;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
uncygwinTempDir();
|
||||
|
||||
const cwd = resolve(fileURLToPath(import.meta.url), "../../../../");
|
||||
process.chdir(cwd);
|
||||
@@ -130,8 +147,6 @@ function lookupWindowsError(code) {
|
||||
|
||||
const failing_tests = [];
|
||||
const passing_tests = [];
|
||||
const fixes = [];
|
||||
const regressions = [];
|
||||
let maxFd = -1;
|
||||
function getMaxFileDescriptor(path) {
|
||||
if (process.platform === "win32") {
|
||||
@@ -197,13 +212,6 @@ async function runTest(path) {
|
||||
const name = path.replace(cwd, "").slice(1);
|
||||
let exitCode, signal, err, output;
|
||||
|
||||
const expected_crash_reason = windows
|
||||
? await readFile(resolve(path), "utf-8").then(data => {
|
||||
const match = data.match(/@known-failing-on-windows:(.*)\n/);
|
||||
return match ? match[1].trim() : null;
|
||||
})
|
||||
: null;
|
||||
|
||||
const start = Date.now();
|
||||
|
||||
const activeTestObject = { start, proc: undefined };
|
||||
@@ -232,6 +240,7 @@ Starting "${name}"
|
||||
BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
|
||||
GITHUB_ACTIONS: process.env.GITHUB_ACTIONS ?? "true",
|
||||
BUN_DEBUG_QUIET_LOGS: "1",
|
||||
BUN_INSTALL_CACHE_DIR: join(TMPDIR, ".bun-install-cache"),
|
||||
[windows ? "TEMP" : "TMPDIR"]: TMPDIR,
|
||||
},
|
||||
});
|
||||
@@ -353,7 +362,7 @@ Starting "${name}"
|
||||
|
||||
console.log(
|
||||
`\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${
|
||||
passed ? "\x1b[32m✔" : expected_crash_reason ? "\x1b[33m⚠" : "\x1b[31m✖"
|
||||
passed ? "\x1b[32m✔" : "\x1b[31m✖"
|
||||
} ${name}\x1b[0m${reason ? ` (${reason})` : ""}`,
|
||||
);
|
||||
|
||||
@@ -368,20 +377,10 @@ Starting "${name}"
|
||||
}
|
||||
|
||||
if (!passed) {
|
||||
if (reason) {
|
||||
if (windows && !expected_crash_reason) {
|
||||
regressions.push({ path: name, reason, output });
|
||||
}
|
||||
}
|
||||
|
||||
failing_tests.push({ path: name, reason, output, expected_crash_reason });
|
||||
failing_tests.push({ path: name, reason, output });
|
||||
process.exitCode = 1;
|
||||
if (err) console.error(err);
|
||||
} else {
|
||||
if (windows && expected_crash_reason !== null) {
|
||||
fixes.push({ path: name, output, expected_crash_reason });
|
||||
}
|
||||
|
||||
passing_tests.push(name);
|
||||
}
|
||||
|
||||
@@ -479,30 +478,6 @@ ${header}
|
||||
|
||||
`;
|
||||
|
||||
if (fixes.length > 0) {
|
||||
report += `## Fixes\n\n`;
|
||||
report += "The following tests had @known-failing-on-windows but now pass:\n\n";
|
||||
report += fixes
|
||||
.map(
|
||||
({ path, expected_crash_reason }) => `- [\`${path}\`](${sectionLink(path)}) (before: ${expected_crash_reason})`,
|
||||
)
|
||||
.join("\n");
|
||||
report += "\n\n";
|
||||
}
|
||||
|
||||
if (regressions.length > 0) {
|
||||
report += `## Regressions\n\n`;
|
||||
report += regressions
|
||||
.map(
|
||||
({ path, reason, expected_crash_reason }) =>
|
||||
`- [\`${path}\`](${sectionLink(path)}) ${reason}${
|
||||
expected_crash_reason ? ` (expected: ${expected_crash_reason})` : ""
|
||||
}`,
|
||||
)
|
||||
.join("\n");
|
||||
report += "\n\n";
|
||||
}
|
||||
|
||||
if (failingTestDisplay.length > 0) {
|
||||
report += `## Failing tests\n\n`;
|
||||
report += failingTestDisplay;
|
||||
@@ -517,17 +492,10 @@ if (failingTestDisplay.length > 0) {
|
||||
|
||||
if (failing_tests.length) {
|
||||
report += `## Failing tests log output\n\n`;
|
||||
for (const { path, output, reason, expected_crash_reason } of failing_tests) {
|
||||
for (const { path, output, reason } of failing_tests) {
|
||||
report += `### ${path}\n\n`;
|
||||
report += "[Link to file](" + linkToGH(path) + ")\n\n";
|
||||
if (windows && reason !== expected_crash_reason) {
|
||||
report += `To mark this as a known failing test, add this to the start of the file:\n`;
|
||||
report += `\`\`\`ts\n`;
|
||||
report += `// @known-failing-on-windows: ${reason}\n`;
|
||||
report += `\`\`\`\n\n`;
|
||||
} else {
|
||||
report += `${reason}\n\n`;
|
||||
}
|
||||
report += `${reason}\n\n`;
|
||||
report += "```\n";
|
||||
report += output
|
||||
.replace(/\x1b\[[0-9;]*m/g, "")
|
||||
@@ -542,18 +510,12 @@ writeFileSync(
|
||||
JSON.stringify({
|
||||
failing_tests,
|
||||
passing_tests,
|
||||
fixes,
|
||||
regressions,
|
||||
}),
|
||||
);
|
||||
|
||||
console.log("-> test-report.md, test-report.json");
|
||||
|
||||
if (ci) {
|
||||
if (windows) {
|
||||
action.setOutput("regressing_tests", regressions.map(({ path }) => `- \`${path}\``).join("\n"));
|
||||
action.setOutput("regressing_test_count", regressions.length);
|
||||
}
|
||||
if (failing_tests.length > 0) {
|
||||
action.setFailed(`${failing_tests.length} files with failing tests`);
|
||||
}
|
||||
@@ -565,12 +527,6 @@ if (ci) {
|
||||
}
|
||||
action.summary.addRaw(truncated_report);
|
||||
await action.summary.write();
|
||||
} else {
|
||||
if (windows && (regressions.length > 0 || fixes.length > 0)) {
|
||||
console.log(
|
||||
"\n\x1b[34mnote\x1b[0;2m:\x1b[0m If you would like to update the @known-failing-on-windows annotations, run `bun update-known-failures`",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
process.exit(failing_tests.length ? 1 : process.exitCode);
|
||||
|
||||
@@ -1,49 +0,0 @@
|
||||
import assert from "assert";
|
||||
import { existsSync, readFileSync, writeFileSync } from "fs";
|
||||
import { join } from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
if (process.platform !== "win32") {
|
||||
console.log("This script is only intended to be run on Windows.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
process.chdir(join(fileURLToPath(import.meta.url), "../../../../"));
|
||||
|
||||
if (!existsSync("test-report.json")) {
|
||||
console.log("No test report found. Please run `bun run test` first.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const test_report = JSON.parse(readFileSync("test-report.json", "utf8"));
|
||||
assert(Array.isArray(test_report.failing_tests));
|
||||
|
||||
for (const { path, reason, expected_crash_reason } of test_report.failing_tests) {
|
||||
assert(path);
|
||||
assert(reason);
|
||||
|
||||
if (expected_crash_reason !== reason) {
|
||||
const old_content = readFileSync(path, "utf8");
|
||||
if (!old_content.includes("// @known-failing-on-windows")) {
|
||||
let content = old_content.replace(/\/\/\s*@known-failing-on-windows:.*\n/, "");
|
||||
if (reason) {
|
||||
content = `// @known-failing-on-windows: ${reason}\n` + content;
|
||||
}
|
||||
writeFileSync(path, content, "utf8");
|
||||
console.log(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const { path } of test_report.fixes) {
|
||||
assert(path);
|
||||
|
||||
const old_content = readFileSync(path, "utf8");
|
||||
|
||||
let content = old_content.replace(/\/\/\s*@known-failing-on-windows:.*\n/, "");
|
||||
|
||||
if (content !== old_content) {
|
||||
writeFileSync(path, content, "utf8");
|
||||
console.log(path);
|
||||
}
|
||||
}
|
||||
@@ -19,9 +19,7 @@ export default function polyfillImportMeta(metaIn: ImportMeta) {
|
||||
dir: path.dirname(metapath),
|
||||
file: path.basename(metapath),
|
||||
require: require2,
|
||||
async resolve(id: string, parent?: string) {
|
||||
return this.resolveSync(id, parent);
|
||||
},
|
||||
resolve: metaIn.resolve,
|
||||
resolveSync(id: string, parent?: string) {
|
||||
return require2.resolve(id, {
|
||||
paths: typeof parent === 'string' ? [
|
||||
|
||||
14
packages/bun-release/.gitignore
vendored
@@ -1,6 +1,8 @@
|
||||
.DS_Store
|
||||
.env
|
||||
node_modules
|
||||
/npm/**/bin
|
||||
/npm/**/*.js
|
||||
/npm/**/.npmrc
|
||||
.DS_Store
|
||||
.env
|
||||
node_modules
|
||||
/npm/**/bin
|
||||
/npm/**/*.js
|
||||
/npm/**/package.json
|
||||
/npm/**/.npmrc
|
||||
*.tgz
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"name": "@oven/bun-darwin-aarch64",
|
||||
"version": "0.5.3",
|
||||
"description": "This is the macOS arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"preferUnplugged": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"cpu": [
|
||||
"arm64"
|
||||
]
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"name": "@oven/bun-darwin-x64-baseline",
|
||||
"version": "0.5.3",
|
||||
"description": "This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"preferUnplugged": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"cpu": [
|
||||
"x64"
|
||||
]
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"name": "@oven/bun-darwin-x64",
|
||||
"version": "0.5.3",
|
||||
"description": "This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"preferUnplugged": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"cpu": [
|
||||
"x64"
|
||||
]
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"name": "@oven/bun-linux-aarch64",
|
||||
"version": "0.5.3",
|
||||
"description": "This is the Linux arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"preferUnplugged": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"cpu": [
|
||||
"arm64"
|
||||
]
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"name": "@oven/bun-linux-x64-baseline",
|
||||
"version": "0.5.3",
|
||||
"description": "This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"preferUnplugged": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"cpu": [
|
||||
"x64"
|
||||
]
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"name": "@oven/bun-linux-x64",
|
||||
"version": "0.5.3",
|
||||
"description": "This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"preferUnplugged": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"cpu": [
|
||||
"x64"
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
# Bun
|
||||
|
||||
This is the Windows x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh
|
||||
|
||||
_Note: "Baseline" builds are for machines that do not support [AVX2](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions) instructions._
|
||||
3
packages/bun-release/npm/@oven/bun-windows-x64/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# Bun
|
||||
|
||||
This is the Windows x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh
|
||||
@@ -1,42 +0,0 @@
|
||||
{
|
||||
"name": "bun",
|
||||
"version": "0.5.3",
|
||||
"description": "Bun is a fast all-in-one JavaScript runtime.",
|
||||
"keywords": [
|
||||
"bun",
|
||||
"bun.js",
|
||||
"node",
|
||||
"node.js",
|
||||
"runtime",
|
||||
"bundler",
|
||||
"transpiler",
|
||||
"typescript"
|
||||
],
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"bun": "bin/bun",
|
||||
"bunx": "bin/bun"
|
||||
},
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"scripts": {
|
||||
"postinstall": "node install.js"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@oven/bun-darwin-aarch64": "0.5.3",
|
||||
"@oven/bun-darwin-x64": "0.5.3",
|
||||
"@oven/bun-darwin-x64-baseline": "0.5.3",
|
||||
"@oven/bun-linux-aarch64": "0.5.3",
|
||||
"@oven/bun-linux-x64": "0.5.3",
|
||||
"@oven/bun-linux-x64-baseline": "0.5.3"
|
||||
},
|
||||
"os": [
|
||||
"darwin",
|
||||
"linux"
|
||||
],
|
||||
"cpu": [
|
||||
"arm64",
|
||||
"x64"
|
||||
]
|
||||
}
|
||||
@@ -9,7 +9,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@octokit/types": "^8.1.1",
|
||||
"bun-types": "^0.4.0",
|
||||
"bun-types": "^1.1.0",
|
||||
"prettier": "^2.8.2"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { importBun } from "../src/npm/install";
|
||||
import { importBun, optimizeBun } from "../src/npm/install";
|
||||
import { execFileSync } from "child_process";
|
||||
|
||||
importBun()
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
import { join, copy, exists, chmod, write, writeJson } from "../src/fs";
|
||||
import { mkdtemp } from "fs/promises";
|
||||
import { rmSync, mkdirSync } from "fs";
|
||||
import { tmpdir } from "os";
|
||||
import { dirname } from "path";
|
||||
import { fetch } from "../src/fetch";
|
||||
import { spawn } from "../src/spawn";
|
||||
import type { Platform } from "../src/platform";
|
||||
@@ -10,41 +14,51 @@ import { buildSync, formatMessagesSync } from "esbuild";
|
||||
import type { JSZipObject } from "jszip";
|
||||
import { loadAsync } from "jszip";
|
||||
import { debug, log, error } from "../src/console";
|
||||
import { expect } from "bun:test";
|
||||
|
||||
const module = "bun";
|
||||
const owner = "@oven";
|
||||
let version: string;
|
||||
|
||||
const [tag, action] = process.argv.slice(2);
|
||||
|
||||
await build(tag);
|
||||
const release = await getRelease(tag);
|
||||
const version = await getSemver(release.tag_name);
|
||||
|
||||
if (action !== "test-only") await build();
|
||||
|
||||
if (action === "publish") {
|
||||
await publish();
|
||||
} else if (action === "dry-run") {
|
||||
await publish(true);
|
||||
} else if (action === "test") {
|
||||
await publish(true);
|
||||
await test();
|
||||
} else if (action === "test-only") {
|
||||
await test();
|
||||
} else if (action) {
|
||||
throw new Error(`Unknown action: ${action}`);
|
||||
}
|
||||
process.exit(0); // HACK
|
||||
|
||||
async function build(tag?: string): Promise<void> {
|
||||
const release = await getRelease(tag);
|
||||
version = await getSemver(release.tag_name);
|
||||
async function build(): Promise<void> {
|
||||
await buildRootModule();
|
||||
for (const platform of platforms) {
|
||||
if (action !== "publish" && (platform.os !== process.platform || platform.arch !== process.arch)) continue;
|
||||
await buildModule(release, platform);
|
||||
}
|
||||
}
|
||||
|
||||
async function publish(dryRun?: boolean): Promise<void> {
|
||||
const modules = platforms.map(({ bin }) => `${owner}/${bin}`);
|
||||
const modules = platforms
|
||||
.filter(({ os, arch }) => action === "publish" || (os === process.platform && arch === process.arch))
|
||||
.map(({ bin }) => `${owner}/${bin}`);
|
||||
modules.push(module);
|
||||
for (const module of modules) {
|
||||
publishModule(module, dryRun);
|
||||
}
|
||||
}
|
||||
|
||||
async function buildRootModule() {
|
||||
async function buildRootModule(dryRun?: boolean) {
|
||||
log("Building:", `${module}@${version}`);
|
||||
const cwd = join("npm", module);
|
||||
const define = {
|
||||
@@ -54,28 +68,53 @@ async function buildRootModule() {
|
||||
};
|
||||
bundle(join("scripts", "npm-postinstall.ts"), join(cwd, "install.js"), {
|
||||
define,
|
||||
});
|
||||
bundle(join("scripts", "npm-exec.ts"), join(cwd, "bin", "bun"), {
|
||||
define,
|
||||
banner: {
|
||||
js: "#!/usr/bin/env node",
|
||||
js: "// Source code: https://github.com/oven-sh/bun/blob/main/packages/bun-release/scripts/npm-postinstall.ts",
|
||||
},
|
||||
});
|
||||
write(join(cwd, "bin", "bun.exe"), "");
|
||||
write(
|
||||
join(cwd, "bin", "README.txt"),
|
||||
`The 'bun.exe' file is a placeholder for the binary file, which
|
||||
is replaced by Bun's 'postinstall' script. For this to work, make
|
||||
sure that you do not use --ignore-scripts while installing.
|
||||
|
||||
The postinstall script is responsible for linking the binary file
|
||||
directly into 'node_modules/.bin' and avoiding a Node.js wrapper
|
||||
script being called on every invocation of 'bun'. If this wasn't
|
||||
done, Bun would seem to be slower than Node.js, because it would
|
||||
be executing a copy of Node.js every time!
|
||||
|
||||
Unfortunately, it is not possible to fix all cases on all platforms
|
||||
without *requiring* a postinstall script.
|
||||
`,
|
||||
);
|
||||
const os = [...new Set(platforms.map(({ os }) => os))];
|
||||
const cpu = [...new Set(platforms.map(({ arch }) => arch))];
|
||||
writeJson(join(cwd, "package.json"), {
|
||||
name: module,
|
||||
description: "Bun is a fast all-in-one JavaScript runtime.",
|
||||
version: version,
|
||||
scripts: {
|
||||
postinstall: "node install.js",
|
||||
},
|
||||
optionalDependencies: Object.fromEntries(platforms.map(({ bin }) => [`${owner}/${bin}`, version])),
|
||||
optionalDependencies: Object.fromEntries(
|
||||
platforms.map(({ bin }) => [
|
||||
`${owner}/${bin}`,
|
||||
dryRun ? `file:./oven-${bin.replaceAll("/", "-") + "-" + version + ".tgz"}` : version,
|
||||
]),
|
||||
),
|
||||
bin: {
|
||||
bun: "bin/bun",
|
||||
bunx: "bin/bun",
|
||||
bun: "bin/bun.exe",
|
||||
bunx: "bin/bun.exe",
|
||||
},
|
||||
os,
|
||||
cpu,
|
||||
keywords: ["bun", "bun.js", "node", "node.js", "runtime", "bundler", "transpiler", "typescript"],
|
||||
homepage: "https://bun.sh",
|
||||
bugs: "https://github.com/oven-sh/issues",
|
||||
license: "MIT",
|
||||
repository: "https://github.com/oven-sh/bun",
|
||||
});
|
||||
if (exists(".npmrc")) {
|
||||
copy(".npmrc", join(cwd, ".npmrc"));
|
||||
@@ -95,11 +134,17 @@ async function buildModule(
|
||||
}
|
||||
const bun = await extractFromZip(asset.browser_download_url, `${bin}/bun`);
|
||||
const cwd = join("npm", module);
|
||||
mkdirSync(dirname(join(cwd, exe)), { recursive: true });
|
||||
write(join(cwd, exe), await bun.async("arraybuffer"));
|
||||
chmod(join(cwd, exe), 0o755);
|
||||
writeJson(join(cwd, "package.json"), {
|
||||
name: module,
|
||||
version: version,
|
||||
description: "This is the macOS arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
homepage: "https://bun.sh",
|
||||
bugs: "https://github.com/oven-sh/issues",
|
||||
license: "MIT",
|
||||
repository: "https://github.com/oven-sh/bun",
|
||||
preferUnplugged: true,
|
||||
os: [os],
|
||||
cpu: [arch],
|
||||
@@ -111,22 +156,33 @@ async function buildModule(
|
||||
|
||||
function publishModule(name: string, dryRun?: boolean): void {
|
||||
log(dryRun ? "Dry-run Publishing:" : "Publishing:", `${name}@${version}`);
|
||||
const { exitCode, stdout, stderr } = spawn(
|
||||
"npm",
|
||||
[
|
||||
"publish",
|
||||
"--access",
|
||||
"public",
|
||||
"--tag",
|
||||
version.includes("canary") ? "canary" : "latest",
|
||||
...(dryRun ? ["--dry-run"] : []),
|
||||
],
|
||||
{
|
||||
cwd: join("npm", name),
|
||||
},
|
||||
);
|
||||
if (exitCode === 0) {
|
||||
if (!dryRun) {
|
||||
const { exitCode, stdout, stderr } = spawn(
|
||||
"npm",
|
||||
[
|
||||
"publish",
|
||||
"--access",
|
||||
"public",
|
||||
"--tag",
|
||||
version.includes("canary") ? "canary" : "latest",
|
||||
...(dryRun ? ["--dry-run"] : []),
|
||||
],
|
||||
{
|
||||
cwd: join("npm", name),
|
||||
},
|
||||
);
|
||||
error(stderr || stdout);
|
||||
if (exitCode !== 0) {
|
||||
throw new Error("npm publish failed with code " + exitCode);
|
||||
}
|
||||
} else {
|
||||
const { exitCode, stdout, stderr } = spawn("npm", ["pack"], {
|
||||
cwd: join("npm", name),
|
||||
});
|
||||
error(stderr || stdout);
|
||||
if (exitCode !== 0) {
|
||||
throw new Error("npm pack failed with code " + exitCode);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -162,3 +218,86 @@ function bundle(src: string, dst: string, options: BuildOptions = {}): void {
|
||||
throw new Error(messages.join("\n"));
|
||||
}
|
||||
}
|
||||
|
||||
async function test() {
|
||||
const root = await mkdtemp(join(tmpdir(), "bun-release-test-"));
|
||||
const $ = new Bun.$.Shell().cwd(root);
|
||||
|
||||
for (const platform of platforms) {
|
||||
if (platform.os !== process.platform) continue;
|
||||
if (platform.arch !== process.arch) continue;
|
||||
copy(
|
||||
join(
|
||||
import.meta.dir,
|
||||
"../npm/@oven/",
|
||||
platform.bin,
|
||||
"oven-" + platform.bin.replaceAll("/", "-") + `-${version}.tgz`,
|
||||
),
|
||||
join(root, `${platform.bin}-${version}.tgz`),
|
||||
);
|
||||
}
|
||||
|
||||
copy(join(import.meta.dir, "../npm", "bun", "bun-" + version + ".tgz"), join(root, "bun-" + version + ".tgz"));
|
||||
|
||||
console.log(root);
|
||||
for (const [install, exec] of [
|
||||
["npm i", "npm exec"],
|
||||
["yarn set version berry; yarn add", "yarn"],
|
||||
["yarn set version latest; yarn add", "yarn"],
|
||||
["pnpm i", "pnpm"],
|
||||
["bun i", "bun run"],
|
||||
]) {
|
||||
rmSync(join(root, "node_modules"), { recursive: true, force: true });
|
||||
rmSync(join(root, "package-lock.json"), { recursive: true, force: true });
|
||||
rmSync(join(root, "package.json"), { recursive: true, force: true });
|
||||
rmSync(join(root, "pnpm-lock.yaml"), { recursive: true, force: true });
|
||||
rmSync(join(root, "yarn.lock"), { recursive: true, force: true });
|
||||
writeJson(join(root, "package.json"), {
|
||||
name: "bun-release-test",
|
||||
});
|
||||
|
||||
console.log("Testing", install + " bun");
|
||||
await $`${{ raw: install }} ./bun-${version}.tgz`;
|
||||
|
||||
console.log("Running " + exec + " bun");
|
||||
|
||||
// let output = await $`${{
|
||||
// raw: exec,
|
||||
// }} bun -- -e "console.log(JSON.stringify([Bun.version, process.platform, process.arch, process.execPath]))"`.text();
|
||||
const split = exec.split(" ");
|
||||
let {
|
||||
stdout: output,
|
||||
stderr,
|
||||
exitCode,
|
||||
} = spawn(
|
||||
split[0],
|
||||
[
|
||||
...split.slice(1),
|
||||
"--",
|
||||
"bun",
|
||||
"-e",
|
||||
"console.log(JSON.stringify([Bun.version, process.platform, process.arch, process.execPath]))",
|
||||
],
|
||||
{
|
||||
cwd: root,
|
||||
},
|
||||
);
|
||||
if (exitCode !== 0) {
|
||||
console.error(stderr);
|
||||
throw new Error("Failed to run " + exec + " bun, exit code: " + exitCode);
|
||||
}
|
||||
|
||||
try {
|
||||
output = JSON.parse(output);
|
||||
} catch (e) {
|
||||
console.log({ output });
|
||||
throw e;
|
||||
}
|
||||
|
||||
expect(output[0]).toBe(version);
|
||||
expect(output[1]).toBe(process.platform);
|
||||
expect(output[2]).toBe(process.arch);
|
||||
expect(output[3]).toStartWith(root);
|
||||
expect(output[3]).toInclude("bun");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -121,24 +121,14 @@ async function downloadBun(platform: Platform, dst: string): Promise<void> {
|
||||
}
|
||||
|
||||
export function optimizeBun(path: string): void {
|
||||
if (os === "win32") {
|
||||
throw new Error(
|
||||
"You must use Windows Subsystem for Linux, aka. WSL, to run bun. Learn more: https://learn.microsoft.com/en-us/windows/wsl/install",
|
||||
);
|
||||
}
|
||||
const { npm_config_user_agent } = process.env;
|
||||
if (npm_config_user_agent && /\byarn\//.test(npm_config_user_agent)) {
|
||||
throw new Error(
|
||||
"Yarn does not support bun, because it does not allow linking to binaries. To use bun, install using the following command: curl -fsSL https://bun.sh/install | bash",
|
||||
);
|
||||
}
|
||||
const installScript = os === "win32" ? 'powershell -c "irm bun.sh/install.ps1 | iex"' : "curl -fsSL https://bun.sh/install | bash";
|
||||
try {
|
||||
rename(path, join(__dirname, "bin", "bun"));
|
||||
rename(path, join(__dirname, "bin", "bun.exe"));
|
||||
return;
|
||||
} catch (error) {
|
||||
debug("optimizeBun failed", error);
|
||||
}
|
||||
throw new Error(
|
||||
"Your package manager doesn't seem to support bun. To use bun, install using the following command: curl -fsSL https://bun.sh/install | bash",
|
||||
`Your package manager doesn't seem to support bun. To use bun, install using the following command: ${installScript}`,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -6,7 +6,9 @@ export const os = process.platform;
|
||||
|
||||
export const arch = os === "darwin" && process.arch === "x64" && isRosetta2() ? "arm64" : process.arch;
|
||||
|
||||
export const avx2 = (arch === "x64" && os === "linux" && isLinuxAVX2()) || (os === "darwin" && isDarwinAVX2());
|
||||
export const avx2 =
|
||||
arch === "x64" &&
|
||||
((os === "linux" && isLinuxAVX2()) || (os === "darwin" && isDarwinAVX2()) || (os === "win32" && isWindowsAVX2()));
|
||||
|
||||
export type Platform = {
|
||||
os: string;
|
||||
@@ -55,6 +57,19 @@ export const platforms: Platform[] = [
|
||||
bin: "bun-linux-x64-baseline",
|
||||
exe: "bin/bun",
|
||||
},
|
||||
{
|
||||
os: "win32",
|
||||
arch: "x64",
|
||||
avx2: true,
|
||||
bin: "bun-windows-x64",
|
||||
exe: "bin/bun.exe",
|
||||
},
|
||||
{
|
||||
os: "win32",
|
||||
arch: "x64",
|
||||
bin: "bun-windows-x64-baseline",
|
||||
exe: "bin/bun.exe",
|
||||
},
|
||||
];
|
||||
|
||||
export const supportedPlatforms: Platform[] = platforms
|
||||
@@ -89,3 +104,17 @@ function isRosetta2(): boolean {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function isWindowsAVX2(): boolean {
|
||||
try {
|
||||
return (
|
||||
spawn("powershell", [
|
||||
"-c",
|
||||
`(Add-Type -MemberDefinition '[DllImport("kernel32.dll")] public static extern bool IsProcessorFeaturePresent(int ProcessorFeature);' -Name 'Kernel32' -Namespace 'Win32' -PassThru)::IsProcessorFeaturePresent(40);`,
|
||||
]).stdout == "True"
|
||||
);
|
||||
} catch (error) {
|
||||
debug("isWindowsAVX2 failed", error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
23
packages/bun-types/bun.d.ts
vendored
@@ -43,7 +43,7 @@ declare module "bun" {
|
||||
*
|
||||
* @param {string} command The name of the executable or script
|
||||
* @param {string} options.PATH Overrides the PATH environment variable
|
||||
* @param {string} options.cwd Limits the search to a particular directory in which to searc
|
||||
* @param {string} options.cwd When given a relative path, use this path to join it.
|
||||
*/
|
||||
function which(command: string, options?: { PATH?: string; cwd?: string }): string | null;
|
||||
|
||||
@@ -277,12 +277,16 @@ declare module "bun" {
|
||||
blob(): Promise<Blob>;
|
||||
|
||||
/**
|
||||
* Configure the shell to not throw an exception on non-zero exit codes.
|
||||
* Configure the shell to not throw an exception on non-zero exit codes. Throwing can be re-enabled with `.throws(true)`.
|
||||
*
|
||||
* By default, the shell will throw an exception on commands which return non-zero exit codes.
|
||||
*/
|
||||
nothrow(): this;
|
||||
|
||||
/**
|
||||
* Configure whether or not the shell should throw an exception on non-zero exit codes.
|
||||
*
|
||||
* By default, this is configured to `true`.
|
||||
*/
|
||||
throws(shouldThrow: boolean): this;
|
||||
}
|
||||
@@ -2988,12 +2992,19 @@ declare module "bun" {
|
||||
}
|
||||
|
||||
/**
|
||||
* Nanoseconds since Bun.js was started as an integer.
|
||||
* Returns the number of nanoseconds since the process was started.
|
||||
*
|
||||
* This uses a high-resolution monotonic system timer.
|
||||
* This function uses a high-resolution monotonic system timer to provide precise time measurements.
|
||||
* In JavaScript, numbers are represented as double-precision floating-point values (IEEE 754),
|
||||
* which can safely represent integers up to 2^53 - 1 (Number.MAX_SAFE_INTEGER).
|
||||
*
|
||||
* After 14 weeks of consecutive uptime, this function
|
||||
* wraps
|
||||
* Due to this limitation, while the internal counter may continue beyond this point,
|
||||
* the precision of the returned value will degrade after 14.8 weeks of uptime (when the nanosecond
|
||||
* count exceeds Number.MAX_SAFE_INTEGER). Beyond this point, the function will continue to count but
|
||||
* with reduced precision, which might affect time calculations and comparisons in long-running applications.
|
||||
*
|
||||
* @returns {number} The number of nanoseconds since the process was started, with precise values up to
|
||||
* Number.MAX_SAFE_INTEGER.
|
||||
*/
|
||||
function nanoseconds(): number;
|
||||
|
||||
|
||||
Binary file not shown.
36
packages/bun-types/globals.d.ts
vendored
36
packages/bun-types/globals.d.ts
vendored
@@ -1758,21 +1758,10 @@ declare global {
|
||||
* ```
|
||||
*/
|
||||
readonly env: NodeJS.ProcessEnv;
|
||||
/**
|
||||
* Resolve a module ID the same as if you imported it
|
||||
*
|
||||
* On failure, throws a `ResolveMessage`
|
||||
*/
|
||||
resolve(moduleId: string): Promise<string>;
|
||||
/**
|
||||
* Resolve a `moduleId` as though it were imported from `parent`
|
||||
*
|
||||
* On failure, throws a `ResolveMessage`
|
||||
*/
|
||||
// tslint:disable-next-line:unified-signatures
|
||||
resolve(moduleId: string, parent: string): Promise<string>;
|
||||
|
||||
/**
|
||||
* @deprecated Use `require.resolve` or `Bun.resolveSync(moduleId, path.dirname(parent))` instead
|
||||
*
|
||||
* Resolve a module ID the same as if you imported it
|
||||
*
|
||||
* The `parent` argument is optional, and defaults to the current module's path.
|
||||
@@ -1780,17 +1769,12 @@ declare global {
|
||||
resolveSync(moduleId: string, parent?: string): string;
|
||||
|
||||
/**
|
||||
* Load a CommonJS module
|
||||
* Load a CommonJS module within an ES Module. Bun's transpiler rewrites all
|
||||
* calls to `require` with `import.meta.require` when transpiling ES Modules
|
||||
* for the runtime.
|
||||
*
|
||||
* Internally, this is a synchronous version of ESModule's `import()`, with extra code for handling:
|
||||
* - CommonJS modules
|
||||
* - *.node files
|
||||
* - *.json files
|
||||
*
|
||||
* Warning: **This API is not stable** and may change in the future. Use at your
|
||||
* own risk. Usually, you should use `require` instead and Bun's transpiler
|
||||
* will automatically rewrite your code to use `import.meta.require` if
|
||||
* relevant.
|
||||
* Warning: **This API is not stable** and may change or be removed in the
|
||||
* future. Use at your own risk.
|
||||
*/
|
||||
require: NodeJS.Require;
|
||||
|
||||
@@ -1814,17 +1798,15 @@ declare global {
|
||||
readonly main: boolean;
|
||||
|
||||
/** Alias of `import.meta.dir`. Exists for Node.js compatibility */
|
||||
dirname: string;
|
||||
readonly dirname: string;
|
||||
|
||||
/** Alias of `import.meta.path`. Exists for Node.js compatibility */
|
||||
filename: string;
|
||||
readonly filename: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* NodeJS-style `require` function
|
||||
*
|
||||
* Internally, uses `import.meta.require`
|
||||
*
|
||||
* @param moduleId - The module ID to resolve
|
||||
*/
|
||||
var require: NodeJS.Require;
|
||||
|
||||
61
packages/bun-types/overrides.d.ts
vendored
61
packages/bun-types/overrides.d.ts
vendored
@@ -62,3 +62,64 @@ declare module "tls" {
|
||||
|
||||
function connect(options: BunConnectionOptions, secureConnectListener?: () => void): TLSSocket;
|
||||
}
|
||||
|
||||
declare module "util" {
|
||||
// https://nodejs.org/docs/latest/api/util.html#foreground-colors
|
||||
type ForegroundColors =
|
||||
| "black"
|
||||
| "blackBright"
|
||||
| "blue"
|
||||
| "blueBright"
|
||||
| "cyan"
|
||||
| "cyanBright"
|
||||
| "gray"
|
||||
| "green"
|
||||
| "greenBright"
|
||||
| "grey"
|
||||
| "magenta"
|
||||
| "magentaBright"
|
||||
| "red"
|
||||
| "redBright"
|
||||
| "white"
|
||||
| "whiteBright"
|
||||
| "yellow"
|
||||
| "yellowBright";
|
||||
|
||||
// https://nodejs.org/docs/latest/api/util.html#background-colors
|
||||
type BackgroundColors =
|
||||
| "bgBlack"
|
||||
| "bgBlackBright"
|
||||
| "bgBlue"
|
||||
| "bgBlueBright"
|
||||
| "bgCyan"
|
||||
| "bgCyanBright"
|
||||
| "bgGray"
|
||||
| "bgGreen"
|
||||
| "bgGreenBright"
|
||||
| "bgGrey"
|
||||
| "bgMagenta"
|
||||
| "bgMagentaBright"
|
||||
| "bgRed"
|
||||
| "bgRedBright"
|
||||
| "bgWhite"
|
||||
| "bgWhiteBright"
|
||||
| "bgYellow"
|
||||
| "bgYellowBright";
|
||||
|
||||
// https://nodejs.org/docs/latest/api/util.html#modifiers
|
||||
type Modifiers =
|
||||
| "blink"
|
||||
| "bold"
|
||||
| "dim"
|
||||
| "doubleunderline"
|
||||
| "framed"
|
||||
| "hidden"
|
||||
| "inverse"
|
||||
| "italic"
|
||||
| "overlined"
|
||||
| "reset"
|
||||
| "strikethrough"
|
||||
| "underline";
|
||||
|
||||
function styleText(format: ForegroundColors | BackgroundColors | Modifiers, text: string): string;
|
||||
}
|
||||
|
||||
65
packages/bun-types/test.d.ts
vendored
65
packages/bun-types/test.d.ts
vendored
@@ -201,12 +201,17 @@ declare module "bun:test" {
|
||||
* @param condition if these tests should be skipped
|
||||
*/
|
||||
skipIf(condition: boolean): (label: string, fn: () => void) => void;
|
||||
/**
|
||||
* Marks this group of tests as to be written or to be fixed, if `condition` is true.
|
||||
*
|
||||
* @param condition if these tests should be skipped
|
||||
*/
|
||||
todoIf(condition: boolean): (label: string, fn: () => void) => void;
|
||||
/**
|
||||
* Returns a function that runs for each item in `table`.
|
||||
*
|
||||
* @param table Array of Arrays with the arguments that are passed into the test fn for each row.
|
||||
*/
|
||||
|
||||
each<T extends Readonly<[any, ...any[]]>>(
|
||||
table: readonly T[],
|
||||
): (label: string, fn: (...args: [...T]) => void | Promise<unknown>, options?: number | TestOptions) => void;
|
||||
@@ -414,6 +419,18 @@ declare module "bun:test" {
|
||||
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
|
||||
options?: number | TestOptions,
|
||||
) => void;
|
||||
/**
|
||||
* Marks this test as to be written or to be fixed, if `condition` is true.
|
||||
*
|
||||
* @param condition if the test should be marked TODO
|
||||
*/
|
||||
todoIf(
|
||||
condition: boolean,
|
||||
): (
|
||||
label: string,
|
||||
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
|
||||
options?: number | TestOptions,
|
||||
) => void;
|
||||
/**
|
||||
* Returns a function that runs for each item in `table`.
|
||||
*
|
||||
@@ -1127,6 +1144,27 @@ declare module "bun:test" {
|
||||
* @param expected the expected error, error message, or error pattern
|
||||
*/
|
||||
toThrow(expected?: unknown): void;
|
||||
/**
|
||||
* Asserts that a function throws an error.
|
||||
*
|
||||
* - If expected is a `string` or `RegExp`, it will check the `message` property.
|
||||
* - If expected is an `Error` object, it will check the `name` and `message` properties.
|
||||
* - If expected is an `Error` constructor, it will check the class of the `Error`.
|
||||
* - If expected is not provided, it will check if anything as thrown.
|
||||
*
|
||||
* @example
|
||||
* function fail() {
|
||||
* throw new Error("Oops!");
|
||||
* }
|
||||
* expect(fail).toThrowError("Oops!");
|
||||
* expect(fail).toThrowError(/oops/i);
|
||||
* expect(fail).toThrowError(Error);
|
||||
* expect(fail).toThrowError();
|
||||
*
|
||||
* @param expected the expected error, error message, or error pattern
|
||||
* @alias toThrow
|
||||
*/
|
||||
toThrowError(expected?: unknown): void;
|
||||
/**
|
||||
* Asserts that a value matches a regular expression or includes a substring.
|
||||
*
|
||||
@@ -1410,22 +1448,47 @@ declare module "bun:test" {
|
||||
* Ensures that a mock function is called.
|
||||
*/
|
||||
toHaveBeenCalled(): void;
|
||||
/**
|
||||
* Ensures that a mock function is called an exact number of times.
|
||||
* @alias toHaveBeenCalled
|
||||
*/
|
||||
toBeCalled(): void;
|
||||
/**
|
||||
* Ensures that a mock function is called an exact number of times.
|
||||
*/
|
||||
toHaveBeenCalledTimes(expected: number): void;
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments.
|
||||
* @alias toHaveBeenCalledTimes
|
||||
*/
|
||||
toBeCalledTimes(expected: number): void;
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments.
|
||||
*/
|
||||
toHaveBeenCalledWith(...expected: unknown[]): void;
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments.
|
||||
* @alias toHaveBeenCalledWith
|
||||
*/
|
||||
toBeCalledWith(...expected: unknown[]): void;
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments for the last call.
|
||||
*/
|
||||
toHaveBeenLastCalledWith(...expected: unknown[]): void;
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments for the nth call.
|
||||
* @alias toHaveBeenCalledWith
|
||||
*/
|
||||
lastCalledWith(...expected: unknown[]): void;
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments for the nth call.
|
||||
*/
|
||||
toHaveBeenNthCalledWith(n: number, ...expected: unknown[]): void;
|
||||
/**
|
||||
* Ensure that a mock function is called with specific arguments for the nth call.
|
||||
* @alias toHaveBeenCalledWith
|
||||
*/
|
||||
nthCalledWith(n: number, ...expected: unknown[]): void;
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -724,7 +724,7 @@ LIBUS_SOCKET_DESCRIPTOR bsd_create_udp_socket(const char *host, int port) {
|
||||
}
|
||||
|
||||
LIBUS_SOCKET_DESCRIPTOR listenFd = LIBUS_SOCKET_ERROR;
|
||||
struct addrinfo *listenAddr;
|
||||
struct addrinfo *listenAddr = NULL;
|
||||
for (struct addrinfo *a = result; a && listenFd == LIBUS_SOCKET_ERROR; a = a->ai_next) {
|
||||
if (a->ai_family == AF_INET6) {
|
||||
listenFd = bsd_create_socket(a->ai_family, a->ai_socktype, a->ai_protocol);
|
||||
|
||||
@@ -44,8 +44,6 @@ void *sni_find(void *sni, const char *hostname);
|
||||
#endif
|
||||
|
||||
#include "./root_certs.h"
|
||||
static X509 *root_cert_instances[sizeof(root_certs) / sizeof(root_certs[0])] = {
|
||||
NULL};
|
||||
|
||||
/* These are in root_certs.cpp */
|
||||
extern X509_STORE *us_get_default_ca_store();
|
||||
@@ -102,9 +100,10 @@ struct us_internal_ssl_socket_context_t {
|
||||
struct us_internal_ssl_socket_t {
|
||||
struct us_socket_t s;
|
||||
SSL *ssl;
|
||||
int ssl_write_wants_read; // we use this for now
|
||||
int ssl_read_wants_write;
|
||||
int pending_handshake;
|
||||
unsigned int ssl_write_wants_read : 1; // we use this for now
|
||||
unsigned int ssl_read_wants_write : 1;
|
||||
unsigned int pending_handshake : 1;
|
||||
unsigned int received_ssl_shutdown : 1;
|
||||
};
|
||||
|
||||
int passphrase_cb(char *buf, int size, int rwflag, void *u) {
|
||||
@@ -186,6 +185,8 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
|
||||
s->ssl = SSL_new(context->ssl_context);
|
||||
s->ssl_write_wants_read = 0;
|
||||
s->ssl_read_wants_write = 0;
|
||||
s->pending_handshake = 1;
|
||||
s->received_ssl_shutdown = 0;
|
||||
|
||||
SSL_set_bio(s->ssl, loop_ssl_data->shared_rbio, loop_ssl_data->shared_wbio);
|
||||
|
||||
@@ -203,11 +204,8 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
|
||||
ip_length);
|
||||
|
||||
// Hello Message!
|
||||
// always handshake after open if on_handshake is set
|
||||
if (context->on_handshake || s->pending_handshake) {
|
||||
s->pending_handshake = 1;
|
||||
us_internal_ssl_handshake(s);
|
||||
}
|
||||
// always handshake after open
|
||||
us_internal_ssl_handshake(s);
|
||||
|
||||
return result;
|
||||
}
|
||||
@@ -222,6 +220,30 @@ void us_internal_on_ssl_handshake(
|
||||
context->handshake_data = custom_data;
|
||||
}
|
||||
|
||||
struct us_internal_ssl_socket_t *
|
||||
us_internal_ssl_socket_close(struct us_internal_ssl_socket_t *s, int code,
|
||||
void *reason) {
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
|
||||
if (s->pending_handshake) {
|
||||
s->pending_handshake = 0;
|
||||
// if we have some pending handshake we cancel it and try to check the
|
||||
// latest handshake error this way we will always call on_handshake with the
|
||||
// latest error before closing this should always call
|
||||
// secureConnection/secure before close if we remove this here, we will need
|
||||
// to do this check on every on_close event on sockets, fetch etc and will
|
||||
// increase complexity on a lot of places
|
||||
if (context->on_handshake != NULL) {
|
||||
struct us_bun_verify_error_t verify_error = us_internal_verify_error(s);
|
||||
context->on_handshake(s, 0, verify_error, context->handshake_data);
|
||||
}
|
||||
}
|
||||
|
||||
return (struct us_internal_ssl_socket_t *)us_socket_close(
|
||||
0, (struct us_socket_t *)s, code, reason);
|
||||
}
|
||||
|
||||
void us_internal_ssl_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
@@ -232,7 +254,6 @@ void us_internal_ssl_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
|
||||
// will start on_open, on_writable or on_data
|
||||
if (!s->ssl) {
|
||||
s->pending_handshake = 1;
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -245,7 +266,8 @@ void us_internal_ssl_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
loop_ssl_data->ssl_socket = &s->s;
|
||||
loop_ssl_data->msg_more = 0;
|
||||
|
||||
if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s)) {
|
||||
if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s) ||
|
||||
SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
|
||||
s->pending_handshake = 0;
|
||||
|
||||
struct us_bun_verify_error_t verify_error = (struct us_bun_verify_error_t){
|
||||
@@ -257,6 +279,12 @@ void us_internal_ssl_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
}
|
||||
|
||||
int result = SSL_do_handshake(s->ssl);
|
||||
|
||||
if (SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
|
||||
s->received_ssl_shutdown = 1;
|
||||
us_internal_ssl_socket_close(s, 0, NULL);
|
||||
return;
|
||||
}
|
||||
if (result <= 0) {
|
||||
int err = SSL_get_error(s->ssl, result);
|
||||
// as far as I know these are the only errors we want to handle
|
||||
@@ -274,16 +302,14 @@ void us_internal_ssl_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
on_handshake(s, 0, verify_error, custom_data);
|
||||
}
|
||||
return;
|
||||
} else {
|
||||
|
||||
s->pending_handshake = 1;
|
||||
context->on_handshake = on_handshake;
|
||||
context->handshake_data = custom_data;
|
||||
// Ensure that we'll cycle through internal openssl's state
|
||||
if (!us_socket_is_closed(0, &s->s) &&
|
||||
!us_internal_ssl_socket_is_shut_down(s)) {
|
||||
us_socket_write(1, loop_ssl_data->ssl_socket, "\0", 0, 0);
|
||||
}
|
||||
}
|
||||
s->pending_handshake = 1;
|
||||
context->on_handshake = on_handshake;
|
||||
context->handshake_data = custom_data;
|
||||
// Ensure that we'll cycle through internal openssl's state
|
||||
if (!us_socket_is_closed(0, &s->s) &&
|
||||
!us_internal_ssl_socket_is_shut_down(s)) {
|
||||
us_socket_write(1, loop_ssl_data->ssl_socket, "\0", 0, 0);
|
||||
}
|
||||
} else {
|
||||
s->pending_handshake = 0;
|
||||
@@ -301,31 +327,12 @@ void us_internal_ssl_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
}
|
||||
}
|
||||
|
||||
struct us_internal_ssl_socket_t *
|
||||
us_internal_ssl_socket_close(struct us_internal_ssl_socket_t *s, int code,
|
||||
void *reason) {
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
|
||||
if (s->pending_handshake) {
|
||||
s->pending_handshake = 0;
|
||||
if (context->on_handshake != NULL) {
|
||||
struct us_bun_verify_error_t verify_error = us_internal_verify_error(s);
|
||||
context->on_handshake(s, 0, verify_error, context->handshake_data);
|
||||
}
|
||||
}
|
||||
|
||||
return (struct us_internal_ssl_socket_t *)us_socket_close(
|
||||
0, (struct us_socket_t *)s, code, reason);
|
||||
}
|
||||
|
||||
struct us_internal_ssl_socket_t *
|
||||
ssl_on_close(struct us_internal_ssl_socket_t *s, int code, void *reason) {
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
if (s->pending_handshake) {
|
||||
s->pending_handshake = 0;
|
||||
}
|
||||
|
||||
s->pending_handshake = 0;
|
||||
SSL_free(s->ssl);
|
||||
|
||||
return context->on_close(s, code, reason);
|
||||
@@ -345,7 +352,6 @@ ssl_on_end(struct us_internal_ssl_socket_t *s) {
|
||||
// this whole function needs a complete clean-up
|
||||
struct us_internal_ssl_socket_t *ssl_on_data(struct us_internal_ssl_socket_t *s,
|
||||
void *data, int length) {
|
||||
|
||||
// note: this context can change when we adopt the socket!
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
@@ -365,8 +371,8 @@ struct us_internal_ssl_socket_t *ssl_on_data(struct us_internal_ssl_socket_t *s,
|
||||
loop_ssl_data->ssl_socket = &s->s;
|
||||
loop_ssl_data->msg_more = 0;
|
||||
|
||||
if (us_socket_is_closed(0, &s->s)) {
|
||||
return s;
|
||||
if (us_socket_is_closed(0, &s->s) || s->received_ssl_shutdown) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (us_internal_ssl_socket_is_shut_down(s)) {
|
||||
@@ -397,11 +403,18 @@ struct us_internal_ssl_socket_t *ssl_on_data(struct us_internal_ssl_socket_t *s,
|
||||
// check-ups
|
||||
int read = 0;
|
||||
restart:
|
||||
while (1) {
|
||||
// read until shutdown
|
||||
while (!s->received_ssl_shutdown) {
|
||||
int just_read = SSL_read(s->ssl,
|
||||
loop_ssl_data->ssl_read_output +
|
||||
LIBUS_RECV_BUFFER_PADDING + read,
|
||||
LIBUS_RECV_BUFFER_LENGTH - read);
|
||||
// we need to check if we received a shutdown here
|
||||
if (SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
|
||||
s->pending_handshake = 0;
|
||||
s->received_ssl_shutdown = 1;
|
||||
// we will only close after we handle the data and errors
|
||||
}
|
||||
if (just_read <= 0) {
|
||||
int err = SSL_get_error(s->ssl, just_read);
|
||||
|
||||
@@ -419,7 +432,7 @@ restart:
|
||||
s = context->on_data(
|
||||
s, loop_ssl_data->ssl_read_output + LIBUS_RECV_BUFFER_PADDING,
|
||||
read);
|
||||
if (us_socket_is_closed(0, &s->s)) {
|
||||
if (!s || us_socket_is_closed(0, &s->s)) {
|
||||
return s;
|
||||
}
|
||||
}
|
||||
@@ -458,7 +471,7 @@ restart:
|
||||
s = context->on_data(
|
||||
s, loop_ssl_data->ssl_read_output + LIBUS_RECV_BUFFER_PADDING,
|
||||
read);
|
||||
if (us_socket_is_closed(0, &s->s)) {
|
||||
if (!s || us_socket_is_closed(0, &s->s)) {
|
||||
return s;
|
||||
}
|
||||
|
||||
@@ -478,7 +491,7 @@ restart:
|
||||
// emit data and restart
|
||||
s = context->on_data(
|
||||
s, loop_ssl_data->ssl_read_output + LIBUS_RECV_BUFFER_PADDING, read);
|
||||
if (us_socket_is_closed(0, &s->s)) {
|
||||
if (!s || us_socket_is_closed(0, &s->s)) {
|
||||
return s;
|
||||
}
|
||||
|
||||
@@ -486,7 +499,11 @@ restart:
|
||||
goto restart;
|
||||
}
|
||||
}
|
||||
|
||||
// we received the shutdown after reading so we close
|
||||
if (s->received_ssl_shutdown) {
|
||||
us_internal_ssl_socket_close(s, 0, NULL);
|
||||
return NULL;
|
||||
}
|
||||
// trigger writable if we failed last write with want read
|
||||
if (s->ssl_write_wants_read) {
|
||||
s->ssl_write_wants_read = 0;
|
||||
@@ -504,24 +521,11 @@ restart:
|
||||
}
|
||||
}
|
||||
|
||||
// check this then?
|
||||
if (SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
|
||||
// printf("SSL_RECEIVED_SHUTDOWN\n");
|
||||
|
||||
// exit(-2);
|
||||
|
||||
// not correct anyways!
|
||||
s = us_internal_ssl_socket_close(s, 0, NULL);
|
||||
|
||||
// us_
|
||||
}
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
struct us_internal_ssl_socket_t *
|
||||
ssl_on_writable(struct us_internal_ssl_socket_t *s) {
|
||||
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
|
||||
@@ -544,7 +548,6 @@ ssl_on_writable(struct us_internal_ssl_socket_t *s) {
|
||||
0); // cast here!
|
||||
}
|
||||
|
||||
|
||||
// Do not call on_writable if the socket is closed.
|
||||
// on close means the socket data is no longer accessible
|
||||
if (!s || us_socket_is_closed(0, &s->s)) {
|
||||
@@ -943,7 +946,9 @@ const char *us_X509_error_code(long err) { // NOLINT(runtime/int)
|
||||
|
||||
long us_internal_verify_peer_certificate( // NOLINT(runtime/int)
|
||||
const SSL *ssl,
|
||||
long def) { // NOLINT(runtime/int)
|
||||
long def) { // NOLINT(runtime/int)
|
||||
if (!ssl)
|
||||
return def;
|
||||
long err = def; // NOLINT(runtime/int)
|
||||
X509 *peer_cert = SSL_get_peer_certificate(ssl);
|
||||
if (peer_cert) {
|
||||
@@ -967,8 +972,8 @@ long us_internal_verify_peer_certificate( // NOLINT(runtime/int)
|
||||
|
||||
struct us_bun_verify_error_t
|
||||
us_internal_verify_error(struct us_internal_ssl_socket_t *s) {
|
||||
|
||||
if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s)) {
|
||||
if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s) ||
|
||||
s->received_ssl_shutdown) {
|
||||
return (struct us_bun_verify_error_t){
|
||||
.error = 0, .code = NULL, .reason = NULL};
|
||||
}
|
||||
@@ -1219,10 +1224,10 @@ void us_internal_ssl_socket_context_add_server_name(
|
||||
if (ssl_context) {
|
||||
/* Attach the user data to this context */
|
||||
if (1 != SSL_CTX_set_ex_data(ssl_context, 0, user)) {
|
||||
#if BUN_DEBUG
|
||||
printf("CANNOT SET EX DATA!\n");
|
||||
abort();
|
||||
#endif
|
||||
#if BUN_DEBUG
|
||||
printf("CANNOT SET EX DATA!\n");
|
||||
abort();
|
||||
#endif
|
||||
}
|
||||
|
||||
/* * We do not want to hold any nullptr's in our SNI tree */
|
||||
@@ -1442,8 +1447,8 @@ struct us_listen_socket_t *us_internal_ssl_socket_context_listen(
|
||||
}
|
||||
|
||||
struct us_listen_socket_t *us_internal_ssl_socket_context_listen_unix(
|
||||
struct us_internal_ssl_socket_context_t *context, const char *path, size_t pathlen,
|
||||
int options, int socket_ext_size) {
|
||||
struct us_internal_ssl_socket_context_t *context, const char *path,
|
||||
size_t pathlen, int options, int socket_ext_size) {
|
||||
return us_socket_context_listen_unix(0, &context->sc, path, pathlen, options,
|
||||
sizeof(struct us_internal_ssl_socket_t) -
|
||||
sizeof(struct us_socket_t) +
|
||||
@@ -1460,8 +1465,8 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect(
|
||||
}
|
||||
|
||||
struct us_internal_ssl_socket_t *us_internal_ssl_socket_context_connect_unix(
|
||||
struct us_internal_ssl_socket_context_t *context, const char *server_path, size_t pathlen,
|
||||
int options, int socket_ext_size) {
|
||||
struct us_internal_ssl_socket_context_t *context, const char *server_path,
|
||||
size_t pathlen, int options, int socket_ext_size) {
|
||||
return (struct us_internal_ssl_socket_t *)us_socket_context_connect_unix(
|
||||
0, &context->sc, server_path, pathlen, options,
|
||||
sizeof(struct us_internal_ssl_socket_t) - sizeof(struct us_socket_t) +
|
||||
@@ -1933,7 +1938,8 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_wrap_with_tls(
|
||||
socket->ssl = NULL;
|
||||
socket->ssl_write_wants_read = 0;
|
||||
socket->ssl_read_wants_write = 0;
|
||||
|
||||
socket->pending_handshake = 1;
|
||||
socket->received_ssl_shutdown = 0;
|
||||
return socket;
|
||||
}
|
||||
|
||||
|
||||
@@ -91,6 +91,10 @@ void us_poll_start(struct us_poll_t *p, struct us_loop_t *loop, int events) {
|
||||
((events & LIBUS_SOCKET_WRITABLE) ? POLL_TYPE_POLLING_OUT : 0);
|
||||
|
||||
uv_poll_init_socket(loop->uv_loop, p->uv_p, p->fd);
|
||||
// This unref is okay in the context of Bun's event loop, because sockets have
|
||||
// a `Async.KeepAlive` associated with them, which is used instead of the
|
||||
// usockets internals. usockets doesnt have a notion of ref-counted handles.
|
||||
uv_unref((uv_handle_t *)p->uv_p);
|
||||
uv_poll_start(p->uv_p, events, poll_cb);
|
||||
}
|
||||
|
||||
@@ -211,7 +215,7 @@ struct us_poll_t *us_create_poll(struct us_loop_t *loop, int fallthrough,
|
||||
return p;
|
||||
}
|
||||
|
||||
/* If we update our block position we have to updarte the uv_poll data to point
|
||||
/* If we update our block position we have to update the uv_poll data to point
|
||||
* to us */
|
||||
struct us_poll_t *us_poll_resize(struct us_poll_t *p, struct us_loop_t *loop,
|
||||
unsigned int ext_size) {
|
||||
@@ -225,8 +229,8 @@ struct us_poll_t *us_poll_resize(struct us_poll_t *p, struct us_loop_t *loop,
|
||||
// timer
|
||||
struct us_timer_t *us_create_timer(struct us_loop_t *loop, int fallthrough,
|
||||
unsigned int ext_size) {
|
||||
struct us_internal_callback_t *cb = us_calloc(1,
|
||||
sizeof(struct us_internal_callback_t) + sizeof(uv_timer_t) + ext_size);
|
||||
struct us_internal_callback_t *cb = us_calloc(
|
||||
1, sizeof(struct us_internal_callback_t) + sizeof(uv_timer_t) + ext_size);
|
||||
|
||||
cb->loop = loop;
|
||||
cb->cb_expects_the_loop = 0; // never read?
|
||||
@@ -288,8 +292,8 @@ struct us_loop_t *us_timer_loop(struct us_timer_t *t) {
|
||||
struct us_internal_async *us_internal_create_async(struct us_loop_t *loop,
|
||||
int fallthrough,
|
||||
unsigned int ext_size) {
|
||||
struct us_internal_callback_t *cb = us_calloc(1,
|
||||
sizeof(struct us_internal_callback_t) + sizeof(uv_async_t) + ext_size);
|
||||
struct us_internal_callback_t *cb = us_calloc(
|
||||
1, sizeof(struct us_internal_callback_t) + sizeof(uv_async_t) + ext_size);
|
||||
|
||||
cb->loop = loop;
|
||||
return (struct us_internal_async *)cb;
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// clang-format off
|
||||
#include "libusockets.h"
|
||||
#include "internal/internal.h"
|
||||
#include <stdlib.h>
|
||||
@@ -380,7 +380,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int events)
|
||||
}
|
||||
|
||||
break;
|
||||
} while (1);
|
||||
} while (s);
|
||||
}
|
||||
|
||||
/* Such as epollerr epollhup */
|
||||
|
||||
@@ -8,8 +8,8 @@
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
@@ -19,7 +19,7 @@
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
// clang-format off
|
||||
#include "libuwebsockets.h"
|
||||
#include <string_view>
|
||||
#include "App.h"
|
||||
@@ -976,7 +976,7 @@ extern "C"
|
||||
return value.length();
|
||||
}
|
||||
#endif
|
||||
uws_try_end_result_t uws_res_try_end(int ssl, uws_res_t *res, const char *data, size_t length, uintmax_t total_size, bool close_connection)
|
||||
uws_try_end_result_t uws_res_try_end(int ssl, uws_res_t *res, const char *data, size_t length, uint64_t total_size, bool close_connection)
|
||||
{
|
||||
if (ssl)
|
||||
{
|
||||
@@ -1123,7 +1123,7 @@ extern "C"
|
||||
uWS::HttpResponse<false> *uwsRes = (uWS::HttpResponse<false> *)res;
|
||||
return uwsRes->write(std::string_view(data, length));
|
||||
}
|
||||
uintmax_t uws_res_get_write_offset(int ssl, uws_res_t *res)
|
||||
uint64_t uws_res_get_write_offset(int ssl, uws_res_t *res)
|
||||
{
|
||||
if (ssl)
|
||||
{
|
||||
@@ -1133,7 +1133,7 @@ extern "C"
|
||||
uWS::HttpResponse<false> *uwsRes = (uWS::HttpResponse<false> *)res;
|
||||
return uwsRes->getWriteOffset();
|
||||
}
|
||||
void uws_res_override_write_offset(int ssl, uws_res_t *res, uintmax_t offset)
|
||||
void uws_res_override_write_offset(int ssl, uws_res_t *res, uint64_t offset)
|
||||
{
|
||||
if (ssl)
|
||||
{
|
||||
@@ -1157,18 +1157,18 @@ extern "C"
|
||||
return uwsRes->hasResponded();
|
||||
}
|
||||
|
||||
void uws_res_on_writable(int ssl, uws_res_t *res, bool (*handler)(uws_res_t *res, uintmax_t, void *optional_data), void *optional_data)
|
||||
void uws_res_on_writable(int ssl, uws_res_t *res, bool (*handler)(uws_res_t *res, uint64_t, void *optional_data), void *optional_data)
|
||||
{
|
||||
if (ssl)
|
||||
{
|
||||
uWS::HttpResponse<true> *uwsRes = (uWS::HttpResponse<true> *)res;
|
||||
uwsRes->onWritable([handler, res, optional_data](uintmax_t a)
|
||||
uwsRes->onWritable([handler, res, optional_data](uint64_t a)
|
||||
{ return handler(res, a, optional_data); });
|
||||
}
|
||||
else
|
||||
{
|
||||
uWS::HttpResponse<false> *uwsRes = (uWS::HttpResponse<false> *)res;
|
||||
uwsRes->onWritable([handler, res, optional_data](uintmax_t a)
|
||||
uwsRes->onWritable([handler, res, optional_data](uint64_t a)
|
||||
{ return handler(res, a, optional_data); });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
/*
|
||||
* Copyright 2022 Ciro Spaciari
|
||||
*
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
@@ -19,7 +19,7 @@
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
// clang-format off
|
||||
#ifndef LIBUWS_CAPI_HEADER
|
||||
#define LIBUWS_CAPI_HEADER
|
||||
|
||||
@@ -209,7 +209,7 @@ extern "C"
|
||||
|
||||
//Response
|
||||
DLL_EXPORT void uws_res_end(int ssl, uws_res_t *res, const char *data, size_t length, bool close_connection);
|
||||
DLL_EXPORT uws_try_end_result_t uws_res_try_end(int ssl, uws_res_t *res, const char *data, size_t length, uintmax_t total_size, bool close_connection);
|
||||
DLL_EXPORT uws_try_end_result_t uws_res_try_end(int ssl, uws_res_t *res, const char *data, size_t length, uint64_t total_size, bool close_connection);
|
||||
DLL_EXPORT void uws_res_cork(int ssl, uws_res_t *res, void(*callback)(uws_res_t *res, void* user_data) ,void* user_data);
|
||||
DLL_EXPORT void uws_res_pause(int ssl, uws_res_t *res);
|
||||
DLL_EXPORT void uws_res_resume(int ssl, uws_res_t *res);
|
||||
@@ -220,10 +220,10 @@ extern "C"
|
||||
DLL_EXPORT void uws_res_write_header_int(int ssl, uws_res_t *res, const char *key, size_t key_length, uint64_t value);
|
||||
DLL_EXPORT void uws_res_end_without_body(int ssl, uws_res_t *res, bool close_connection);
|
||||
DLL_EXPORT bool uws_res_write(int ssl, uws_res_t *res, const char *data, size_t length);
|
||||
DLL_EXPORT uintmax_t uws_res_get_write_offset(int ssl, uws_res_t *res);
|
||||
DLL_EXPORT void uws_res_override_write_offset(int ssl, uws_res_t *res, uintmax_t offset);
|
||||
DLL_EXPORT uint64_t uws_res_get_write_offset(int ssl, uws_res_t *res);
|
||||
DLL_EXPORT void uws_res_override_write_offset(int ssl, uws_res_t *res, uint64_t offset);
|
||||
DLL_EXPORT bool uws_res_has_responded(int ssl, uws_res_t *res);
|
||||
DLL_EXPORT void uws_res_on_writable(int ssl, uws_res_t *res, bool (*handler)(uws_res_t *res, uintmax_t, void *optional_data), void *user_data);
|
||||
DLL_EXPORT void uws_res_on_writable(int ssl, uws_res_t *res, bool (*handler)(uws_res_t *res, uint64_t, void *optional_data), void *user_data);
|
||||
DLL_EXPORT void uws_res_on_aborted(int ssl, uws_res_t *res, void (*handler)(uws_res_t *res, void *optional_data), void *optional_data);
|
||||
DLL_EXPORT void uws_res_on_data(int ssl, uws_res_t *res, void (*handler)(uws_res_t *res, const char *chunk, size_t chunk_length, bool is_end, void *optional_data), void *optional_data);
|
||||
DLL_EXPORT void uws_res_upgrade(int ssl, uws_res_t *res, void *data, const char *sec_web_socket_key, size_t sec_web_socket_key_length, const char *sec_web_socket_protocol, size_t sec_web_socket_protocol_length, const char *sec_web_socket_extensions, size_t sec_web_socket_extensions_length, uws_socket_context_t *ws);
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// clang-format off
|
||||
#ifndef UWS_ASYNCSOCKET_H
|
||||
#define UWS_ASYNCSOCKET_H
|
||||
|
||||
@@ -255,10 +255,8 @@ public:
|
||||
if (asyncSocketData->buffer.length()) {
|
||||
/* Write off as much as we can */
|
||||
int written = us_socket_write(SSL, (us_socket_t *) this, asyncSocketData->buffer.data(), (int) asyncSocketData->buffer.length(), /*nextLength != 0 | */length);
|
||||
|
||||
/* On failure return, otherwise continue down the function */
|
||||
if ((unsigned int) written < asyncSocketData->buffer.length()) {
|
||||
|
||||
/* Update buffering (todo: we can do better here if we keep track of what happens to this guy later on) */
|
||||
asyncSocketData->buffer.erase((unsigned int) written);
|
||||
|
||||
@@ -268,7 +266,6 @@ public:
|
||||
} else {
|
||||
/* This path is horrible and points towards erroneous usage */
|
||||
asyncSocketData->buffer.append(src, (unsigned int) length);
|
||||
|
||||
return {length, true};
|
||||
}
|
||||
}
|
||||
@@ -310,7 +307,6 @@ public:
|
||||
if (optionally) {
|
||||
return {written, true};
|
||||
}
|
||||
|
||||
/* Fall back to worst possible case (should be very rare for HTTP) */
|
||||
/* At least we can reserve room for next chunk if we know it up front */
|
||||
if (nextLength) {
|
||||
@@ -344,7 +340,7 @@ public:
|
||||
auto [written, failed] = write(loopData->corkBuffer, (int) loopData->corkOffset, false, length);
|
||||
loopData->corkOffset = 0;
|
||||
|
||||
if (failed) {
|
||||
if (failed && optionally) {
|
||||
/* We do not need to care for buffering here, write does that */
|
||||
return {0, true};
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ extern "C" {
|
||||
}
|
||||
|
||||
#include "Http3ResponseData.h"
|
||||
|
||||
// clang-format off
|
||||
namespace uWS {
|
||||
|
||||
/* Is a quic stream */
|
||||
@@ -40,7 +40,7 @@ namespace uWS {
|
||||
return this;
|
||||
}
|
||||
|
||||
std::pair<bool, bool> tryEnd(std::string_view data, uintmax_t totalSize = 0) {
|
||||
std::pair<bool, bool> tryEnd(std::string_view data, uint64_t totalSize = 0) {
|
||||
Http3ResponseData *responseData = (Http3ResponseData *) us_quic_stream_ext((us_quic_stream_t *) this);
|
||||
|
||||
writeStatus("200 OK");
|
||||
@@ -109,7 +109,7 @@ namespace uWS {
|
||||
return this;
|
||||
}
|
||||
|
||||
Http3Response *onWritable(MoveOnlyFunction<bool(uintmax_t)> &&handler) {
|
||||
Http3Response *onWritable(MoveOnlyFunction<bool(uint64_t)> &&handler) {
|
||||
Http3ResponseData *responseData = (Http3ResponseData *) us_quic_stream_ext((us_quic_stream_t *) this);
|
||||
|
||||
responseData->onWritable = std::move(handler);
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
#ifndef UWS_H3RESPONSEDATA_H
|
||||
#define UWS_H3RESPONSEDATA_H
|
||||
|
||||
#include "MoveOnlyFunction.h"
|
||||
#include "AsyncSocketData.h"
|
||||
#include "MoveOnlyFunction.h"
|
||||
#include <string_view>
|
||||
|
||||
// clang-format off
|
||||
namespace uWS {
|
||||
struct Http3ResponseData {
|
||||
|
||||
MoveOnlyFunction<void()> onAborted = nullptr;
|
||||
MoveOnlyFunction<void(std::string_view, bool)> onData = nullptr;
|
||||
MoveOnlyFunction<bool(uintmax_t)> onWritable = nullptr;
|
||||
MoveOnlyFunction<bool(uint64_t)> onWritable = nullptr;
|
||||
|
||||
/* Status is always first header just like for h1 */
|
||||
unsigned int headerOffset = 0;
|
||||
|
||||
/* Write offset */
|
||||
uintmax_t offset = 0;
|
||||
uint64_t offset = 0;
|
||||
|
||||
BackPressure backpressure;
|
||||
};
|
||||
|
||||
@@ -374,9 +374,7 @@ private:
|
||||
return s;
|
||||
}
|
||||
|
||||
/* We don't want to fall through since we don't want to mess with timeout.
|
||||
* It makes little sense to drain any backpressure when the user has registered onWritable. */
|
||||
return s;
|
||||
/* We need to drain any remaining buffered data if success == true*/
|
||||
}
|
||||
|
||||
/* Drain any socket buffer, this might empty our backpressure and thus finish the request */
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// clang-format off
|
||||
#ifndef UWS_HTTPRESPONSE_H
|
||||
#define UWS_HTTPRESPONSE_H
|
||||
|
||||
@@ -87,7 +87,7 @@ public:
|
||||
|
||||
/* Returns true on success, indicating that it might be feasible to write more data.
|
||||
* Will start timeout if stream reaches totalSize or write failure. */
|
||||
bool internalEnd(std::string_view data, uintmax_t totalSize, bool optional, bool allowContentLength = true, bool closeConnection = false) {
|
||||
bool internalEnd(std::string_view data, uint64_t totalSize, bool optional, bool allowContentLength = true, bool closeConnection = false) {
|
||||
/* Write status if not already done */
|
||||
writeStatus(HTTP_200_OK);
|
||||
|
||||
@@ -435,7 +435,7 @@ public:
|
||||
|
||||
/* Try and end the response. Returns [true, true] on success.
|
||||
* Starts a timeout in some cases. Returns [ok, hasResponded] */
|
||||
std::pair<bool, bool> tryEnd(std::string_view data, uintmax_t totalSize = 0, bool closeConnection = false) {
|
||||
std::pair<bool, bool> tryEnd(std::string_view data, uint64_t totalSize = 0, bool closeConnection = false) {
|
||||
return {internalEnd(data, totalSize, true, true, closeConnection), hasResponded()};
|
||||
}
|
||||
|
||||
@@ -491,14 +491,14 @@ public:
|
||||
}
|
||||
|
||||
/* Get the current byte write offset for this Http response */
|
||||
uintmax_t getWriteOffset() {
|
||||
uint64_t getWriteOffset() {
|
||||
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
|
||||
|
||||
return httpResponseData->offset;
|
||||
}
|
||||
|
||||
/* If you are messing around with sendfile you might want to override the offset. */
|
||||
void overrideWriteOffset(uintmax_t offset) {
|
||||
void overrideWriteOffset(uint64_t offset) {
|
||||
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
|
||||
|
||||
httpResponseData->offset = offset;
|
||||
@@ -566,13 +566,21 @@ public:
|
||||
}
|
||||
|
||||
/* Attach handler for writable HTTP response */
|
||||
HttpResponse *onWritable(MoveOnlyFunction<bool(uintmax_t)> &&handler) {
|
||||
HttpResponse *onWritable(MoveOnlyFunction<bool(uint64_t)> &&handler) {
|
||||
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
|
||||
|
||||
httpResponseData->onWritable = std::move(handler);
|
||||
return this;
|
||||
}
|
||||
|
||||
/* Remove handler for writable HTTP response */
|
||||
HttpResponse *clearOnWritable() {
|
||||
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
|
||||
|
||||
httpResponseData->onWritable = nullptr;
|
||||
return this;
|
||||
}
|
||||
|
||||
/* Attach handler for aborted HTTP request */
|
||||
HttpResponse *onAborted(MoveOnlyFunction<void()> &&handler) {
|
||||
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
|
||||
@@ -591,7 +599,7 @@ public:
|
||||
}
|
||||
|
||||
|
||||
void setWriteOffset(uintmax_t offset) {
|
||||
void setWriteOffset(uint64_t offset) {
|
||||
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
|
||||
|
||||
httpResponseData->offset = offset;
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// clang-format off
|
||||
#ifndef UWS_HTTPRESPONSEDATA_H
|
||||
#define UWS_HTTPRESPONSEDATA_H
|
||||
|
||||
@@ -46,12 +46,12 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
|
||||
}
|
||||
|
||||
/* Caller of onWritable. It is possible onWritable calls markDone so we need to borrow it. */
|
||||
bool callOnWritable(uintmax_t offset) {
|
||||
bool callOnWritable(uint64_t offset) {
|
||||
/* Borrow real onWritable */
|
||||
MoveOnlyFunction<bool(uintmax_t)> borrowedOnWritable = std::move(onWritable);
|
||||
MoveOnlyFunction<bool(uint64_t)> borrowedOnWritable = std::move(onWritable);
|
||||
|
||||
/* Set onWritable to placeholder */
|
||||
onWritable = [](uintmax_t) {return true;};
|
||||
onWritable = [](uint64_t) {return true;};
|
||||
|
||||
/* Run borrowed onWritable */
|
||||
bool ret = borrowedOnWritable(offset);
|
||||
@@ -75,11 +75,11 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
|
||||
};
|
||||
|
||||
/* Per socket event handlers */
|
||||
MoveOnlyFunction<bool(uintmax_t)> onWritable;
|
||||
MoveOnlyFunction<bool(uint64_t)> onWritable;
|
||||
MoveOnlyFunction<void()> onAborted;
|
||||
MoveOnlyFunction<void(std::string_view, bool)> inStream; // onData
|
||||
/* Outgoing offset */
|
||||
uintmax_t offset = 0;
|
||||
uint64_t offset = 0;
|
||||
|
||||
/* Let's track number of bytes since last timeout reset in data handler */
|
||||
unsigned int received_bytes_per_timeout = 0;
|
||||
|
||||
BIN
packages/bun-vscode/bun.lockb
Normal file → Executable file
BIN
packages/bun-vscode/bun.lockb
Normal file → Executable file
Binary file not shown.
Binary file not shown.
@@ -5,7 +5,7 @@ Push-Location (Join-Path $BUN_DEPS_DIR 'libarchive')
|
||||
try {
|
||||
Set-Location (mkdir -Force build)
|
||||
|
||||
Run cmake @CMAKE_FLAGS -DBUILD_SHARED_LIBS=OFF -DENABLE_TEST=OFF -DENABLE_INSTALL=OFF -DENABLE_WERROR=0 ..
|
||||
Run cmake @CMAKE_FLAGS -DBUILD_SHARED_LIBS=OFF -DENABLE_TEST=OFF -DENABLE_INSTALL=OFF -DENABLE_WERROR=0 -DENABLE_ICONV=0 -DENABLE_LibGCC=0 -DENABLE_LZMA=0 -DENABLE_LZ4=0 -DENABLE_LIBXML2=0 -DENABLE_LIBB2=0 -DENABLE_OPENSSL=0 -DENABLE_CAT=0 ..
|
||||
Run cmake --build . --clean-first --config Release
|
||||
|
||||
Copy-Item libarchive\archive_static.lib $BUN_DEPS_OUT_DIR\archive.lib
|
||||
|
||||
@@ -37,7 +37,6 @@ task() {
|
||||
fi
|
||||
}
|
||||
|
||||
task bun ./src/codegen/bundle-functions.ts --debug=OFF "$OUT"
|
||||
task bun ./src/codegen/bundle-modules.ts --debug=OFF "$OUT"
|
||||
|
||||
rm -rf "$OUT/tmp_functions"
|
||||
|
||||
@@ -43,4 +43,7 @@ mkdir -p $BUN_DEPS_OUT_DIR
|
||||
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
|
||||
echo "C Compiler: ${CC}"
|
||||
echo "C++ Compiler: ${CXX}"
|
||||
if [[ $(uname -s) == 'Darwin' ]]; then
|
||||
echo "OSX Deployment Target: ${CMAKE_OSX_DEPLOYMENT_TARGET}"
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -10,8 +10,6 @@ const Global = bun.Global;
|
||||
const Environment = bun.Environment;
|
||||
const Syscall = bun.sys;
|
||||
|
||||
const exe_suffix = bun.exe_suffix;
|
||||
|
||||
const w = std.os.windows;
|
||||
|
||||
pub const StandaloneModuleGraph = struct {
|
||||
@@ -277,19 +275,16 @@ pub const StandaloneModuleGraph = struct {
|
||||
}.toClean;
|
||||
|
||||
const cloned_executable_fd: bun.FileDescriptor = brk: {
|
||||
var self_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined;
|
||||
const self_exe = std.fs.selfExePath(&self_buf) catch |err| {
|
||||
const self_exe = bun.selfExePath() catch |err| {
|
||||
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to get self executable path: {s}", .{@errorName(err)});
|
||||
Global.exit(1);
|
||||
};
|
||||
self_buf[self_exe.len] = 0;
|
||||
const self_exeZ = self_buf[0..self_exe.len :0];
|
||||
|
||||
if (comptime Environment.isWindows) {
|
||||
// copy self and then open it for writing
|
||||
|
||||
var in_buf: bun.WPathBuffer = undefined;
|
||||
strings.copyU8IntoU16(&in_buf, self_exeZ);
|
||||
strings.copyU8IntoU16(&in_buf, self_exe);
|
||||
in_buf[self_exe.len] = 0;
|
||||
const in = in_buf[0..self_exe.len :0];
|
||||
var out_buf: bun.WPathBuffer = undefined;
|
||||
@@ -301,7 +296,6 @@ pub const StandaloneModuleGraph = struct {
|
||||
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to copy bun executable into temporary file: {s}", .{@errorName(err)});
|
||||
Global.exit(1);
|
||||
};
|
||||
|
||||
const file = bun.sys.openFileAtWindows(
|
||||
bun.invalid_fd,
|
||||
out,
|
||||
@@ -322,7 +316,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
if (comptime Environment.isMac) {
|
||||
// if we're on a mac, use clonefile() if we can
|
||||
// failure is okay, clonefile is just a fast path.
|
||||
if (Syscall.clonefile(self_exeZ, zname) == .result) {
|
||||
if (Syscall.clonefile(self_exe, zname) == .result) {
|
||||
switch (Syscall.open(zname, std.os.O.RDWR | std.os.O.CLOEXEC, 0)) {
|
||||
.result => |res| break :brk res,
|
||||
.err => {},
|
||||
@@ -376,7 +370,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
};
|
||||
const self_fd = brk2: {
|
||||
for (0..3) |retry| {
|
||||
switch (Syscall.open(self_exeZ, std.os.O.CLOEXEC | std.os.O.RDONLY, 0)) {
|
||||
switch (Syscall.open(self_exe, std.os.O.CLOEXEC | std.os.O.RDONLY, 0)) {
|
||||
.result => |res| break :brk2 res,
|
||||
.err => |err| {
|
||||
if (retry < 2) {
|
||||
@@ -665,30 +659,32 @@ pub const StandaloneModuleGraph = struct {
|
||||
return try StandaloneModuleGraph.fromBytes(allocator, to_read, offsets);
|
||||
}
|
||||
|
||||
fn isBuiltInExe(argv0: []const u8) bool {
|
||||
/// heuristic: `bun build --compile` won't be supported if the name is "bun", "bunx", or "node".
|
||||
/// this is a cheap way to avoid the extra overhead of opening the executable, and also just makes sense.
|
||||
fn isBuiltInExe(comptime T: type, argv0: []const T) bool {
|
||||
if (argv0.len == 0) return false;
|
||||
|
||||
if (argv0.len == 3) {
|
||||
if (bun.strings.eqlComptimeIgnoreLen(argv0, "bun" ++ exe_suffix)) {
|
||||
if (bun.strings.eqlComptimeCheckLenWithType(T, argv0, bun.strings.literal(T, "bun"), false)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
if (argv0.len == 4) {
|
||||
if (bun.strings.eqlComptimeIgnoreLen(argv0, "bunx" ++ exe_suffix)) {
|
||||
if (bun.strings.eqlComptimeCheckLenWithType(T, argv0, bun.strings.literal(T, "bunx"), false)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (bun.strings.eqlComptimeIgnoreLen(argv0, "node" ++ exe_suffix)) {
|
||||
if (bun.strings.eqlComptimeCheckLenWithType(T, argv0, bun.strings.literal(T, "node"), false)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
if (comptime Environment.isDebug) {
|
||||
if (bun.strings.eqlComptime(argv0, "bun-debug")) {
|
||||
if (bun.strings.eqlComptimeCheckLenWithType(T, argv0, bun.strings.literal(T, "bun-debug"), true)) {
|
||||
return true;
|
||||
}
|
||||
if (bun.strings.eqlComptime(argv0, "bun-debugx")) {
|
||||
if (bun.strings.eqlComptimeCheckLenWithType(T, argv0, bun.strings.literal(T, "bun-debugx"), true)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -697,13 +693,10 @@ pub const StandaloneModuleGraph = struct {
|
||||
}
|
||||
|
||||
fn openSelf() std.fs.OpenSelfExeError!bun.FileDescriptor {
|
||||
// heuristic: `bun build --compile` won't be supported if the name is "bun", "bunx", or "node".
|
||||
// this is a cheap way to avoid the extra overhead
|
||||
// of opening the executable and also just makes sense.
|
||||
if (!Environment.isWindows) {
|
||||
const argv = bun.argv();
|
||||
if (argv.len > 0) {
|
||||
if (isBuiltInExe(argv[0])) {
|
||||
if (isBuiltInExe(u8, argv[0])) {
|
||||
return error.FileNotFound;
|
||||
}
|
||||
}
|
||||
@@ -733,10 +726,8 @@ pub const StandaloneModuleGraph = struct {
|
||||
.mac => {
|
||||
// Use of MAX_PATH_BYTES here is valid as the resulting path is immediately
|
||||
// opened with no modification.
|
||||
var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
|
||||
const self_exe_path = try std.fs.selfExePath(&buf);
|
||||
buf[self_exe_path.len] = 0;
|
||||
const file = try std.fs.openFileAbsoluteZ(buf[0..self_exe_path.len :0].ptr, .{});
|
||||
const self_exe_path = try bun.selfExePath();
|
||||
const file = try std.fs.openFileAbsoluteZ(self_exe_path.ptr, .{});
|
||||
return bun.toFD(file.handle);
|
||||
},
|
||||
.windows => {
|
||||
@@ -746,8 +737,15 @@ pub const StandaloneModuleGraph = struct {
|
||||
var nt_path_buf: bun.WPathBuffer = undefined;
|
||||
const nt_path = bun.strings.addNTPathPrefix(&nt_path_buf, image_path);
|
||||
|
||||
const basename_start = std.mem.lastIndexOfScalar(u16, nt_path, '\\') orelse
|
||||
return error.FileNotFound;
|
||||
const basename = nt_path[basename_start + 1 .. nt_path.len - ".exe".len];
|
||||
if (isBuiltInExe(u16, basename)) {
|
||||
return error.FileNotFound;
|
||||
}
|
||||
|
||||
return bun.sys.openFileAtWindows(
|
||||
bun.invalid_fd,
|
||||
bun.FileDescriptor.cwd(),
|
||||
nt_path,
|
||||
// access_mask
|
||||
w.SYNCHRONIZE | w.GENERIC_READ,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user