Mirror of https://github.com/oven-sh/bun (synced 2026-02-06 17:08:51 +00:00)

Compare commits: 116 commits, jarred/ser ... jdalton/ar
| SHA1 |
|---|
| 8843060a24 |
| 5baa2fbb87 |
| 2615dc742e |
| a4e8534779 |
| 9380e99e2b |
| 9898e0a731 |
| ad6aadf7b2 |
| ee05bae2be |
| d615c11a57 |
| 3679f69b70 |
| 0b0bf353fa |
| c4847f464e |
| c8d072c2a9 |
| f014f35531 |
| fd3cd05647 |
| 20085d8ddc |
| 5735feac5d |
| 4ba993be7e |
| 0b2bb1fdc1 |
| b29cf75a24 |
| 05fb044577 |
| 8825b29529 |
| 182b90896f |
| 40e33da4b4 |
| f393f8a065 |
| a09c421f2a |
| ca1dbb4eb2 |
| 8a3b6f0439 |
| e7d8abb263 |
| 013bc79f62 |
| 8326235ecc |
| 7543bf936a |
| 06ec233ebe |
| 0cdad4bebb |
| 14c23cc429 |
| 0dfbdc711a |
| 3cde2365ea |
| 3cfb2816ac |
| c8f5c9f29c |
| 00f27fbeec |
| 76795af695 |
| a4b151962a |
| 1cde9bcdac |
| 0bd7265e8f |
| c831dd8db8 |
| 390441327f |
| 2e0e9f135b |
| 36f1bd3694 |
| 289d23b377 |
| bb483e8479 |
| 268f13765c |
| 801e475c72 |
| a073c85fdb |
| 8cb9f59753 |
| 5903a61410 |
| b4941cdb0c |
| 58417217d6 |
| 2d57f25637 |
| 83a99bf190 |
| e2ffa66bf7 |
| 8980dc026d |
| 4192728592 |
| bdfbcb1898 |
| 6e07f9477c |
| 2dd2fc6ed0 |
| 9e6e8b0234 |
| d53e6d6323 |
| 1edacc6e49 |
| 81badbac4c |
| 7531bfbfe0 |
| 1a989c9ad2 |
| ab7825cca5 |
| f02752577b |
| c177e054f5 |
| a01b01ae72 |
| 456a32344e |
| 6164fac256 |
| 4bbcc39d2f |
| 62c8c97e24 |
| eb708d34ae |
| c3ba60eef5 |
| 7f71f10ad1 |
| 9939049b85 |
| a5c5b5dc61 |
| a2835ef098 |
| 31c4c59740 |
| 0248e3c2b7 |
| d869fcee21 |
| 55f8ae5aea |
| e414d107e6 |
| 0103e2df73 |
| 02ad501f9e |
| d433a1ada0 |
| d712254128 |
| a500c69728 |
| d30b53591f |
| b8389f32ce |
| 7172013a72 |
| 8ff7ee03d2 |
| 5296c26dab |
| da6826e2b7 |
| a637b4c880 |
| d9074dfa5d |
| ba9834d746 |
| 4869ebff24 |
| a9804a3a11 |
| 6bedc23992 |
| 093e9c2499 |
| 3047c9005e |
| e80e61c9a3 |
| e3bf906127 |
| 4e7ed173ef |
| 31befad163 |
| 94b01b2f45 |
| 9ecb691380 |
| fb8a299765 |
21
.github/workflows/bun-linux-build.yml
vendored
@@ -233,16 +233,18 @@ jobs:
|
||||
run: |
|
||||
# If this hangs, it means something is seriously wrong with the build
|
||||
bun --version
|
||||
- id: install-dependnecies
|
||||
name: Install dependencies
|
||||
run: |
|
||||
sudo apt-get update && sudo apt-get install -y openssl
|
||||
bun install --verbose
|
||||
bun install --cwd=test --verbose
|
||||
bun install --cwd=packages/bun-internal-test --verbose
|
||||
|
||||
bun install --cwd=test/js/third_party/prisma --verbose
|
||||
|
||||
# Split these into multiple steps to make it clear which one fails
|
||||
- name: Install dependencies (apt-get)
|
||||
run: sudo apt-get update && sudo apt-get install -y openssl
|
||||
- name: Install dependencies (root)
|
||||
run: bun install --verbose
|
||||
- name: Install dependencies (test)
|
||||
run: bun install --cwd=test --verbose
|
||||
- name: Install dependencies (runner)
|
||||
run: bun install --cwd=packages/bun-internal-test --verbose
|
||||
- name: Install dependencies (prisma)
|
||||
run: bun install --cwd=test/js/third_party/prisma --verbose
|
||||
# This is disabled because the cores are ~5.5gb each
|
||||
# so it is easy to hit 50gb coredump downloads. Only enable if you need to retrive one
|
||||
|
||||
@@ -260,6 +262,7 @@ jobs:
|
||||
TMPDIR: ${{runner.temp}}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
|
||||
18
.github/workflows/bun-mac-aarch64.yml
vendored
@@ -47,8 +47,8 @@ jobs:
|
||||
tag: bun-obj-darwin-aarch64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
# - name: Checkout submodules
|
||||
# run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
@@ -416,12 +416,13 @@ jobs:
|
||||
run: |
|
||||
# If this hangs, it means something is seriously wrong with the build
|
||||
bun --version
|
||||
- id: install
|
||||
name: Install dependencies
|
||||
run: |
|
||||
bun install --verbose
|
||||
bun install --cwd=test --verbose
|
||||
bun install --cwd=packages/bun-internal-test --verbose
|
||||
# Split these into multiple steps to make it clear which one fails
|
||||
- name: "Install dependencies (root)"
|
||||
run: bun install --verbose
|
||||
- name: "Install dependencies (test)"
|
||||
run: bun install --cwd=test --verbose
|
||||
- name: "Install dependencies (runner)"
|
||||
run: bun install --cwd=packages/bun-internal-test --verbose
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
@@ -429,6 +430,7 @@ jobs:
|
||||
TMPDIR: ${{runner.temp}}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
|
||||
3
.github/workflows/bun-mac-x64-baseline.yml
vendored
@@ -53,6 +53,8 @@ jobs:
|
||||
# tag: bun-obj-darwin-aarch64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
@@ -416,6 +418,7 @@ jobs:
|
||||
TMPDIR: ${{runner.temp}}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
|
||||
16
.github/workflows/bun-mac-x64.yml
vendored
@@ -50,6 +50,8 @@ jobs:
|
||||
tag: bun-obj-darwin-x64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
@@ -400,12 +402,13 @@ jobs:
|
||||
run: |
|
||||
# If this hangs, it means something is seriously wrong with the build
|
||||
bun --version
|
||||
- id: install
|
||||
name: Install dependencies
|
||||
run: |
|
||||
bun install --verbose
|
||||
bun install --cwd=test --verbose
|
||||
bun install --cwd=packages/bun-internal-test --verbose
|
||||
# Split these into multiple steps to make it clear which one fails
|
||||
- name: "Install dependencies (root)"
|
||||
run: bun install --verbose
|
||||
- name: "Install dependencies (test)"
|
||||
run: bun install --cwd=test --verbose
|
||||
- name: "Install dependencies (runner)"
|
||||
run: bun install --cwd=packages/bun-internal-test --verbose
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
@@ -413,6 +416,7 @@ jobs:
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TMPDIR: ${{runner.temp}}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
|
||||
44
.github/workflows/bun-release-test.yml
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
# This workflow tests bun-release's code and the packages to ensure that npm,
|
||||
# yarn, and pnpm can install bun on all platforms. This does not test that bun
|
||||
# itself works as it hardcodes 1.1.0 as the version to package.
|
||||
name: bun-release-test
|
||||
concurrency: release-test
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "packages/bun-release/**"
|
||||
- ".github/workflows/bun-release-test.yml"
|
||||
|
||||
jobs:
|
||||
test-release-script:
|
||||
name: Test Release Script
|
||||
strategy:
|
||||
matrix:
|
||||
machine: [namespace-profile-bun-linux-x64, linux-arm64, macos-arm64, macos-12-large, windows-latest]
|
||||
fail-fast: false
|
||||
runs-on: ${{ matrix.machine }}
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: "1.1.0"
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: Install Dependencies
|
||||
run: bun install && npm i -g pnpm yarn npm
|
||||
|
||||
- name: Release
|
||||
run: bun upload-npm -- 1.1.0 test
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
57
.github/workflows/bun-windows.yml
vendored
@@ -18,6 +18,7 @@ on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- ".github/workflows/*.yml"
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
@@ -29,6 +30,7 @@ on:
|
||||
pull_request:
|
||||
branches: [main]
|
||||
paths:
|
||||
- ".github/workflows/*.yml"
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
@@ -59,6 +61,8 @@ jobs:
|
||||
steps:
|
||||
- run: git config --global core.autocrlf false && git config --global core.eol lf
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
@@ -88,7 +92,7 @@ jobs:
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{ matrix.arch }}-windows-msvc
|
||||
GIT_SHA=${{ github.sha }}
|
||||
CANARY=${{ env.canary == 'true' && steps.canary.outputs.canary_revision || '0' }}
|
||||
CANARY=0
|
||||
ZIG_OPTIMIZE=ReleaseSafe
|
||||
# TODO(@paperdave): enable ASSERTIONS=1
|
||||
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
@@ -245,7 +249,8 @@ jobs:
|
||||
# $env:AWS_SECRET_ACCESS_KEY="${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}"
|
||||
# $SCCACHE="$PWD/${sczip}/${sczip}/sccache.exe"
|
||||
|
||||
$CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
$CANARY_REVISION = 0
|
||||
|
||||
.\scripts\env.ps1 ${{ matrix.cpu == 'nehalem' && '-Baseline' || '' }}
|
||||
.\scripts\update-submodules.ps1
|
||||
@@ -310,7 +315,8 @@ jobs:
|
||||
.\scripts\update-submodules.ps1
|
||||
.\scripts\env.ps1 ${{ matrix.cpu == 'nehalem' && '-Baseline' || '' }}
|
||||
Set-Location build
|
||||
$CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
$CANARY_REVISION = 0
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
|
||||
-DNO_CODEGEN=1 `
|
||||
-DNO_CONFIGURE_DEPENDS=1 `
|
||||
@@ -417,19 +423,16 @@ jobs:
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- uses: secondlife/setup-cygwin@v1
|
||||
- uses: secondlife/setup-cygwin@v3
|
||||
with:
|
||||
packages: bash
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
# bun install --verbose
|
||||
# bun install --cwd=test --verbose
|
||||
# bun install --cwd=packages/bun-internal-test --verbose
|
||||
|
||||
npm install
|
||||
cd test && npm install
|
||||
cd ../packages/bun-internal-test && npm install
|
||||
cd ../..
|
||||
# Split these into multiple steps to make it clear which one fails
|
||||
- name: Install dependencies (root)
|
||||
run: bun install --verbose
|
||||
- name: Install dependencies (test)
|
||||
run: bun install --cwd=test --verbose
|
||||
- name: Install dependencies (runner)
|
||||
run: bun install --cwd=packages/bun-internal-test --verbose
|
||||
- id: test
|
||||
name: Run tests
|
||||
env:
|
||||
@@ -437,6 +440,7 @@ jobs:
|
||||
TMPDIR: ${{runner.temp}}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
SHELLOPTS: igncr
|
||||
BUN_PATH_BASE: ${{runner.temp}}
|
||||
BUN_PATH: release/${{env.tag}}-${{ matrix.arch == 'x86_64' && 'x64' || 'aarch64' }}${{ matrix.cpu == 'nehalem' && '-baseline' || '' }}-profile/bun.exe
|
||||
@@ -460,7 +464,7 @@ jobs:
|
||||
|
||||
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: always() && steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
|
||||
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
with:
|
||||
title: ""
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
@@ -468,26 +472,29 @@ jobs:
|
||||
noprefix: true
|
||||
nocontext: true
|
||||
description: |
|
||||
### ❌🪟 [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
|
||||
Pull Request
|
||||
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
|
||||
|
||||
@${{ github.actor }}, there are **${{ steps.test.outputs.regressing_test_count }} test regressions** on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }}
|
||||
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on bun-windows-${{ matrix.arch }}-${{ matrix.cpu }}
|
||||
|
||||
${{ steps.test.outputs.regressing_tests }}
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
- name: Comment on PR
|
||||
if: always() && steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
|
||||
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-windows-${{ matrix.arch }}-${{ matrix.cpu }}
|
||||
message: |
|
||||
### ❌🪟 @${{ github.actor }}, there are **${{ steps.test.outputs.regressing_test_count }} test regressions** on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }}
|
||||
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on bun-windows-${{ matrix.arch }}-${{ matrix.cpu }}
|
||||
|
||||
${{ steps.test.outputs.regressing_tests }}
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
[Full Test Output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Uncomment on PR
|
||||
if: steps.test.outputs.regressing_tests == '' && github.event_name == 'pull_request'
|
||||
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: test-windows-${{ matrix.arch }}-${{ matrix.cpu }}
|
||||
@@ -497,5 +504,5 @@ jobs:
|
||||
✅🪟 Test regressions on Windows ${{ matrix.arch }}${{ matrix.cpu == 'nehalem' && ' Baseline' || '' }} have been resolved.
|
||||
- id: fail
|
||||
name: Fail the build
|
||||
if: steps.test.outputs.regressing_tests != '' && github.event_name == 'pull_request'
|
||||
if: steps.test.outputs.failing_tests != ''
|
||||
run: exit 1
|
||||
|
||||
312
.gitignore
vendored
@@ -1,169 +1,143 @@
|
||||
.DS_Store
|
||||
zig-cache
|
||||
packages/*/*.wasm
|
||||
*.o
|
||||
*.a
|
||||
profile.json
|
||||
|
||||
.env
|
||||
node_modules
|
||||
.envrc
|
||||
.swcrc
|
||||
yarn.lock
|
||||
dist
|
||||
*.tmp
|
||||
*.log
|
||||
*.out.js
|
||||
*.out.refresh.js
|
||||
**/package-lock.json
|
||||
build
|
||||
*.wat
|
||||
zig-out
|
||||
pnpm-lock.yaml
|
||||
README.md.template
|
||||
src/deps/zig-clap/example
|
||||
src/deps/zig-clap/README.md
|
||||
src/deps/zig-clap/.github
|
||||
src/deps/zig-clap/.gitattributes
|
||||
out
|
||||
outdir
|
||||
|
||||
.trace
|
||||
cover
|
||||
coverage
|
||||
coverv
|
||||
*.trace
|
||||
github
|
||||
out.*
|
||||
out
|
||||
.parcel-cache
|
||||
esbuilddir
|
||||
*.bun
|
||||
parceldist
|
||||
esbuilddir
|
||||
outdir/
|
||||
outcss
|
||||
.next
|
||||
txt.js
|
||||
.idea
|
||||
.vscode/cpp*
|
||||
.vscode/clang*
|
||||
|
||||
node_modules_*
|
||||
*.jsb
|
||||
*.zip
|
||||
bun-zigld
|
||||
bun-singlehtreaded
|
||||
bun-nomimalloc
|
||||
bun-mimalloc
|
||||
examples/lotta-modules/bun-yday
|
||||
examples/lotta-modules/bun-old
|
||||
examples/lotta-modules/bun-nofscache
|
||||
|
||||
src/node-fallbacks/out/*
|
||||
src/node-fallbacks/node_modules
|
||||
sign.json
|
||||
release/
|
||||
*.dmg
|
||||
sign.*.json
|
||||
packages/debug-*
|
||||
packages/bun-cli/postinstall.js
|
||||
packages/bun-*/bun
|
||||
packages/bun-*/bun-profile
|
||||
packages/bun-*/debug-bun
|
||||
packages/bun-*/*.o
|
||||
packages/bun-cli/postinstall.js
|
||||
|
||||
packages/bun-cli/bin/*
|
||||
bun-test-scratch
|
||||
misctools/fetch
|
||||
|
||||
src/deps/libiconv
|
||||
src/deps/openssl
|
||||
src/tests.zig
|
||||
*.blob
|
||||
src/deps/s2n-tls
|
||||
.npm
|
||||
.npm.gz
|
||||
|
||||
bun-binary
|
||||
|
||||
src/deps/PLCrashReporter/
|
||||
|
||||
*.dSYM
|
||||
*.crash
|
||||
misctools/sha
|
||||
packages/bun-wasm/*.mjs
|
||||
packages/bun-wasm/*.cjs
|
||||
packages/bun-wasm/*.map
|
||||
packages/bun-wasm/*.js
|
||||
packages/bun-wasm/*.d.ts
|
||||
packages/bun-wasm/*.d.cts
|
||||
packages/bun-wasm/*.d.mts
|
||||
*.bc
|
||||
|
||||
src/fallback.version
|
||||
src/runtime.version
|
||||
*.sqlite
|
||||
*.database
|
||||
*.db
|
||||
misctools/machbench
|
||||
*.big
|
||||
.eslintcache
|
||||
|
||||
/bun-webkit
|
||||
|
||||
src/deps/c-ares/build
|
||||
src/bun.js/bindings-obj
|
||||
src/bun.js/debug-bindings-obj
|
||||
|
||||
failing-tests.txt
|
||||
test.txt
|
||||
myscript.sh
|
||||
|
||||
cold-jsc-start
|
||||
cold-jsc-start.d
|
||||
|
||||
/testdir
|
||||
/test.ts
|
||||
/test.js
|
||||
|
||||
src/js/out/modules*
|
||||
src/js/out/functions*
|
||||
src/js/out/tmp
|
||||
src/js/out/DebugPath.h
|
||||
|
||||
make-dev-stats.csv
|
||||
|
||||
.uuid
|
||||
tsconfig.tsbuildinfo
|
||||
|
||||
test/js/bun/glob/fixtures
|
||||
*.lib
|
||||
*.pdb
|
||||
CMakeFiles
|
||||
build.ninja
|
||||
.ninja_deps
|
||||
.ninja_log
|
||||
CMakeCache.txt
|
||||
cmake_install.cmake
|
||||
compile_commands.json
|
||||
|
||||
*.lib
|
||||
x64
|
||||
**/*.vcxproj*
|
||||
**/*.sln*
|
||||
**/*.dir
|
||||
**/*.pdb
|
||||
|
||||
/.webkit-cache
|
||||
/.cache
|
||||
/src/deps/libuv
|
||||
/build-*/
|
||||
/kcov-out
|
||||
|
||||
.vs
|
||||
|
||||
**/.verdaccio-db.json
|
||||
/test-report.md
|
||||
/test-report.json
|
||||
.DS_Store
|
||||
.env
|
||||
.envrc
|
||||
.eslintcache
|
||||
.idea
|
||||
.next
|
||||
.ninja_deps
|
||||
.ninja_log
|
||||
.npm
|
||||
.npm.gz
|
||||
.parcel-cache
|
||||
.swcrc
|
||||
.trace
|
||||
.uuid
|
||||
.vs
|
||||
.vscode/clang*
|
||||
.vscode/cpp*
|
||||
*.a
|
||||
*.bc
|
||||
*.big
|
||||
*.blob
|
||||
*.bun
|
||||
*.crash
|
||||
*.database
|
||||
*.db
|
||||
*.dmg
|
||||
*.dSYM
|
||||
*.jsb
|
||||
*.lib
|
||||
*.log
|
||||
*.o
|
||||
*.out.js
|
||||
*.out.refresh.js
|
||||
*.pdb
|
||||
*.sqlite
|
||||
*.tmp
|
||||
*.trace
|
||||
*.wat
|
||||
*.zip
|
||||
**/.verdaccio-db.json
|
||||
**/*.dir
|
||||
**/*.pdb
|
||||
**/*.sln*
|
||||
**/*.vcxproj*
|
||||
**/package-lock.json
|
||||
/.cache
|
||||
/.webkit-cache
|
||||
/build-*/
|
||||
/bun-webkit
|
||||
/kcov-out
|
||||
/src/deps/libuv
|
||||
/test-report.json
|
||||
/test-report.md
|
||||
/test.js
|
||||
/test.ts
|
||||
/testdir
|
||||
build
|
||||
build.ninja
|
||||
bun-binary
|
||||
bun-mimalloc
|
||||
bun-nomimalloc
|
||||
bun-singlehtreaded
|
||||
bun-test-scratch
|
||||
bun-zigld
|
||||
cmake_install.cmake
|
||||
CMakeCache.txt
|
||||
CMakeFiles
|
||||
cold-jsc-start
|
||||
cold-jsc-start.d
|
||||
compile_commands.json
|
||||
cover
|
||||
coverage
|
||||
coverv
|
||||
dist
|
||||
esbuilddir
|
||||
examples/lotta-modules/bun-nofscache
|
||||
examples/lotta-modules/bun-old
|
||||
examples/lotta-modules/bun-yday
|
||||
failing-tests.txt
|
||||
github
|
||||
make-dev-stats.csv
|
||||
misctools/fetch
|
||||
misctools/machbench
|
||||
misctools/sha
|
||||
myscript.sh
|
||||
node_modules
|
||||
node_modules_*
|
||||
out
|
||||
out.*
|
||||
outcss
|
||||
outdir
|
||||
outdir/
|
||||
packages/*/*.wasm
|
||||
packages/bun-*/*.o
|
||||
packages/bun-*/bun
|
||||
packages/bun-*/bun-profile
|
||||
packages/bun-*/debug-bun
|
||||
packages/bun-cli/bin/*
|
||||
packages/bun-cli/postinstall.js
|
||||
packages/bun-wasm/*.cjs
|
||||
packages/bun-wasm/*.d.cts
|
||||
packages/bun-wasm/*.d.mts
|
||||
packages/bun-wasm/*.d.ts
|
||||
packages/bun-wasm/*.js
|
||||
packages/bun-wasm/*.map
|
||||
packages/bun-wasm/*.mjs
|
||||
packages/debug-*
|
||||
parceldist
|
||||
pnpm-lock.yaml
|
||||
profile.json
|
||||
README.md.template
|
||||
release/
|
||||
sign.*.json
|
||||
sign.json
|
||||
src/bun.js/bindings-obj
|
||||
src/bun.js/bindings/GeneratedJS2Native.zig
|
||||
src/bun.js/debug-bindings-obj
|
||||
src/deps/c-ares/build
|
||||
src/deps/libiconv
|
||||
src/deps/openssl
|
||||
src/deps/PLCrashReporter/
|
||||
src/deps/s2n-tls
|
||||
src/deps/zig-clap/.gitattributes
|
||||
src/deps/zig-clap/.github
|
||||
src/deps/zig-clap/example
|
||||
src/deps/zig-clap/README.md
|
||||
src/fallback.version
|
||||
src/js/out/DebugPath.h
|
||||
src/js/out/functions*
|
||||
src/js/out/modules*
|
||||
src/js/out/tmp
|
||||
src/node-fallbacks/node_modules
|
||||
src/node-fallbacks/out/*
|
||||
src/runtime.version
|
||||
src/tests.zig
|
||||
test.txt
|
||||
test/js/bun/glob/fixtures
|
||||
tsconfig.tsbuildinfo
|
||||
txt.js
|
||||
x64
|
||||
yarn.lock
|
||||
zig-cache
|
||||
zig-out
|
||||
7
.gitmodules
vendored
@@ -83,3 +83,10 @@ ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "zig"]
path = src/deps/zig
url = https://github.com/oven-sh/zig
branch = bun
depth = 1
shallow = true
fetchRecurseSubmodules = false
@@ -3,3 +3,4 @@ src/deps
test/snapshots
test/js/deno
src/react-refresh.js
*.min.js
1
.vscode/c_cpp_properties.json
vendored
@@ -55,6 +55,7 @@
|
||||
"name": "BunWithJSCDebug",
|
||||
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
|
||||
"includePath": [
|
||||
"${workspaceFolder}/build/codegen",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/",
|
||||
|
||||
47
.vscode/launch.json
generated
vendored
@@ -355,6 +355,21 @@
|
||||
"action": "openExternally"
|
||||
}
|
||||
},
|
||||
// bun exec [...]
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "bun exec [...]",
|
||||
"program": "${workspaceFolder}/build/bun-debug",
|
||||
"args": ["exec", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
|
||||
},
|
||||
"console": "internalConsole"
|
||||
},
|
||||
// bun test [*]
|
||||
{
|
||||
"type": "lldb",
|
||||
@@ -625,19 +640,16 @@
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"request": "launch",
|
||||
"name": "Windows: bun run [file] (fast)",
|
||||
"name": "Windows: bun install",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
"args": ["run", "${fileBasename}"],
|
||||
"args": ["install"],
|
||||
"cwd": "${fileDirname}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1"
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1"
|
||||
},
|
||||
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "0"
|
||||
@@ -901,6 +913,29 @@
|
||||
"action": "openExternally"
|
||||
}
|
||||
},
|
||||
// Windows: bun exec [...]
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
"request": "launch",
|
||||
"name": "Windows: bun exec [...]",
|
||||
"program": "${workspaceFolder}/build/bun-debug.exe",
|
||||
"args": ["exec", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
"value": "1"
|
||||
},
|
||||
{
|
||||
"name": "BUN_DEBUG_QUIET_LOGS",
|
||||
"value": "1"
|
||||
},
|
||||
{
|
||||
"name": "BUN_GARBAGE_COLLECTOR_LEVEL",
|
||||
"value": "2"
|
||||
}
|
||||
]
|
||||
},
|
||||
// Windows: bun test [*]
|
||||
{
|
||||
"type": "cppvsdbg",
|
||||
|
||||
21
.vscode/settings.json
vendored
@@ -40,7 +40,7 @@
|
||||
// C++
|
||||
"lldb.verboseLogging": false,
|
||||
"cmake.configureOnOpen": false,
|
||||
"C_Cpp.errorSquiggles": "enabled",
|
||||
"C_Cpp.errorSquiggles": "enabled",
|
||||
"[cpp]": {
|
||||
"editor.defaultFormatter": "xaver.clang-format"
|
||||
},
|
||||
@@ -55,7 +55,7 @@
|
||||
"prettier.enable": true,
|
||||
"eslint.workingDirectories": ["${workspaceFolder}/packages/bun-types"],
|
||||
"[javascript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[javascriptreact]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
@@ -72,12 +72,12 @@
|
||||
|
||||
// JSON
|
||||
"[json]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[jsonc]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
|
||||
|
||||
// Markdown
|
||||
"[markdown]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
@@ -94,12 +94,17 @@
|
||||
|
||||
// TOML
|
||||
"[toml]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
|
||||
// YAML
|
||||
"[yaml]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
|
||||
// Docker
|
||||
"[dockerfile]": {
|
||||
"editor.formatOnSave": false
|
||||
},
|
||||
|
||||
// Files
|
||||
@@ -148,5 +153,5 @@
|
||||
"WebKit/WebDriver": true,
|
||||
"WebKit/WebKitBuild": true,
|
||||
"WebKit/WebInspectorUI": true
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,8 +2,8 @@ cmake_minimum_required(VERSION 3.22)
|
||||
cmake_policy(SET CMP0091 NEW)
|
||||
cmake_policy(SET CMP0067 NEW)
|
||||
|
||||
set(Bun_VERSION "1.0.36")
|
||||
set(WEBKIT_TAG 089023cc9078b3aa173869fd6685f3e7bed2a994)
|
||||
set(Bun_VERSION "1.1.3")
|
||||
set(WEBKIT_TAG e3a2d89a0b1644cc8d5c245bd2ffee4d4bd6c1d5)
|
||||
|
||||
set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
|
||||
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")
|
||||
@@ -315,6 +315,10 @@ option(USE_STATIC_LIBATOMIC "Statically link libatomic, requires the presence of
|
||||
|
||||
option(USE_LTO "Enable Link-Time Optimization" ${DEFAULT_LTO})
|
||||
|
||||
if(NOT ZIG_LIB_DIR)
|
||||
cmake_path(SET ZIG_LIB_DIR NORMALIZE "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/zig/lib")
|
||||
endif()
|
||||
|
||||
if(USE_VALGRIND)
|
||||
# Disable SIMD
|
||||
set(USE_BASELINE_BUILD ON)
|
||||
@@ -551,6 +555,7 @@ else()
|
||||
add_compile_definitions("BUN_DEBUG=1")
|
||||
set(ASSERT_ENABLED "1")
|
||||
endif()
|
||||
message(STATUS "Using WebKit from ${WEBKIT_DIR}")
|
||||
else()
|
||||
if(NOT EXISTS "${WEBKIT_DIR}/lib/${libWTF}.${STATIC_LIB_EXT}" OR NOT EXISTS "${WEBKIT_DIR}/lib/${libJavaScriptCore}.${STATIC_LIB_EXT}")
|
||||
if(WEBKIT_DIR MATCHES "src/bun.js/WebKit$")
|
||||
@@ -745,19 +750,26 @@ if(NOT NO_CODEGEN)
|
||||
file(GLOB BUN_TS_MODULES ${CONFIGURE_DEPENDS}
|
||||
"${BUN_SRC}/js/node/*.ts"
|
||||
"${BUN_SRC}/js/node/*.js"
|
||||
"${BUN_SRC}/js/bun/*.js"
|
||||
"${BUN_SRC}/js/bun/*.ts"
|
||||
"${BUN_SRC}/js/bun/*.js"
|
||||
"${BUN_SRC}/js/builtins/*.ts"
|
||||
"${BUN_SRC}/js/builtins/*.js"
|
||||
"${BUN_SRC}/js/thirdparty/*.js"
|
||||
"${BUN_SRC}/js/thirdparty/*.ts"
|
||||
"${BUN_SRC}/js/internal/*.js"
|
||||
"${BUN_SRC}/js/internal/*.ts"
|
||||
"${BUN_SRC}/js/node/*.js"
|
||||
"${BUN_SRC}/js/node/*.ts"
|
||||
"${BUN_SRC}/js/thirdparty/*.js"
|
||||
"${BUN_SRC}/js/thirdparty/*.ts"
|
||||
)
|
||||
file(GLOB BUN_TS_FUNCTIONS ${CONFIGURE_DEPENDS} "${BUN_SRC}/js/builtins/*.ts")
|
||||
|
||||
file(GLOB CODEGEN_FILES ${CONFIGURE_DEPENDS} "${BUN_CODEGEN_SRC}/*.ts")
|
||||
|
||||
add_custom_command(
|
||||
OUTPUT
|
||||
"${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp"
|
||||
"${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.h"
|
||||
"${BUN_WORKDIR}/codegen/InternalModuleRegistryConstants.h"
|
||||
"${BUN_WORKDIR}/codegen/InternalModuleRegistry+createInternalModuleById.h"
|
||||
"${BUN_WORKDIR}/codegen/InternalModuleRegistry+enum.h"
|
||||
@@ -765,10 +777,12 @@ if(NOT NO_CODEGEN)
|
||||
"${BUN_WORKDIR}/codegen/NativeModuleImpl.h"
|
||||
"${BUN_WORKDIR}/codegen/ResolvedSourceTag.zig"
|
||||
"${BUN_WORKDIR}/codegen/SyntheticModuleType.h"
|
||||
"${BUN_WORKDIR}/codegen/GeneratedJS2Native.h"
|
||||
"${BUN_SRC}/bun.js/bindings/GeneratedJS2Native.zig"
|
||||
COMMAND ${BUN_EXECUTABLE} run "${BUN_SRC}/codegen/bundle-modules.ts" "--debug=${DEBUG}" "${BUN_WORKDIR}"
|
||||
DEPENDS ${BUN_TS_MODULES} ${CODEGEN_FILES}
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
COMMENT "Bundling JS modules"
|
||||
COMMENT "Bundling JS"
|
||||
)
|
||||
endif()
|
||||
|
||||
@@ -776,15 +790,6 @@ WEBKIT_ADD_SOURCE_DEPENDENCIES(
|
||||
"${BUN_SRC}/bun.js/bindings/InternalModuleRegistry.cpp"
|
||||
"${BUN_WORKDIR}/codegen/InternalModuleRegistryConstants.h"
|
||||
)
|
||||
|
||||
add_custom_command(
|
||||
OUTPUT "${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp"
|
||||
"${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.h"
|
||||
COMMAND ${BUN_EXECUTABLE} run "${BUN_SRC}/codegen/bundle-functions.ts" "--debug=${DEBUG}" "${BUN_WORKDIR}"
|
||||
DEPENDS ${BUN_TS_FUNCTIONS} ${CODEGEN_FILES}
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
COMMENT "Bundling JS builtin functions"
|
||||
)
|
||||
list(APPEND BUN_RAW_SOURCES "${BUN_WORKDIR}/codegen/WebCoreJSBuiltins.cpp")
|
||||
|
||||
# --- Peechy API ---
|
||||
@@ -852,8 +857,10 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
|
||||
OUTPUT "${BUN_ZIG_OBJ}"
|
||||
COMMAND
|
||||
"${ZIG_COMPILER}" "build" "obj"
|
||||
"--zig-lib-dir" "${ZIG_LIB_DIR}"
|
||||
"-Doutput-file=${BUN_ZIG_OBJ}"
|
||||
"-Dgenerated-code=${BUN_WORKDIR}/codegen"
|
||||
"-freference-trace=10"
|
||||
"-Dversion=${Bun_VERSION}"
|
||||
"-Dcanary=${CANARY}"
|
||||
"-Doptimize=${ZIG_OPTIMIZE}"
|
||||
@@ -866,6 +873,7 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
|
||||
"${BUN_WORKDIR}/codegen/ResolvedSourceTag.zig"
|
||||
"${BUN_IDENTIFIER_CACHE_OUT}"
|
||||
"${BUN_SRC}/api/schema.zig"
|
||||
"${BUN_SRC}/bun.js/bindings/GeneratedJS2Native.zig"
|
||||
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
|
||||
COMMENT "Building zig code"
|
||||
VERBATIM
|
||||
|
||||
@@ -116,7 +116,7 @@ RUN apt-get update -y \
|
||||
&& case "${arch##*-}" in \
|
||||
amd64) variant="x64";; \
|
||||
arm64) variant="aarch64";; \
|
||||
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
|
||||
*) echo "unsupported architecture: $arch"; exit 1 ;; \
|
||||
esac \
|
||||
&& wget "${BUN_DOWNLOAD_URL_BASE}/bun-linux-${variant}.zip" \
|
||||
&& unzip bun-linux-${variant}.zip \
|
||||
@@ -414,7 +414,7 @@ COPY --from=bun-codegen-for-zig ${BUN_DIR}/packages/bun-error/dist ${BUN_DIR}/pa
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN mkdir -p build \
|
||||
&& bun run $BUN_DIR/src/codegen/bundle-modules-fast.ts $BUN_DIR/build \
|
||||
&& bun run $BUN_DIR/src/codegen/bundle-modules.ts --debug=OFF $BUN_DIR/build \
|
||||
&& cd build \
|
||||
&& cmake .. \
|
||||
-G Ninja \
|
||||
@@ -429,6 +429,7 @@ RUN mkdir -p build \
|
||||
-DBUN_ZIG_OBJ="/tmp/bun-zig.o" \
|
||||
-DCANARY="${CANARY}" \
|
||||
-DZIG_COMPILER=system \
|
||||
-DZIG_LIB_DIR=$BUN_DIR/src/deps/zig/lib \
|
||||
&& ONLY_ZIG=1 ninja "/tmp/bun-zig.o" -v
|
||||
|
||||
FROM scratch as build_release_obj
|
||||
|
||||
@@ -45,16 +45,17 @@ bunx cowsay 'Hello, world!' # execute a package

## Install

Bun supports Linux (x64 & arm64) and macOS (x64 & Apple Silicon).
Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64).

> **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
>
> **Windows users** — Bun does not currently provide a native Windows build. We're working on this; progress can be tracked at [this issue](https://github.com/oven-sh/bun/issues/43). In the meantime, use one of the installation methods below for Windows Subsystem for Linux.

```sh
# with install script (recommended)
curl -fsSL https://bun.sh/install | bash

# on windows
powershell -c "irm bun.sh/install.ps1 | iex"

# with npm
npm install -g bun
@@ -1,4 +1,4 @@
import { Database } from "https://deno.land/x/sqlite3@0.9.1/mod.ts";
import { Database } from "https://deno.land/x/sqlite3@0.11.1/mod.ts";
import { run, bench } from "../node_modules/mitata/src/cli.mjs";

const db = new Database("./src/northwind.sqlite");
@@ -629,3 +629,5 @@ pub fn configureObjectStep(b: *std.build.Builder, obj: *CompileStep, obj_step: *
obj.link_function_sections = true;
}
}

// !
@@ -96,6 +96,10 @@ FROM alpine:3.18
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
# Ensure `bun install -g` works
|
||||
ARG BUN_INSTALL_BIN=/usr/local/bin
|
||||
ENV BUN_INSTALL_BIN=${BUN_INSTALL_BIN}
|
||||
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/
|
||||
COPY docker-entrypoint.sh /usr/local/bin/
|
||||
|
||||
|
||||
@@ -62,6 +62,10 @@ FROM debian:bullseye-slim
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
# Ensure `bun install -g` works
|
||||
ARG BUN_INSTALL_BIN=/usr/local/bin
|
||||
ENV BUN_INSTALL_BIN=${BUN_INSTALL_BIN}
|
||||
|
||||
COPY docker-entrypoint.sh /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
|
||||
|
||||
|
||||
@@ -3,6 +3,8 @@ FROM debian:bullseye-slim AS build
|
||||
# https://github.com/oven-sh/bun/releases
|
||||
ARG BUN_VERSION=latest
|
||||
|
||||
# Node.js includes python3 for node-gyp, see https://github.com/oven-sh/bun/issues/9807
|
||||
# Though, not on slim and alpine images.
|
||||
RUN apt-get update -qq \
|
||||
&& apt-get install -qq --no-install-recommends \
|
||||
ca-certificates \
|
||||
@@ -11,6 +13,7 @@ RUN apt-get update -qq \
|
||||
gpg \
|
||||
gpg-agent \
|
||||
unzip \
|
||||
python3 \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& arch="$(dpkg --print-architecture)" \
|
||||
@@ -63,6 +66,10 @@ COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
# Ensure `bun install -g` works
|
||||
ARG BUN_INSTALL_BIN=/usr/local/bin
|
||||
ENV BUN_INSTALL_BIN=${BUN_INSTALL_BIN}
|
||||
|
||||
RUN groupadd bun \
|
||||
--gid 1000 \
|
||||
&& useradd bun \
|
||||
|
||||
@@ -62,6 +62,10 @@ FROM gcr.io/distroless/base-nossl-debian11
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
# Ensure `bun install -g` works
|
||||
ARG BUN_INSTALL_BIN=/usr/local/bin
|
||||
ENV BUN_INSTALL_BIN=${BUN_INSTALL_BIN}
|
||||
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/
|
||||
|
||||
# Temporarily use the `build`-stage image binaries to create a symlink:
|
||||
|
||||
@@ -6,12 +6,12 @@ Bun implements the following properties.
|
||||
import.meta.dir; // => "/path/to/project"
|
||||
import.meta.file; // => "file.ts"
|
||||
import.meta.path; // => "/path/to/project/file.ts"
|
||||
import.meta.url; // => "file:///path/to/project/file.ts"
|
||||
|
||||
import.meta.main; // `true` if this file is directly executed by `bun run`
|
||||
// `false` otherwise
|
||||
|
||||
import.meta.resolveSync("zod")
|
||||
// resolve an import specifier relative to the directory
|
||||
import.meta.resolve("zod"); // => "file:///path/to/project/node_modules/zod/index.js"
|
||||
```
|
||||
|
||||
{% table %}
|
||||
@@ -28,13 +28,18 @@ import.meta.resolveSync("zod")
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.env`
|
||||
- An alias to `process.env`.
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.file`
|
||||
- The name of the current file, e.g. `index.tsx`
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.path`
|
||||
- Absolute path to the current file, e.g. `/path/to/project/index.tx`. Equivalent to `__filename` in CommonJS modules (and Node.js)
|
||||
- Absolute path to the current file, e.g. `/path/to/project/index.ts`. Equivalent to `__filename` in CommonJS modules (and Node.js)
|
||||
|
||||
---
|
||||
|
||||
@@ -43,30 +48,22 @@ import.meta.resolveSync("zod")
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.url`
|
||||
- A string url to the current file, e.g. `file:///path/to/project/index.tx`
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.main`
|
||||
- `boolean` Indicates whether the current file is the entrypoint to the current `bun` process. Is the file being directly executed by `bun run` or is it being imported?
|
||||
- Indicates whether the current file is the entrypoint to the current `bun` process. Is the file being directly executed by `bun run` or is it being imported?
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.env`
|
||||
- An alias to `process.env`.
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.resolve{Sync}`
|
||||
- Resolve a module specifier (e.g. `"zod"` or `"./file.tsx"`) to an absolute path. While file would be imported if the specifier were imported from this file?
|
||||
- `import.meta.resolve`
|
||||
- Resolve a module specifier (e.g. `"zod"` or `"./file.tsx"`) to a url. Equivalent to [`import.meta.resolve` in browsers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/import.meta#resolve)
|
||||
|
||||
```ts
|
||||
import.meta.resolveSync("zod");
|
||||
// => "/path/to/project/node_modules/zod/index.ts"
|
||||
|
||||
import.meta.resolveSync("./file.tsx");
|
||||
// => "/path/to/project/file.tsx"
|
||||
import.meta.resolve("zod");
|
||||
// => "file:///path/to/project/node_modules/zod/index.ts"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.url`
|
||||
- A `string` url to the current file, e.g. `file:///path/to/project/index.ts`. Equivalent to [`import.meta.url` in browsers](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/import.meta#url)
|
||||
|
||||
{% /table %}
|
||||
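A minimal sketch pulling a few of the properties from the table above together (the log output and file layout are illustrative, not from this diff):

```ts
// Runs only when this file is the entrypoint, i.e. started via `bun run`.
if (import.meta.main) {
  console.log(import.meta.path); // absolute path to this file
  console.log(import.meta.dir); // directory containing this file
  console.log(import.meta.env.NODE_ENV); // import.meta.env is an alias of process.env
}
```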
|
||||
@@ -186,6 +186,7 @@ proc.unref();
|
||||
## Inter-process communication (IPC)
|
||||
|
||||
Bun supports direct inter-process communication channel between two `bun` processes. To receive messages from a spawned Bun subprocess, specify an `ipc` handler.
|
||||
|
||||
{%callout%}
|
||||
**Note** — This API is only compatible with other `bun` processes. Use `process.execPath` to get a path to the currently running `bun` executable.
|
||||
{%/callout%}
|
||||
@@ -227,8 +228,6 @@ process.on("message", (message) => {
|
||||
});
|
||||
```
|
||||
|
||||
All messages are serialized using the JSC `serialize` API, which allows for the same set of [transferrable types](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects) supported by `postMessage` and `structuredClone`, including strings, typed arrays, streams, and objects.
|
||||
|
||||
```ts#child.ts
|
||||
// send a string
|
||||
process.send("Hello from child as string");
|
||||
@@ -237,6 +236,11 @@ process.send("Hello from child as string");
|
||||
process.send({ message: "Hello from child as object" });
|
||||
```
|
||||
|
||||
The `ipcMode` option controls the underlying communication format between the two processes:
|
||||
|
||||
- `advanced`: (default) Messages are serialized using the JSC `serialize` API, which supports cloning [everything `structuredClone` supports](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm). This does not support transferring ownership of objects.
|
||||
- `json`: Messages are serialized using `JSON.stringify` and `JSON.parse`, which does not support as many object types as `advanced` does.
|
||||
|
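As a rough sketch of how the pieces described above fit together (the file names `parent.ts` and `child.ts` are illustrative, not part of this diff), the parent passes an `ipc` handler to `Bun.spawn` and the child replies with `process.send`:

```ts
// parent.ts — spawn a bun child process and exchange messages over the IPC channel
const child = Bun.spawn(["bun", "child.ts"], {
  ipc(message, subprocess) {
    // invoked whenever the child calls process.send()
    console.log("child said:", message);
    subprocess.send({ reply: "pong" }); // plain objects survive the default serialization
  },
});

child.send("ping"); // deliver a message to the child process
```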
||||
## Blocking API (`Bun.spawnSync()`)
|
||||
|
||||
Bun provides a synchronous equivalent of `Bun.spawn` called `Bun.spawnSync`. This is a blocking API that supports the same inputs and parameters as `Bun.spawn`. It returns a `SyncSubprocess` object, which differs from `Subprocess` in a few ways.
|
||||
|
||||
@@ -635,7 +635,7 @@ Bun.resolveSync("zod", "/path/to/project");
|
||||
// => "/path/to/project/node_modules/zod/index.ts"
|
||||
```
|
||||
|
||||
To resolve relative to the current working directory, pass `process.cwd` or `"."` as the root.
|
||||
To resolve relative to the current working directory, pass `process.cwd()` or `"."` as the root.
|
||||
|
||||
```ts
|
||||
Bun.resolveSync("./foo.ts", process.cwd());
|
||||
|
||||
@@ -156,7 +156,7 @@ Like the Bun runtime, the bundler supports an array of file types out of the box
|
||||
---
|
||||
|
||||
- `.js` `.jsx`, `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx`
|
||||
- Uses Bun's built-in transpiler to parse the file and transpile TypeScript/JSX syntax to vanilla JavaScript. The bundler executes a set of default transforms, including dead code elimination, tree shaking, and environment variable inlining. At the moment Bun does not attempt to down-convert syntax; if you use recently ECMAScript syntax, that will be reflected in the bundled code.
|
||||
- Uses Bun's built-in transpiler to parse the file and transpile TypeScript/JSX syntax to vanilla JavaScript. The bundler executes a set of default transforms including dead code elimination and tree shaking. At the moment Bun does not attempt to down-convert syntax; if you use recently ECMAScript syntax, that will be reflected in the bundled code.
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ Bun uses the file extension to determine which built-in _loader_ should be used
|
||||
|
||||
**JavaScript**. Default for `.cjs` and `.mjs`.
|
||||
|
||||
Parses the code and applies a set of default transforms, like dead-code elimination, tree shaking, and environment variable inlining. Note that Bun does not attempt to down-convert syntax at the moment.
|
||||
Parses the code and applies a set of default transforms like dead-code elimination and tree shaking. Note that Bun does not attempt to down-convert syntax at the moment.
|
||||
|
||||
### `jsx`
|
||||
|
||||
@@ -178,7 +178,7 @@ In the bundler, `.node` files are handled using the [`file`](#file) loader.
|
||||
In the runtime and bundler, SQLite databases can be directly imported. This will load the database using [`bun:sqlite`](/docs/api/sqlite.md).
|
||||
|
||||
```ts
|
||||
import db from "./my.db" with {type: "sqlite"};
|
||||
import db from "./my.db" with { type: "sqlite" };
|
||||
```
|
||||
|
||||
This is only supported when the `target` is `bun`.
|
||||
@@ -189,21 +189,21 @@ You can change this behavior with the `"embed"` attribute:
|
||||
|
||||
```ts
|
||||
// embed the database into the bundle
|
||||
import db from "./my.db" with {type: "sqlite", embed: "true"};
|
||||
import db from "./my.db" with { type: "sqlite", embed: "true" };
|
||||
```
|
||||
|
||||
When using a [standalone executable](/docs/bundler/executables), the database is embedded into the single-file executable.
|
||||
|
||||
Otherwise, the database to embed is copied into the `outdir` with a hashed filename.
|
||||
|
||||
### `bunshell` loader
|
||||
### `sh` loader
|
||||
|
||||
**Bun Shell loader**. Default for `.bun.sh` files
|
||||
**Bun Shell loader**. Default for `.sh` files
|
||||
|
||||
This loader is used to parse [Bun Shell](/docs/runtime/shell) scripts. It's only supported when starting bun itself, so it's not available in the bundler or in the runtime.
|
||||
This loader is used to parse [Bun Shell](/docs/runtime/shell) scripts. It's only supported when starting Bun itself, so it's not available in the bundler or in the runtime.
|
||||
|
||||
```sh
|
||||
$ bun run ./script.bun.sh
|
||||
$ bun run ./script.sh
|
||||
```
|
||||
|
||||
### `file`
|
||||
|
||||
@@ -77,4 +77,4 @@ Bun automatically loads environment variables from `.env` files before running a
|
||||
2. `NODE_ENV` === `"production"` ? `.env.production` : `.env.development`
|
||||
3. `.env`
|
||||
|
||||
To debug environment variables, run `bun run env` to view a list of resolved environment variables. -->
|
||||
To debug environment variables, run `bun --print process.env` to view a list of resolved environment variables. -->
|
||||
|
||||
@@ -18,10 +18,10 @@ Bun.env.API_TOKEN; // => "secret"
|
||||
|
||||
---
|
||||
|
||||
To print all currently-set environment variables to the command line, run `bun run env`. This is useful for debugging.
|
||||
To print all currently-set environment variables to the command line, run `bun --print process.env`. This is useful for debugging.
|
||||
|
||||
```sh
|
||||
$ bun run env
|
||||
$ bun --print process.env
|
||||
BAZ=stuff
|
||||
FOOBAR=aaaaaa
|
||||
<lots more lines>
|
||||
|
||||
@@ -4,7 +4,7 @@ name: Migrate from Jest to Bun's test runner
|
||||
|
||||
In many cases, Bun's test runner can run Jest test suites with no code changes. Just run `bun test` instead of `npx jest`, `yarn test`, etc.
|
||||
|
||||
```sh-diff
|
||||
```sh
|
||||
- $ npx jest
|
||||
- $ yarn test
|
||||
+ $ bun test
|
||||
@@ -57,7 +57,7 @@ Replace `bail` in your Jest config with the `--bail` CLI flag.
|
||||
- };
|
||||
``` -->
|
||||
|
||||
```sh-diff
|
||||
```sh
|
||||
$ bun test --bail 3
|
||||
```
|
||||
|
||||
|
||||
@@ -42,21 +42,20 @@ $ proto install bun
|
||||
Bun requires a minimum of Windows 10 version 1809
|
||||
{% /callout %}
|
||||
|
||||
Bun provides a _limited, experimental_ native build for Windows. It is recommended to use Bun within [Windows Subsystem for Linux](https://learn.microsoft.com/en-us/windows/wsl/install) and follow the above instructions. To help catch bugs, the experimental build enables many debugging assertions, which will make the binary slower than what the stable version will be.
|
||||
|
||||
To install, paste this into a terminal:
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```powershell#PowerShell/cmd.exe
|
||||
# WARNING: No stability is guaranteed on the experimental Windows builds
|
||||
powershell -c "irm bun.sh/install.ps1|iex"
|
||||
> powershell -c "irm bun.sh/install.ps1|iex"
|
||||
```
|
||||
|
||||
```powershell#npm
|
||||
> npm install -g bun # the last `npm` command you'll ever need
|
||||
```
|
||||
|
||||
```powershell#Scoop
|
||||
# WARNING: No stability is guaranteed on the experimental Windows builds
|
||||
scoop bucket add versions
|
||||
scoop install bun-canary
|
||||
> scoop install bun
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
@@ -145,6 +144,8 @@ $ bun upgrade
|
||||
{% callout %}
|
||||
**Homebrew users** — To avoid conflicts with Homebrew, use `brew upgrade bun` instead.
|
||||
|
||||
**Scoop users** — To avoid conflicts with Scoop, use `scoop upgrade bun` instead.
|
||||
|
||||
**proto users** - Use `proto install bun --pin` instead.
|
||||
{% /callout %}
|
||||
|
||||
@@ -233,10 +234,14 @@ $ rm -rf ~/.bun # for macOS, Linux, and WSL
|
||||
```
|
||||
|
||||
```powershell#Windows
|
||||
powershell -c ~\.bun\uninstall.ps1
|
||||
> powershell -c ~\.bun\uninstall.ps1
|
||||
```
|
||||
|
||||
```bash#NPM
|
||||
```powershell#Scoop
|
||||
> scoop uninstall bun
|
||||
```
|
||||
|
||||
```bash#npm
|
||||
$ npm uninstall -g bun
|
||||
```
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ I recommend using VSCode through SSH instead of Tunnels or the Tailscale extensi
|
||||
By default, running unverified scripts are blocked.
|
||||
|
||||
```ps1
|
||||
Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted
|
||||
> Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted
|
||||
```
|
||||
|
||||
### System Dependencies
|
||||
@@ -47,7 +47,7 @@ Set-ExecutionPolicy -Scope CurrentUser -ExecutionPolicy Unrestricted
|
||||
- Bun 1.1 or later. We use Bun to run it's own code generators.
|
||||
|
||||
```ps1
|
||||
irm bun.sh/install.ps1 | iex
|
||||
> irm bun.sh/install.ps1 | iex
|
||||
```
|
||||
|
||||
- [Visual Studio](https://visualstudio.microsoft.com) with the "Desktop Development with C++" workload.
|
||||
@@ -70,28 +70,28 @@ The Zig compiler is automatically downloaded, installed, and updated by the buil
|
||||
[Scoop](https://scoop.sh) can be used to install these remaining tools easily:
|
||||
|
||||
```ps1
|
||||
irm https://get.scoop.sh | iex
|
||||
|
||||
scoop install nodejs-lts go rust nasm ruby perl
|
||||
scoop llvm@16.0.4 # scoop bug if you install llvm and the rest at the same time
|
||||
> irm https://get.scoop.sh | iex
|
||||
> scoop install nodejs-lts go rust nasm ruby perl
|
||||
# scoop seems to be buggy if you install llvm and the rest at the same time
|
||||
> scoop llvm@16.0.4
|
||||
```
|
||||
|
||||
If you intend on building WebKit locally (optional), you should install these packages:
|
||||
|
||||
```ps1
|
||||
scoop install make cygwin python
|
||||
> scoop install make cygwin python
|
||||
```
|
||||
|
||||
From here on out, it is **expected you use a PowerShell Terminal with `.\scripts\env.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it:
|
||||
|
||||
```ps1
|
||||
.\scripts\env.ps1
|
||||
> .\scripts\env.ps1
|
||||
```
|
||||
|
||||
To verify, you can check for an MSVC-only command line such as `mt.exe`
|
||||
|
||||
```ps1
|
||||
Get-Command mt
|
||||
> Get-Command mt
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
@@ -101,24 +101,24 @@ It is not recommended to install `ninja` / `cmake` into your global path, becaus
|
||||
## Building
|
||||
|
||||
```ps1
|
||||
bun install
|
||||
> bun install
|
||||
|
||||
.\scripts\env.ps1
|
||||
.\scripts\update-submodules.ps1 # this syncs git submodule state
|
||||
.\scripts\all-dependencies.ps1 # this builds all dependencies
|
||||
.\scripts\make-old-js.ps1 # runs some old code generators
|
||||
> .\scripts\env.ps1
|
||||
> .\scripts\update-submodules.ps1 # this syncs git submodule state
|
||||
> .\scripts\all-dependencies.ps1 # this builds all dependencies
|
||||
> .\scripts\make-old-js.ps1 # runs some old code generators
|
||||
|
||||
# Configure build environment
|
||||
cmake -Bbuild -GNinja -DCMAKE_BUILD_TYPE=Debug
|
||||
> cmake -Bbuild -GNinja -DCMAKE_BUILD_TYPE=Debug
|
||||
|
||||
# Build bun
|
||||
ninja -Cbuild
|
||||
> ninja -Cbuild
|
||||
```
|
||||
|
||||
If this was successful, you should have a `bun-debug.exe` in the `build` folder.
|
||||
|
||||
```ps1
|
||||
.\build\bun-debug.exe --revision
|
||||
> .\build\bun-debug.exe --revision
|
||||
```
|
||||
|
||||
You should add this to `$Env:PATH`. The simplest way to do so is to open the start menu, type "Path", and then navigate the environment variables menu to add `C:\.....\bun\build` to the user environment variable `PATH`. You should then restart your editor (if it does not update still, log out and log back in).
|
||||
@@ -134,15 +134,15 @@ You can run the test suite either using `bun test`, or by using the wrapper scri
|
||||
|
||||
```ps1
|
||||
# Setup
|
||||
bun i --cwd packages\bun-internal-test
|
||||
> bun i --cwd packages\bun-internal-test
|
||||
|
||||
# Run the entire test suite with reporter
|
||||
# the package.json script "test" uses "build/bun-debug.exe" by default
|
||||
bun run test
|
||||
> bun run test
|
||||
|
||||
# Run an individual test file:
|
||||
bun-debug test node\fs
|
||||
bun-debug test "C:\bun\test\js\bun\resolve\import-meta.test.js"
|
||||
> bun-debug test node\fs
|
||||
> bun-debug test "C:\bun\test\js\bun\resolve\import-meta.test.js"
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
@@ -98,10 +98,10 @@ Bun.env.API_TOKEN; // => "secret"
|
||||
import.meta.env.API_TOKEN; // => "secret"
|
||||
```
|
||||
|
||||
To print all currently-set environment variables to the command line, run `bun run env`. This is useful for debugging.
|
||||
To print all currently-set environment variables to the command line, run `bun --print process.env`. This is useful for debugging.
|
||||
|
||||
```sh
|
||||
$ bun run env
|
||||
$ bun --print process.env
|
||||
BAZ=stuff
|
||||
FOOBAR=aaaaaa
|
||||
<lots more lines>
|
||||
|
||||
@@ -18,7 +18,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
### [`node:child_process`](https://nodejs.org/api/child_process.html)
|
||||
|
||||
🟡 Missing `Stream` stdio, `proc.gid` `proc.uid`. IPC has partial support and only current only works with other `bun` processes.
|
||||
🟡 Missing `Stream` stdio, `proc.gid` `proc.uid`. IPC cannot send socket handles and only works with other `bun` processes.
|
||||
|
||||
### [`node:cluster`](https://nodejs.org/api/cluster.html)
|
||||
|
||||
|
||||
@@ -1,9 +1,5 @@
|
||||
Bun Shell makes shell scripting with JavaScript & TypeScript fun. It's a cross-platform bash-like shell with seamless JavaScript interop.
|
||||
|
||||
{% callout type="note" %}
|
||||
**Alpha-quality software**: Bun Shell is an unstable API still under development. If you have feature requests or run into bugs, please open an issue. There may be breaking changes in the future.
|
||||
{% /callout %}
|
||||
|
||||
Quickstart:
|
||||
|
||||
```js
|
||||
@@ -23,6 +19,8 @@ await $`cat < ${response} | wc -c`; // 1256
|
||||
- **Template literals**: Template literals are used to execute shell commands. This allows for easy interpolation of variables and expressions.
|
||||
- **Safety**: Bun Shell escapes all strings by default, preventing shell injection attacks.
|
||||
- **JavaScript interop**: Use `Response`, `ArrayBuffer`, `Blob`, `Bun.file(path)` and other JavaScript objects as stdin, stdout, and stderr.
|
||||
- **Shell scripting**: Bun Shell can be used to run shell scripts (`.bun.sh` files).
|
||||
- **Custom interpreter**: Bun Shell is written in Zig, along with it's lexer, parser, and interpreter. Bun Shell is a small programming language.
|
||||
|
||||
## Getting started
|
||||
|
||||
@@ -53,16 +51,66 @@ const welcome = await $`echo "Hello World!"`.text();
|
||||
console.log(welcome); // Hello World!\n
|
||||
```
|
||||
|
||||
To get stdout, stderr, and the exit code, use await or `.run`:
|
||||
By default, `await`ing will return stdout and stderr as `Buffer`s.
|
||||
|
||||
```js
|
||||
import { $ } from "bun";
|
||||
|
||||
const { stdout, stderr, exitCode } = await $`echo "Hello World!"`.quiet();
|
||||
const { stdout, stderr } = await $`echo "Hello World!"`.quiet();
|
||||
|
||||
console.log(stdout); // Buffer(6) [ 72, 101, 108, 108, 111, 32 ]
|
||||
console.log(stderr); // Buffer(0) []
|
||||
console.log(exitCode); // 0
|
||||
```
|
||||
|
||||
## Error handling
|
||||
|
||||
By default, non-zero exit codes will throw an error. This `ShellError` contains information about the command run.
|
||||
|
||||
```js
|
||||
import { $ } from "bun";
|
||||
|
||||
try {
|
||||
const output = await $`something-that-may-fail`.text();
|
||||
console.log(output);
|
||||
} catch (err) {
|
||||
console.log(`Failed with code ${err.exitCode}`);
|
||||
console.log(err.stdout.toString());
console.log(err.stderr.toString());
|
||||
}
|
||||
```
|
||||
|
||||
Throwing can be disabled with `.nothrow()`. The result's `exitCode` will need to be checked manually.
|
||||
|
||||
```js
|
||||
import { $ } from "bun";
|
||||
|
||||
const { stdout, stderr, exitCode } = await $`something-that-may-fail`
|
||||
.nothrow()
|
||||
.quiet();
|
||||
|
||||
if (exitCode !== 0) {
|
||||
console.log(`Non-zero exit code ${exitCode}`);
|
||||
}
|
||||
|
||||
console.log(stdout);
|
||||
console.log(stderr);
|
||||
```
|
||||
|
||||
The default handling of non-zero exit codes can be configured by calling `.nothrow()` or `.throws(boolean)` on the `$` function itself.
|
||||
|
||||
```js
|
||||
import { $ } from "bun";
|
||||
// shell promises will not throw, meaning you will have to
|
||||
// check for `exitCode` manually on every shell command.
|
||||
$.nothrow(); // equivalent to $.throws(false)
|
||||
|
||||
// default behavior, non-zero exit codes will throw an error
|
||||
$.throws(true);
|
||||
|
||||
// alias for $.nothrow()
|
||||
$.throws(false);
|
||||
|
||||
await $`something-that-may-fail`; // No exception thrown
|
||||
```
|
||||
|
||||
## Redirection
|
||||
@@ -89,9 +137,8 @@ To redirect stdout to a JavaScript object, use the `>` operator:
|
||||
import { $ } from "bun";
|
||||
|
||||
const buffer = Buffer.alloc(100);
|
||||
const result = await $`echo "Hello World!" > ${buffer}`;
|
||||
await $`echo "Hello World!" > ${buffer}`;
|
||||
|
||||
console.log(result.exitCode); // 0
|
||||
console.log(buffer.toString()); // Hello World!\n
|
||||
```
|
||||
|
||||
@@ -105,7 +152,7 @@ The following JavaScript objects are supported for redirection to:
|
||||
To redirect the output from JavaScript objects to stdin, use the `<` operator:
|
||||
|
||||
```js
|
||||
import { $, file } from "bun";
|
||||
import { $ } from "bun";
|
||||
|
||||
const response = new Response("hello i am a response body");
|
||||
|
||||
@@ -352,6 +399,18 @@ For cross-platform compatibility, Bun Shell implements a set of builtin commands
|
||||
- `echo`: print text
|
||||
- `pwd`: print the working directory
|
||||
- `bun`: run bun in bun
|
||||
- `cat`
|
||||
- `touch`
|
||||
- `mkdir`
|
||||
- `which`
|
||||
- `mv`
|
||||
- `exit`
|
||||
- `true`
|
||||
- `false`
|
||||
- `yes`
|
||||
- `seq`
|
||||
- `dirname`
|
||||
- `basename`
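A script that sticks to these builtins should behave the same on macOS, Linux, and Windows. A minimal sketch (file names are made up, and `mkdir -p` is assumed to be supported):

```js
import { $ } from "bun";

// mkdir, echo (with redirection), and basename are builtins,
// so no external programs are needed on any platform.
await $`mkdir -p out`;
await $`echo "hello" > out/greeting.txt`;
console.log(await $`basename out/greeting.txt`.text()); // "greeting.txt\n"
```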
|
||||
|
||||
**Partially** implemented:
|
||||
|
||||
@@ -359,9 +418,7 @@ For cross-platform compatibility, Bun Shell implements a set of builtin commands
|
||||
|
||||
**Not** implemented yet, but planned:
|
||||
|
||||
- `mkdir`: create directories
|
||||
- `cp`: copy files and directories
|
||||
- `cat`: concatenate files
|
||||
- See https://github.com/oven-sh/bun/issues/9716 for the full list.
|
||||
|
||||
## Utilities
|
||||
|
||||
@@ -404,24 +461,28 @@ await $`echo ${{ raw: '$(foo) `bar` "baz"' }}`;
|
||||
|
||||
For simple shell scripts, you can use Bun Shell instead of `/bin/sh` to run them.
|
||||
|
||||
To do so, just run the script with `bun` on a file with the `.bun.sh` extension.
|
||||
To do so, just run the script with `bun` on a file with the `.sh` extension.
|
||||
|
||||
```sh#script.bun.sh
|
||||
```sh#script.sh
|
||||
echo "Hello World! pwd=$(pwd)"
|
||||
```
|
||||
|
||||
```sh
|
||||
$ bun ./script.bun.sh
|
||||
$ bun ./script.sh
|
||||
Hello World! pwd=/home/demo
|
||||
```
|
||||
|
||||
Scripts with Bun Shell are cross-platform, which means they work on Windows:
|
||||
|
||||
```
|
||||
PS C:\Users\Demo> bun .\script.bun.sh
|
||||
```powershell
|
||||
> bun .\script.sh
|
||||
Hello World! pwd=C:\Users\Demo
|
||||
```
|
||||
|
||||
## Implementation notes
|
||||
|
||||
Bun Shell is a small programming language in Bun that is implemented in Zig. It includes a handwritten lexer, parser, and interpreter. Unlike bash, zsh, and other shells, Bun Shell runs operations concurrently.
|
||||
|
||||
## Credits
|
||||
|
||||
Large parts of this API were inspired by [zx](https://github.com/google/zx), [dax](https://github.com/dsherret/dax), and [bnx](https://github.com/wobsoriano/bnx). Thank you to the authors of those projects.
|
||||
|
||||
@@ -57,7 +57,7 @@ coverageThreshold = { lines = 0.9, functions = 0.9 }
|
||||
|
||||
### Sourcemaps
|
||||
|
||||
Internally, Bun transpiles all files by default, so Bun automatically generates an internal [source map](https://web.dev/source-maps/) that maps lines of your original source code onto Bun's internal representation. If for any reason you want to disable this, set `test.coverageIgnoreSourcemaps` to `false`; this will rarely be desirable outside of advanced use cases.
|
||||
Internally, Bun transpiles all files by default, so Bun automatically generates an internal [source map](https://web.dev/source-maps/) that maps lines of your original source code onto Bun's internal representation. If for any reason you want to disable this, set `test.coverageIgnoreSourcemaps` to `true`; this will rarely be desirable outside of advanced use cases.
|
||||
|
||||
```toml
|
||||
[test]
|
||||
|
||||
@@ -156,6 +156,8 @@ test.if(macOS)("runs on macOS", () => {
|
||||
});
|
||||
```
|
||||
|
||||
## `test.skipIf`
|
||||
|
||||
To instead skip a test based on some condition, use `test.skipIf()` or `describe.skipIf()`.
|
||||
|
||||
```ts
|
||||
@@ -166,16 +168,32 @@ test.skipIf(macOS)("runs on non-macOS", () => {
|
||||
});
|
||||
```
|
||||
|
||||
## `test.todoIf`
|
||||
|
||||
If instead you want to mark the test as TODO, use `test.todoIf()` or `describe.todoIf()`. Choosing between `skipIf` and `todoIf` communicates intent: for example, "invalid for this target" versus "planned but not implemented yet."
|
||||
|
||||
```ts
|
||||
const macOS = process.platform === "darwin";
|
||||
|
||||
// TODO: we've only implemented this for Linux so far.
|
||||
test.todoIf(macOS)("runs on posix", () => {
|
||||
// runs if *not* macOS
|
||||
});
|
||||
```
|
||||
|
||||
## `test.each`
|
||||
|
||||
To run the same test once for each case in a table of values, use `test.each`.
|
||||
|
||||
```ts
|
||||
const cases = [[1, 2, 3], [3, 4, 5]];
|
||||
const cases = [
|
||||
[1, 2, 3],
|
||||
[3, 4, 5],
|
||||
];
|
||||
|
||||
test.each(cases)("%p + %p should be %p", (a, b, expected) => {
|
||||
// runs once for each test case provided
|
||||
})
|
||||
// runs once for each test case provided
|
||||
});
|
||||
```
|
||||
|
||||
There are a number of options available for formatting the case label depending on its type.
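For instance, `%s` and `%d` interpolate the current case's values into the label (a minimal sketch, assuming the usual printf-style placeholders):

```ts
import { test, expect } from "bun:test";

// %s formats a value as a string, %d as a number
test.each([
  ["hello", 5],
  ["bun", 3],
])("%s has length %d", (word, expected) => {
  expect(word).toHaveLength(expected);
});
```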
|
||||
|
||||
@@ -32,7 +32,6 @@
|
||||
"lint": "eslint './**/*.d.ts' --cache",
|
||||
"lint:fix": "eslint './**/*.d.ts' --cache --fix",
|
||||
"test": "node packages/bun-internal-test/src/runner.node.mjs ./build/bun-debug",
|
||||
"test:release": "node packages/bun-internal-test/src/runner.node.mjs ./build-release/bun",
|
||||
"update-known-failures": "node packages/bun-internal-test/src/update-known-windows-failures.mjs"
|
||||
"test:release": "node packages/bun-internal-test/src/runner.node.mjs ./build-release/bun"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -147,8 +147,6 @@ function lookupWindowsError(code) {
|
||||
|
||||
const failing_tests = [];
|
||||
const passing_tests = [];
|
||||
const fixes = [];
|
||||
const regressions = [];
|
||||
let maxFd = -1;
|
||||
function getMaxFileDescriptor(path) {
|
||||
if (process.platform === "win32") {
|
||||
@@ -214,13 +212,6 @@ async function runTest(path) {
|
||||
const name = path.replace(cwd, "").slice(1);
|
||||
let exitCode, signal, err, output;
|
||||
|
||||
const expected_crash_reason = windows
|
||||
? await readFile(resolve(path), "utf-8").then(data => {
|
||||
const match = data.match(/@known-failing-on-windows:(.*)\n/);
|
||||
return match ? match[1].trim() : null;
|
||||
})
|
||||
: null;
|
||||
|
||||
const start = Date.now();
|
||||
|
||||
const activeTestObject = { start, proc: undefined };
|
||||
@@ -249,6 +240,7 @@ Starting "${name}"
|
||||
BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
|
||||
GITHUB_ACTIONS: process.env.GITHUB_ACTIONS ?? "true",
|
||||
BUN_DEBUG_QUIET_LOGS: "1",
|
||||
BUN_INSTALL_CACHE_DIR: join(TMPDIR, ".bun-install-cache"),
|
||||
[windows ? "TEMP" : "TMPDIR"]: TMPDIR,
|
||||
},
|
||||
});
|
||||
@@ -370,7 +362,7 @@ Starting "${name}"
|
||||
|
||||
console.log(
|
||||
`\x1b[2m${formatTime(duration).padStart(6, " ")}\x1b[0m ${
|
||||
passed ? "\x1b[32m✔" : expected_crash_reason ? "\x1b[33m⚠" : "\x1b[31m✖"
|
||||
passed ? "\x1b[32m✔" : "\x1b[31m✖"
|
||||
} ${name}\x1b[0m${reason ? ` (${reason})` : ""}`,
|
||||
);
|
||||
|
||||
@@ -385,20 +377,10 @@ Starting "${name}"
|
||||
}
|
||||
|
||||
if (!passed) {
|
||||
if (reason) {
|
||||
if (windows && !expected_crash_reason) {
|
||||
regressions.push({ path: name, reason, output });
|
||||
}
|
||||
}
|
||||
|
||||
failing_tests.push({ path: name, reason, output, expected_crash_reason });
|
||||
failing_tests.push({ path: name, reason, output });
|
||||
process.exitCode = 1;
|
||||
if (err) console.error(err);
|
||||
} else {
|
||||
if (windows && expected_crash_reason !== null) {
|
||||
fixes.push({ path: name, output, expected_crash_reason });
|
||||
}
|
||||
|
||||
passing_tests.push(name);
|
||||
}
|
||||
|
||||
@@ -496,30 +478,6 @@ ${header}
|
||||
|
||||
`;
|
||||
|
||||
if (fixes.length > 0) {
|
||||
report += `## Fixes\n\n`;
|
||||
report += "The following tests had @known-failing-on-windows but now pass:\n\n";
|
||||
report += fixes
|
||||
.map(
|
||||
({ path, expected_crash_reason }) => `- [\`${path}\`](${sectionLink(path)}) (before: ${expected_crash_reason})`,
|
||||
)
|
||||
.join("\n");
|
||||
report += "\n\n";
|
||||
}
|
||||
|
||||
if (regressions.length > 0) {
|
||||
report += `## Regressions\n\n`;
|
||||
report += regressions
|
||||
.map(
|
||||
({ path, reason, expected_crash_reason }) =>
|
||||
`- [\`${path}\`](${sectionLink(path)}) ${reason}${
|
||||
expected_crash_reason ? ` (expected: ${expected_crash_reason})` : ""
|
||||
}`,
|
||||
)
|
||||
.join("\n");
|
||||
report += "\n\n";
|
||||
}
|
||||
|
||||
if (failingTestDisplay.length > 0) {
|
||||
report += `## Failing tests\n\n`;
|
||||
report += failingTestDisplay;
|
||||
@@ -534,17 +492,10 @@ if (failingTestDisplay.length > 0) {
|
||||
|
||||
if (failing_tests.length) {
|
||||
report += `## Failing tests log output\n\n`;
|
||||
for (const { path, output, reason, expected_crash_reason } of failing_tests) {
|
||||
for (const { path, output, reason } of failing_tests) {
|
||||
report += `### ${path}\n\n`;
|
||||
report += "[Link to file](" + linkToGH(path) + ")\n\n";
|
||||
if (windows && reason !== expected_crash_reason) {
|
||||
report += `To mark this as a known failing test, add this to the start of the file:\n`;
|
||||
report += `\`\`\`ts\n`;
|
||||
report += `// @known-failing-on-windows: ${reason}\n`;
|
||||
report += `\`\`\`\n\n`;
|
||||
} else {
|
||||
report += `${reason}\n\n`;
|
||||
}
|
||||
report += `${reason}\n\n`;
|
||||
report += "```\n";
|
||||
report += output
|
||||
.replace(/\x1b\[[0-9;]*m/g, "")
|
||||
@@ -559,18 +510,12 @@ writeFileSync(
|
||||
JSON.stringify({
|
||||
failing_tests,
|
||||
passing_tests,
|
||||
fixes,
|
||||
regressions,
|
||||
}),
|
||||
);
|
||||
|
||||
console.log("-> test-report.md, test-report.json");
|
||||
|
||||
if (ci) {
|
||||
if (windows) {
|
||||
action.setOutput("regressing_tests", regressions.map(({ path }) => `- \`${path}\``).join("\n"));
|
||||
action.setOutput("regressing_test_count", regressions.length);
|
||||
}
|
||||
if (failing_tests.length > 0) {
|
||||
action.setFailed(`${failing_tests.length} files with failing tests`);
|
||||
}
|
||||
@@ -582,12 +527,6 @@ if (ci) {
|
||||
}
|
||||
action.summary.addRaw(truncated_report);
|
||||
await action.summary.write();
|
||||
} else {
|
||||
if (windows && (regressions.length > 0 || fixes.length > 0)) {
|
||||
console.log(
|
||||
"\n\x1b[34mnote\x1b[0;2m:\x1b[0m If you would like to update the @known-failing-on-windows annotations, run `bun update-known-failures`",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
process.exit(failing_tests.length ? 1 : process.exitCode);
|
||||
|
||||
@@ -1,49 +0,0 @@
|
||||
import assert from "assert";
|
||||
import { existsSync, readFileSync, writeFileSync } from "fs";
|
||||
import { join } from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
if (process.platform !== "win32") {
|
||||
console.log("This script is only intended to be run on Windows.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
process.chdir(join(fileURLToPath(import.meta.url), "../../../../"));
|
||||
|
||||
if (!existsSync("test-report.json")) {
|
||||
console.log("No test report found. Please run `bun run test` first.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const test_report = JSON.parse(readFileSync("test-report.json", "utf8"));
|
||||
assert(Array.isArray(test_report.failing_tests));
|
||||
|
||||
for (const { path, reason, expected_crash_reason } of test_report.failing_tests) {
|
||||
assert(path);
|
||||
assert(reason);
|
||||
|
||||
if (expected_crash_reason !== reason) {
|
||||
const old_content = readFileSync(path, "utf8");
|
||||
if (!old_content.includes("// @known-failing-on-windows")) {
|
||||
let content = old_content.replace(/\/\/\s*@known-failing-on-windows:.*\n/, "");
|
||||
if (reason) {
|
||||
content = `// @known-failing-on-windows: ${reason}\n` + content;
|
||||
}
|
||||
writeFileSync(path, content, "utf8");
|
||||
console.log(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const { path } of test_report.fixes) {
|
||||
assert(path);
|
||||
|
||||
const old_content = readFileSync(path, "utf8");
|
||||
|
||||
let content = old_content.replace(/\/\/\s*@known-failing-on-windows:.*\n/, "");
|
||||
|
||||
if (content !== old_content) {
|
||||
writeFileSync(path, content, "utf8");
|
||||
console.log(path);
|
||||
}
|
||||
}
|
||||
@@ -19,9 +19,7 @@ export default function polyfillImportMeta(metaIn: ImportMeta) {
|
||||
dir: path.dirname(metapath),
|
||||
file: path.basename(metapath),
|
||||
require: require2,
|
||||
async resolve(id: string, parent?: string) {
|
||||
return this.resolveSync(id, parent);
|
||||
},
|
||||
resolve: metaIn.resolve,
|
||||
resolveSync(id: string, parent?: string) {
|
||||
return require2.resolve(id, {
|
||||
paths: typeof parent === 'string' ? [
|
||||
|
||||
14
packages/bun-release/.gitignore
vendored
@@ -1,6 +1,8 @@
|
||||
.DS_Store
|
||||
.env
|
||||
node_modules
|
||||
/npm/**/bin
|
||||
/npm/**/*.js
|
||||
/npm/**/.npmrc
|
||||
.DS_Store
|
||||
.env
|
||||
node_modules
|
||||
/npm/**/bin
|
||||
/npm/**/*.js
|
||||
/npm/**/package.json
|
||||
/npm/**/.npmrc
|
||||
*.tgz
|
||||
|
||||
Binary file not shown.
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"name": "@oven/bun-darwin-aarch64",
|
||||
"version": "0.5.3",
|
||||
"description": "This is the macOS arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"preferUnplugged": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"cpu": [
|
||||
"arm64"
|
||||
]
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"name": "@oven/bun-darwin-x64-baseline",
|
||||
"version": "0.5.3",
|
||||
"description": "This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"preferUnplugged": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"cpu": [
|
||||
"x64"
|
||||
]
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"name": "@oven/bun-darwin-x64",
|
||||
"version": "0.5.3",
|
||||
"description": "This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"preferUnplugged": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"cpu": [
|
||||
"x64"
|
||||
]
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"name": "@oven/bun-linux-aarch64",
|
||||
"version": "0.5.3",
|
||||
"description": "This is the Linux arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"preferUnplugged": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"cpu": [
|
||||
"arm64"
|
||||
]
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"name": "@oven/bun-linux-x64-baseline",
|
||||
"version": "0.5.3",
|
||||
"description": "This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"preferUnplugged": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"cpu": [
|
||||
"x64"
|
||||
]
|
||||
}
|
||||
@@ -1,16 +0,0 @@
|
||||
{
|
||||
"name": "@oven/bun-linux-x64",
|
||||
"version": "0.5.3",
|
||||
"description": "This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"preferUnplugged": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"cpu": [
|
||||
"x64"
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
# Bun
|
||||
|
||||
This is the Windows x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh
|
||||
|
||||
_Note: "Baseline" builds are for machines that do not support [AVX2](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions) instructions._
|
||||
3
packages/bun-release/npm/@oven/bun-windows-x64/README.md
Normal file
@@ -0,0 +1,3 @@
|
||||
# Bun
|
||||
|
||||
This is the Windows x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh
|
||||
@@ -1,42 +0,0 @@
|
||||
{
|
||||
"name": "bun",
|
||||
"version": "0.5.3",
|
||||
"description": "Bun is a fast all-in-one JavaScript runtime.",
|
||||
"keywords": [
|
||||
"bun",
|
||||
"bun.js",
|
||||
"node",
|
||||
"node.js",
|
||||
"runtime",
|
||||
"bundler",
|
||||
"transpiler",
|
||||
"typescript"
|
||||
],
|
||||
"homepage": "https://bun.sh",
|
||||
"bugs": "https://github.com/oven-sh/issues",
|
||||
"license": "MIT",
|
||||
"bin": {
|
||||
"bun": "bin/bun",
|
||||
"bunx": "bin/bun"
|
||||
},
|
||||
"repository": "https://github.com/oven-sh/bun",
|
||||
"scripts": {
|
||||
"postinstall": "node install.js"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@oven/bun-darwin-aarch64": "0.5.3",
|
||||
"@oven/bun-darwin-x64": "0.5.3",
|
||||
"@oven/bun-darwin-x64-baseline": "0.5.3",
|
||||
"@oven/bun-linux-aarch64": "0.5.3",
|
||||
"@oven/bun-linux-x64": "0.5.3",
|
||||
"@oven/bun-linux-x64-baseline": "0.5.3"
|
||||
},
|
||||
"os": [
|
||||
"darwin",
|
||||
"linux"
|
||||
],
|
||||
"cpu": [
|
||||
"arm64",
|
||||
"x64"
|
||||
]
|
||||
}
|
||||
@@ -9,7 +9,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@octokit/types": "^8.1.1",
|
||||
"bun-types": "^0.4.0",
|
||||
"bun-types": "^1.1.0",
|
||||
"prettier": "^2.8.2"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { importBun } from "../src/npm/install";
|
||||
import { importBun, optimizeBun } from "../src/npm/install";
|
||||
import { execFileSync } from "child_process";
|
||||
|
||||
importBun()
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
import { join, copy, exists, chmod, write, writeJson } from "../src/fs";
|
||||
import { mkdtemp } from "fs/promises";
|
||||
import { rmSync, mkdirSync } from "fs";
|
||||
import { tmpdir } from "os";
|
||||
import { dirname } from "path";
|
||||
import { fetch } from "../src/fetch";
|
||||
import { spawn } from "../src/spawn";
|
||||
import type { Platform } from "../src/platform";
|
||||
@@ -10,41 +14,51 @@ import { buildSync, formatMessagesSync } from "esbuild";
|
||||
import type { JSZipObject } from "jszip";
|
||||
import { loadAsync } from "jszip";
|
||||
import { debug, log, error } from "../src/console";
|
||||
import { expect } from "bun:test";
|
||||
|
||||
const module = "bun";
|
||||
const owner = "@oven";
|
||||
let version: string;
|
||||
|
||||
const [tag, action] = process.argv.slice(2);
|
||||
|
||||
await build(tag);
|
||||
const release = await getRelease(tag);
|
||||
const version = await getSemver(release.tag_name);
|
||||
|
||||
if (action !== "test-only") await build();
|
||||
|
||||
if (action === "publish") {
|
||||
await publish();
|
||||
} else if (action === "dry-run") {
|
||||
await publish(true);
|
||||
} else if (action === "test") {
|
||||
await publish(true);
|
||||
await test();
|
||||
} else if (action === "test-only") {
|
||||
await test();
|
||||
} else if (action) {
|
||||
throw new Error(`Unknown action: ${action}`);
|
||||
}
|
||||
process.exit(0); // HACK
|
||||
|
||||
async function build(tag?: string): Promise<void> {
|
||||
const release = await getRelease(tag);
|
||||
version = await getSemver(release.tag_name);
|
||||
async function build(): Promise<void> {
|
||||
await buildRootModule();
|
||||
for (const platform of platforms) {
|
||||
if (action !== "publish" && (platform.os !== process.platform || platform.arch !== process.arch)) continue;
|
||||
await buildModule(release, platform);
|
||||
}
|
||||
}
|
||||
|
||||
async function publish(dryRun?: boolean): Promise<void> {
|
||||
const modules = platforms.map(({ bin }) => `${owner}/${bin}`);
|
||||
const modules = platforms
|
||||
.filter(({ os, arch }) => action === "publish" || (os === process.platform && arch === process.arch))
|
||||
.map(({ bin }) => `${owner}/${bin}`);
|
||||
modules.push(module);
|
||||
for (const module of modules) {
|
||||
publishModule(module, dryRun);
|
||||
}
|
||||
}
|
||||
|
||||
async function buildRootModule() {
|
||||
async function buildRootModule(dryRun?: boolean) {
|
||||
log("Building:", `${module}@${version}`);
|
||||
const cwd = join("npm", module);
|
||||
const define = {
|
||||
@@ -54,28 +68,53 @@ async function buildRootModule() {
|
||||
};
|
||||
bundle(join("scripts", "npm-postinstall.ts"), join(cwd, "install.js"), {
|
||||
define,
|
||||
});
|
||||
bundle(join("scripts", "npm-exec.ts"), join(cwd, "bin", "bun"), {
|
||||
define,
|
||||
banner: {
|
||||
js: "#!/usr/bin/env node",
|
||||
js: "// Source code: https://github.com/oven-sh/bun/blob/main/packages/bun-release/scripts/npm-postinstall.ts",
|
||||
},
|
||||
});
|
||||
write(join(cwd, "bin", "bun.exe"), "");
|
||||
write(
|
||||
join(cwd, "bin", "README.txt"),
|
||||
`The 'bun.exe' file is a placeholder for the binary file, which
|
||||
is replaced by Bun's 'postinstall' script. For this to work, make
|
||||
sure that you do not use --ignore-scripts while installing.
|
||||
|
||||
The postinstall script is responsible for linking the binary file
|
||||
directly into 'node_modules/.bin' and avoiding a Node.js wrapper
|
||||
script being called on every invocation of 'bun'. If this wasn't
|
||||
done, Bun would seem to be slower than Node.js, because it would
|
||||
be executing a copy of Node.js every time!
|
||||
|
||||
Unfortunately, it is not possible to fix all cases on all platforms
|
||||
without *requiring* a postinstall script.
|
||||
`,
|
||||
);
|
||||
const os = [...new Set(platforms.map(({ os }) => os))];
|
||||
const cpu = [...new Set(platforms.map(({ arch }) => arch))];
|
||||
writeJson(join(cwd, "package.json"), {
|
||||
name: module,
|
||||
description: "Bun is a fast all-in-one JavaScript runtime.",
|
||||
version: version,
|
||||
scripts: {
|
||||
postinstall: "node install.js",
|
||||
},
|
||||
optionalDependencies: Object.fromEntries(platforms.map(({ bin }) => [`${owner}/${bin}`, version])),
|
||||
optionalDependencies: Object.fromEntries(
|
||||
platforms.map(({ bin }) => [
|
||||
`${owner}/${bin}`,
|
||||
dryRun ? `file:./oven-${bin.replaceAll("/", "-") + "-" + version + ".tgz"}` : version,
|
||||
]),
|
||||
),
|
||||
bin: {
|
||||
bun: "bin/bun",
|
||||
bunx: "bin/bun",
|
||||
bun: "bin/bun.exe",
|
||||
bunx: "bin/bun.exe",
|
||||
},
|
||||
os,
|
||||
cpu,
|
||||
keywords: ["bun", "bun.js", "node", "node.js", "runtime", "bundler", "transpiler", "typescript"],
|
||||
homepage: "https://bun.sh",
|
||||
bugs: "https://github.com/oven-sh/issues",
|
||||
license: "MIT",
|
||||
repository: "https://github.com/oven-sh/bun",
|
||||
});
|
||||
if (exists(".npmrc")) {
|
||||
copy(".npmrc", join(cwd, ".npmrc"));
|
||||
@@ -95,11 +134,17 @@ async function buildModule(
|
||||
}
|
||||
const bun = await extractFromZip(asset.browser_download_url, `${bin}/bun`);
|
||||
const cwd = join("npm", module);
|
||||
mkdirSync(dirname(join(cwd, exe)), { recursive: true });
|
||||
write(join(cwd, exe), await bun.async("arraybuffer"));
|
||||
chmod(join(cwd, exe), 0o755);
|
||||
writeJson(join(cwd, "package.json"), {
|
||||
name: module,
|
||||
version: version,
|
||||
description: "This is the macOS arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
|
||||
homepage: "https://bun.sh",
|
||||
bugs: "https://github.com/oven-sh/issues",
|
||||
license: "MIT",
|
||||
repository: "https://github.com/oven-sh/bun",
|
||||
preferUnplugged: true,
|
||||
os: [os],
|
||||
cpu: [arch],
|
||||
@@ -111,22 +156,33 @@ async function buildModule(
|
||||
|
||||
function publishModule(name: string, dryRun?: boolean): void {
|
||||
log(dryRun ? "Dry-run Publishing:" : "Publishing:", `${name}@${version}`);
|
||||
const { exitCode, stdout, stderr } = spawn(
|
||||
"npm",
|
||||
[
|
||||
"publish",
|
||||
"--access",
|
||||
"public",
|
||||
"--tag",
|
||||
version.includes("canary") ? "canary" : "latest",
|
||||
...(dryRun ? ["--dry-run"] : []),
|
||||
],
|
||||
{
|
||||
cwd: join("npm", name),
|
||||
},
|
||||
);
|
||||
if (exitCode === 0) {
|
||||
if (!dryRun) {
|
||||
const { exitCode, stdout, stderr } = spawn(
|
||||
"npm",
|
||||
[
|
||||
"publish",
|
||||
"--access",
|
||||
"public",
|
||||
"--tag",
|
||||
version.includes("canary") ? "canary" : "latest",
|
||||
...(dryRun ? ["--dry-run"] : []),
|
||||
],
|
||||
{
|
||||
cwd: join("npm", name),
|
||||
},
|
||||
);
|
||||
error(stderr || stdout);
|
||||
if (exitCode !== 0) {
|
||||
throw new Error("npm publish failed with code " + exitCode);
|
||||
}
|
||||
} else {
|
||||
const { exitCode, stdout, stderr } = spawn("npm", ["pack"], {
|
||||
cwd: join("npm", name),
|
||||
});
|
||||
error(stderr || stdout);
|
||||
if (exitCode !== 0) {
|
||||
throw new Error("npm pack failed with code " + exitCode);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -162,3 +218,86 @@ function bundle(src: string, dst: string, options: BuildOptions = {}): void {
|
||||
throw new Error(messages.join("\n"));
|
||||
}
|
||||
}
|
||||
|
||||
async function test() {
|
||||
const root = await mkdtemp(join(tmpdir(), "bun-release-test-"));
|
||||
const $ = new Bun.$.Shell().cwd(root);
|
||||
|
||||
for (const platform of platforms) {
|
||||
if (platform.os !== process.platform) continue;
|
||||
if (platform.arch !== process.arch) continue;
|
||||
copy(
|
||||
join(
|
||||
import.meta.dir,
|
||||
"../npm/@oven/",
|
||||
platform.bin,
|
||||
"oven-" + platform.bin.replaceAll("/", "-") + `-${version}.tgz`,
|
||||
),
|
||||
join(root, `${platform.bin}-${version}.tgz`),
|
||||
);
|
||||
}
|
||||
|
||||
copy(join(import.meta.dir, "../npm", "bun", "bun-" + version + ".tgz"), join(root, "bun-" + version + ".tgz"));
|
||||
|
||||
console.log(root);
|
||||
for (const [install, exec] of [
|
||||
["npm i", "npm exec"],
|
||||
["yarn set version berry; yarn add", "yarn"],
|
||||
["yarn set version latest; yarn add", "yarn"],
|
||||
["pnpm i", "pnpm"],
|
||||
["bun i", "bun run"],
|
||||
]) {
|
||||
rmSync(join(root, "node_modules"), { recursive: true, force: true });
|
||||
rmSync(join(root, "package-lock.json"), { recursive: true, force: true });
|
||||
rmSync(join(root, "package.json"), { recursive: true, force: true });
|
||||
rmSync(join(root, "pnpm-lock.yaml"), { recursive: true, force: true });
|
||||
rmSync(join(root, "yarn.lock"), { recursive: true, force: true });
|
||||
writeJson(join(root, "package.json"), {
|
||||
name: "bun-release-test",
|
||||
});
|
||||
|
||||
console.log("Testing", install + " bun");
|
||||
await $`${{ raw: install }} ./bun-${version}.tgz`;
|
||||
|
||||
console.log("Running " + exec + " bun");
|
||||
|
||||
// let output = await $`${{
|
||||
// raw: exec,
|
||||
// }} bun -- -e "console.log(JSON.stringify([Bun.version, process.platform, process.arch, process.execPath]))"`.text();
|
||||
const split = exec.split(" ");
|
||||
let {
|
||||
stdout: output,
|
||||
stderr,
|
||||
exitCode,
|
||||
} = spawn(
|
||||
split[0],
|
||||
[
|
||||
...split.slice(1),
|
||||
"--",
|
||||
"bun",
|
||||
"-e",
|
||||
"console.log(JSON.stringify([Bun.version, process.platform, process.arch, process.execPath]))",
|
||||
],
|
||||
{
|
||||
cwd: root,
|
||||
},
|
||||
);
|
||||
if (exitCode !== 0) {
|
||||
console.error(stderr);
|
||||
throw new Error("Failed to run " + exec + " bun, exit code: " + exitCode);
|
||||
}
|
||||
|
||||
try {
|
||||
output = JSON.parse(output);
|
||||
} catch (e) {
|
||||
console.log({ output });
|
||||
throw e;
|
||||
}
|
||||
|
||||
expect(output[0]).toBe(version);
|
||||
expect(output[1]).toBe(process.platform);
|
||||
expect(output[2]).toBe(process.arch);
|
||||
expect(output[3]).toStartWith(root);
|
||||
expect(output[3]).toInclude("bun");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -121,24 +121,14 @@ async function downloadBun(platform: Platform, dst: string): Promise<void> {
|
||||
}
|
||||
|
||||
export function optimizeBun(path: string): void {
|
||||
if (os === "win32") {
|
||||
throw new Error(
|
||||
"You must use Windows Subsystem for Linux, aka. WSL, to run bun. Learn more: https://learn.microsoft.com/en-us/windows/wsl/install",
|
||||
);
|
||||
}
|
||||
const { npm_config_user_agent } = process.env;
|
||||
if (npm_config_user_agent && /\byarn\//.test(npm_config_user_agent)) {
|
||||
throw new Error(
|
||||
"Yarn does not support bun, because it does not allow linking to binaries. To use bun, install using the following command: curl -fsSL https://bun.sh/install | bash",
|
||||
);
|
||||
}
|
||||
const installScript = os === "win32" ? 'powershell -c "irm bun.sh/install.ps1 | iex"' : "curl -fsSL https://bun.sh/install | bash";
|
||||
try {
|
||||
rename(path, join(__dirname, "bin", "bun"));
|
||||
rename(path, join(__dirname, "bin", "bun.exe"));
|
||||
return;
|
||||
} catch (error) {
|
||||
debug("optimizeBun failed", error);
|
||||
}
|
||||
throw new Error(
|
||||
"Your package manager doesn't seem to support bun. To use bun, install using the following command: curl -fsSL https://bun.sh/install | bash",
|
||||
`Your package manager doesn't seem to support bun. To use bun, install using the following command: ${installScript}`,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -6,7 +6,9 @@ export const os = process.platform;
|
||||
|
||||
export const arch = os === "darwin" && process.arch === "x64" && isRosetta2() ? "arm64" : process.arch;
|
||||
|
||||
export const avx2 = (arch === "x64" && os === "linux" && isLinuxAVX2()) || (os === "darwin" && isDarwinAVX2());
|
||||
export const avx2 =
|
||||
arch === "x64" &&
|
||||
((os === "linux" && isLinuxAVX2()) || (os === "darwin" && isDarwinAVX2()) || (os === "win32" && isWindowsAVX2()));
|
||||
|
||||
export type Platform = {
|
||||
os: string;
|
||||
@@ -55,6 +57,19 @@ export const platforms: Platform[] = [
|
||||
bin: "bun-linux-x64-baseline",
|
||||
exe: "bin/bun",
|
||||
},
|
||||
{
|
||||
os: "win32",
|
||||
arch: "x64",
|
||||
avx2: true,
|
||||
bin: "bun-windows-x64",
|
||||
exe: "bin/bun.exe",
|
||||
},
|
||||
{
|
||||
os: "win32",
|
||||
arch: "x64",
|
||||
bin: "bun-windows-x64-baseline",
|
||||
exe: "bin/bun.exe",
|
||||
},
|
||||
];
|
||||
|
||||
export const supportedPlatforms: Platform[] = platforms
|
||||
@@ -89,3 +104,17 @@ function isRosetta2(): boolean {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function isWindowsAVX2(): boolean {
|
||||
try {
|
||||
return (
|
||||
spawn("powershell", [
|
||||
"-c",
|
||||
`(Add-Type -MemberDefinition '[DllImport("kernel32.dll")] public static extern bool IsProcessorFeaturePresent(int ProcessorFeature);' -Name 'Kernel32' -Namespace 'Win32' -PassThru)::IsProcessorFeaturePresent(40);`,
|
||||
]).stdout == "True"
|
||||
);
|
||||
} catch (error) {
|
||||
debug("isWindowsAVX2 failed", error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
23
packages/bun-types/bun.d.ts
vendored
@@ -43,7 +43,7 @@ declare module "bun" {
|
||||
*
|
||||
* @param {string} command The name of the executable or script
|
||||
* @param {string} options.PATH Overrides the PATH environment variable
|
||||
* @param {string} options.cwd Limits the search to a particular directory in which to searc
|
||||
* @param {string} options.cwd When given a relative path, use this path to join it.
|
||||
*/
|
||||
function which(command: string, options?: { PATH?: string; cwd?: string }): string | null;
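A usage sketch for the signature above (return values depend on the machine, and the paths shown are hypothetical):

```ts
Bun.which("ls"); // e.g. "/bin/ls", or null if nothing is found
Bun.which("ls", { PATH: "/usr/bin" }); // restrict the lookup to a custom PATH
Bun.which("./scripts/build.sh", { cwd: "/tmp/project" }); // relative paths are joined to cwd
```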
|
||||
|
||||
@@ -277,12 +277,16 @@ declare module "bun" {
|
||||
blob(): Promise<Blob>;
|
||||
|
||||
/**
|
||||
* Configure the shell to not throw an exception on non-zero exit codes.
|
||||
* Configure the shell to not throw an exception on non-zero exit codes. Throwing can be re-enabled with `.throws(true)`.
|
||||
*
|
||||
* By default, the shell will throw an exception on commands which return non-zero exit codes.
|
||||
*/
|
||||
nothrow(): this;
|
||||
|
||||
/**
|
||||
* Configure whether or not the shell should throw an exception on non-zero exit codes.
|
||||
*
|
||||
* By default, this is configured to `true`.
|
||||
*/
|
||||
throws(shouldThrow: boolean): this;
|
||||
}
|
||||
@@ -2988,12 +2992,19 @@ declare module "bun" {
|
||||
}
|
||||
|
||||
/**
|
||||
* Nanoseconds since Bun.js was started as an integer.
|
||||
* Returns the number of nanoseconds since the process was started.
|
||||
*
|
||||
* This uses a high-resolution monotonic system timer.
|
||||
* This function uses a high-resolution monotonic system timer to provide precise time measurements.
|
||||
* In JavaScript, numbers are represented as double-precision floating-point values (IEEE 754),
|
||||
* which can safely represent integers up to 2^53 - 1 (Number.MAX_SAFE_INTEGER).
|
||||
*
|
||||
* After 14 weeks of consecutive uptime, this function
|
||||
* wraps
|
||||
* Due to this limitation, the precision of the returned value degrades after about 14.8 weeks
* of uptime, once the nanosecond count exceeds Number.MAX_SAFE_INTEGER. The function keeps
* counting beyond that point, but with reduced precision, which can affect time calculations
* and comparisons in long-running applications.
|
||||
*
|
||||
* @returns {number} The number of nanoseconds since the process was started, with precise values up to
|
||||
* Number.MAX_SAFE_INTEGER.
|
||||
*/
|
||||
function nanoseconds(): number;
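As a rough sanity check of the 14.8-week figure above (an aside, not part of the type definitions):

```ts
const maxSafeNs = Number.MAX_SAFE_INTEGER; // 2^53 - 1 nanoseconds
const weeks = maxSafeNs / 1e9 / 86_400 / 7; // ns -> seconds -> days -> weeks
console.log(weeks.toFixed(1)); // "14.9", i.e. exact integer counting lasts roughly 14.8-14.9 weeks
```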
|
||||
|
||||
|
||||
36
packages/bun-types/globals.d.ts
vendored
@@ -1758,21 +1758,10 @@ declare global {
|
||||
* ```
|
||||
*/
|
||||
readonly env: NodeJS.ProcessEnv;
|
||||
/**
|
||||
* Resolve a module ID the same as if you imported it
|
||||
*
|
||||
* On failure, throws a `ResolveMessage`
|
||||
*/
|
||||
resolve(moduleId: string): Promise<string>;
|
||||
/**
|
||||
* Resolve a `moduleId` as though it were imported from `parent`
|
||||
*
|
||||
* On failure, throws a `ResolveMessage`
|
||||
*/
|
||||
// tslint:disable-next-line:unified-signatures
|
||||
resolve(moduleId: string, parent: string): Promise<string>;
|
||||
|
||||
/**
|
||||
* @deprecated Use `require.resolve` or `Bun.resolveSync(moduleId, path.dirname(parent))` instead
|
||||
*
|
||||
* Resolve a module ID the same as if you imported it
|
||||
*
|
||||
* The `parent` argument is optional, and defaults to the current module's path.
|
||||
@@ -1780,17 +1769,12 @@ declare global {
|
||||
resolveSync(moduleId: string, parent?: string): string;
|
||||
|
||||
/**
|
||||
* Load a CommonJS module
|
||||
* Load a CommonJS module within an ES Module. Bun's transpiler rewrites all
|
||||
* calls to `require` with `import.meta.require` when transpiling ES Modules
|
||||
* for the runtime.
|
||||
*
|
||||
* Internally, this is a synchronous version of ESModule's `import()`, with extra code for handling:
|
||||
* - CommonJS modules
|
||||
* - *.node files
|
||||
* - *.json files
|
||||
*
|
||||
* Warning: **This API is not stable** and may change in the future. Use at your
|
||||
* own risk. Usually, you should use `require` instead and Bun's transpiler
|
||||
* will automatically rewrite your code to use `import.meta.require` if
|
||||
* relevant.
|
||||
* Warning: **This API is not stable** and may change or be removed in the
|
||||
* future. Use at your own risk.
|
||||
*/
|
||||
require: NodeJS.Require;
|
||||
|
||||
@@ -1814,17 +1798,15 @@ declare global {
|
||||
readonly main: boolean;
|
||||
|
||||
/** Alias of `import.meta.dir`. Exists for Node.js compatibility */
|
||||
dirname: string;
|
||||
readonly dirname: string;
|
||||
|
||||
/** Alias of `import.meta.path`. Exists for Node.js compatibility */
|
||||
filename: string;
|
||||
readonly filename: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* NodeJS-style `require` function
|
||||
*
|
||||
* Internally, uses `import.meta.require`
|
||||
*
|
||||
* @param moduleId - The module ID to resolve
|
||||
*/
|
||||
var require: NodeJS.Require;
|
||||
|
||||
61
packages/bun-types/overrides.d.ts
vendored
@@ -62,3 +62,64 @@ declare module "tls" {
|
||||
|
||||
function connect(options: BunConnectionOptions, secureConnectListener?: () => void): TLSSocket;
|
||||
}
|
||||
|
||||
declare module "util" {
|
||||
// https://nodejs.org/docs/latest/api/util.html#foreground-colors
|
||||
type ForegroundColors =
|
||||
| "black"
|
||||
| "blackBright"
|
||||
| "blue"
|
||||
| "blueBright"
|
||||
| "cyan"
|
||||
| "cyanBright"
|
||||
| "gray"
|
||||
| "green"
|
||||
| "greenBright"
|
||||
| "grey"
|
||||
| "magenta"
|
||||
| "magentaBright"
|
||||
| "red"
|
||||
| "redBright"
|
||||
| "white"
|
||||
| "whiteBright"
|
||||
| "yellow"
|
||||
| "yellowBright";
|
||||
|
||||
// https://nodejs.org/docs/latest/api/util.html#background-colors
|
||||
type BackgroundColors =
|
||||
| "bgBlack"
|
||||
| "bgBlackBright"
|
||||
| "bgBlue"
|
||||
| "bgBlueBright"
|
||||
| "bgCyan"
|
||||
| "bgCyanBright"
|
||||
| "bgGray"
|
||||
| "bgGreen"
|
||||
| "bgGreenBright"
|
||||
| "bgGrey"
|
||||
| "bgMagenta"
|
||||
| "bgMagentaBright"
|
||||
| "bgRed"
|
||||
| "bgRedBright"
|
||||
| "bgWhite"
|
||||
| "bgWhiteBright"
|
||||
| "bgYellow"
|
||||
| "bgYellowBright";
|
||||
|
||||
// https://nodejs.org/docs/latest/api/util.html#modifiers
|
||||
type Modifiers =
|
||||
| "blink"
|
||||
| "bold"
|
||||
| "dim"
|
||||
| "doubleunderline"
|
||||
| "framed"
|
||||
| "hidden"
|
||||
| "inverse"
|
||||
| "italic"
|
||||
| "overlined"
|
||||
| "reset"
|
||||
| "strikethrough"
|
||||
| "underline";
|
||||
|
||||
function styleText(format: ForegroundColors | BackgroundColors | Modifiers, text: string): string;
|
||||
}
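A usage sketch for the `styleText` declaration above:

```ts
import { styleText } from "node:util";

// Apply one format at a time; nest calls to combine foreground and background colors.
console.log(styleText("red", "error:") + " something went wrong");
console.log(styleText("bgGreen", styleText("black", " PASS ")));
```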
|
||||
|
||||
19
packages/bun-types/test.d.ts
vendored
@@ -201,12 +201,17 @@ declare module "bun:test" {
|
||||
* @param condition if these tests should be skipped
|
||||
*/
|
||||
skipIf(condition: boolean): (label: string, fn: () => void) => void;
|
||||
/**
|
||||
* Marks this group of tests as to be written or to be fixed, if `condition` is true.
|
||||
*
|
||||
* @param condition if these tests should be skipped
|
||||
*/
|
||||
todoIf(condition: boolean): (label: string, fn: () => void) => void;
|
||||
/**
|
||||
* Returns a function that runs for each item in `table`.
|
||||
*
|
||||
* @param table Array of Arrays with the arguments that are passed into the test fn for each row.
|
||||
*/
|
||||
|
||||
each<T extends Readonly<[any, ...any[]]>>(
|
||||
table: readonly T[],
|
||||
): (label: string, fn: (...args: [...T]) => void | Promise<unknown>, options?: number | TestOptions) => void;
|
||||
@@ -414,6 +419,18 @@ declare module "bun:test" {
|
||||
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
|
||||
options?: number | TestOptions,
|
||||
) => void;
|
||||
/**
|
||||
* Marks this test as to be written or to be fixed, if `condition` is true.
|
||||
*
|
||||
* @param condition if the test should be marked TODO
|
||||
*/
|
||||
todoIf(
|
||||
condition: boolean,
|
||||
): (
|
||||
label: string,
|
||||
fn: (() => void | Promise<unknown>) | ((done: (err?: unknown) => void) => void),
|
||||
options?: number | TestOptions,
|
||||
) => void;
|
||||
/**
|
||||
* Returns a function that runs for each item in `table`.
|
||||
*
|
||||
|
||||
@@ -100,9 +100,10 @@ struct us_internal_ssl_socket_context_t {
|
||||
struct us_internal_ssl_socket_t {
|
||||
struct us_socket_t s;
|
||||
SSL *ssl;
|
||||
int ssl_write_wants_read; // we use this for now
|
||||
int ssl_read_wants_write;
|
||||
int pending_handshake;
|
||||
unsigned int ssl_write_wants_read : 1; // we use this for now
|
||||
unsigned int ssl_read_wants_write : 1;
|
||||
unsigned int pending_handshake : 1;
|
||||
unsigned int received_ssl_shutdown : 1;
|
||||
};
|
||||
|
||||
int passphrase_cb(char *buf, int size, int rwflag, void *u) {
|
||||
@@ -184,6 +185,8 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
|
||||
s->ssl = SSL_new(context->ssl_context);
|
||||
s->ssl_write_wants_read = 0;
|
||||
s->ssl_read_wants_write = 0;
|
||||
s->pending_handshake = 1;
|
||||
s->received_ssl_shutdown = 0;
|
||||
|
||||
SSL_set_bio(s->ssl, loop_ssl_data->shared_rbio, loop_ssl_data->shared_wbio);
|
||||
|
||||
@@ -201,11 +204,8 @@ struct us_internal_ssl_socket_t *ssl_on_open(struct us_internal_ssl_socket_t *s,
|
||||
ip_length);
|
||||
|
||||
// Hello Message!
|
||||
// always handshake after open if on_handshake is set
|
||||
if (context->on_handshake || s->pending_handshake) {
|
||||
s->pending_handshake = 1;
|
||||
us_internal_ssl_handshake(s);
|
||||
}
|
||||
// always handshake after open
|
||||
us_internal_ssl_handshake(s);
|
||||
|
||||
return result;
|
||||
}
|
||||
@@ -220,6 +220,30 @@ void us_internal_on_ssl_handshake(
|
||||
context->handshake_data = custom_data;
|
||||
}
|
||||
|
||||
struct us_internal_ssl_socket_t *
|
||||
us_internal_ssl_socket_close(struct us_internal_ssl_socket_t *s, int code,
|
||||
void *reason) {
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
|
||||
if (s->pending_handshake) {
|
||||
s->pending_handshake = 0;
|
||||
// If a handshake is still pending, cancel it and check the latest
// handshake error so that on_handshake always fires with that error
// before close (this keeps secureConnection/secure firing before close).
// Without this, the same check would be needed on every on_close path
// (sockets, fetch, etc.), adding complexity in many places.
|
||||
if (context->on_handshake != NULL) {
|
||||
struct us_bun_verify_error_t verify_error = us_internal_verify_error(s);
|
||||
context->on_handshake(s, 0, verify_error, context->handshake_data);
|
||||
}
|
||||
}
|
||||
|
||||
return (struct us_internal_ssl_socket_t *)us_socket_close(
|
||||
0, (struct us_socket_t *)s, code, reason);
|
||||
}
|
||||
|
||||
void us_internal_ssl_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
@@ -230,7 +254,6 @@ void us_internal_ssl_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
|
||||
// will start on_open, on_writable or on_data
|
||||
if (!s->ssl) {
|
||||
s->pending_handshake = 1;
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -243,7 +266,8 @@ void us_internal_ssl_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
loop_ssl_data->ssl_socket = &s->s;
|
||||
loop_ssl_data->msg_more = 0;
|
||||
|
||||
if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s)) {
|
||||
if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s) ||
|
||||
SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
|
||||
s->pending_handshake = 0;
|
||||
|
||||
struct us_bun_verify_error_t verify_error = (struct us_bun_verify_error_t){
|
||||
@@ -255,6 +279,12 @@ void us_internal_ssl_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
}
|
||||
|
||||
int result = SSL_do_handshake(s->ssl);
|
||||
|
||||
if (SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
|
||||
s->received_ssl_shutdown = 1;
|
||||
us_internal_ssl_socket_close(s, 0, NULL);
|
||||
return;
|
||||
}
|
||||
if (result <= 0) {
|
||||
int err = SSL_get_error(s->ssl, result);
|
||||
// as far as I know these are the only errors we want to handle
|
||||
@@ -272,16 +302,14 @@ void us_internal_ssl_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
on_handshake(s, 0, verify_error, custom_data);
|
||||
}
|
||||
return;
|
||||
} else {
|
||||
|
||||
s->pending_handshake = 1;
|
||||
context->on_handshake = on_handshake;
|
||||
context->handshake_data = custom_data;
|
||||
// Ensure that we'll cycle through internal openssl's state
|
||||
if (!us_socket_is_closed(0, &s->s) &&
|
||||
!us_internal_ssl_socket_is_shut_down(s)) {
|
||||
us_socket_write(1, loop_ssl_data->ssl_socket, "\0", 0, 0);
|
||||
}
|
||||
}
|
||||
s->pending_handshake = 1;
|
||||
context->on_handshake = on_handshake;
|
||||
context->handshake_data = custom_data;
|
||||
// Ensure that we'll cycle through internal openssl's state
|
||||
if (!us_socket_is_closed(0, &s->s) &&
|
||||
!us_internal_ssl_socket_is_shut_down(s)) {
|
||||
us_socket_write(1, loop_ssl_data->ssl_socket, "\0", 0, 0);
|
||||
}
|
||||
} else {
|
||||
s->pending_handshake = 0;
|
||||
@@ -299,31 +327,12 @@ void us_internal_ssl_handshake(struct us_internal_ssl_socket_t *s) {
|
||||
}
|
||||
}
|
||||
|
||||
struct us_internal_ssl_socket_t *
|
||||
us_internal_ssl_socket_close(struct us_internal_ssl_socket_t *s, int code,
|
||||
void *reason) {
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
|
||||
if (s->pending_handshake) {
|
||||
s->pending_handshake = 0;
|
||||
if (context->on_handshake != NULL) {
|
||||
struct us_bun_verify_error_t verify_error = us_internal_verify_error(s);
|
||||
context->on_handshake(s, 0, verify_error, context->handshake_data);
|
||||
}
|
||||
}
|
||||
|
||||
return (struct us_internal_ssl_socket_t *)us_socket_close(
|
||||
0, (struct us_socket_t *)s, code, reason);
|
||||
}
|
||||
|
||||
struct us_internal_ssl_socket_t *
|
||||
ssl_on_close(struct us_internal_ssl_socket_t *s, int code, void *reason) {
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
if (s->pending_handshake) {
|
||||
s->pending_handshake = 0;
|
||||
}
|
||||
|
||||
s->pending_handshake = 0;
|
||||
SSL_free(s->ssl);
|
||||
|
||||
return context->on_close(s, code, reason);
|
||||
@@ -343,7 +352,6 @@ ssl_on_end(struct us_internal_ssl_socket_t *s) {
|
||||
// this whole function needs a complete clean-up
|
||||
struct us_internal_ssl_socket_t *ssl_on_data(struct us_internal_ssl_socket_t *s,
|
||||
void *data, int length) {
|
||||
|
||||
// note: this context can change when we adopt the socket!
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
@@ -363,8 +371,8 @@ struct us_internal_ssl_socket_t *ssl_on_data(struct us_internal_ssl_socket_t *s,
|
||||
loop_ssl_data->ssl_socket = &s->s;
|
||||
loop_ssl_data->msg_more = 0;
|
||||
|
||||
if (us_socket_is_closed(0, &s->s)) {
|
||||
return s;
|
||||
if (us_socket_is_closed(0, &s->s) || s->received_ssl_shutdown) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (us_internal_ssl_socket_is_shut_down(s)) {
|
||||
@@ -395,11 +403,18 @@ struct us_internal_ssl_socket_t *ssl_on_data(struct us_internal_ssl_socket_t *s,
|
||||
// check-ups
|
||||
int read = 0;
|
||||
restart:
|
||||
while (1) {
|
||||
// read until shutdown
|
||||
while (!s->received_ssl_shutdown) {
|
||||
int just_read = SSL_read(s->ssl,
|
||||
loop_ssl_data->ssl_read_output +
|
||||
LIBUS_RECV_BUFFER_PADDING + read,
|
||||
LIBUS_RECV_BUFFER_LENGTH - read);
|
||||
// we need to check if we received a shutdown here
|
||||
if (SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
|
||||
s->pending_handshake = 0;
|
||||
s->received_ssl_shutdown = 1;
|
||||
// we will only close after we handle the data and errors
|
||||
}
|
||||
if (just_read <= 0) {
|
||||
int err = SSL_get_error(s->ssl, just_read);
|
||||
|
||||
@@ -417,7 +432,7 @@ restart:
|
||||
s = context->on_data(
|
||||
s, loop_ssl_data->ssl_read_output + LIBUS_RECV_BUFFER_PADDING,
|
||||
read);
|
||||
if (us_socket_is_closed(0, &s->s)) {
|
||||
if (!s || us_socket_is_closed(0, &s->s)) {
|
||||
return s;
|
||||
}
|
||||
}
|
||||
@@ -456,7 +471,7 @@ restart:
|
||||
s = context->on_data(
|
||||
s, loop_ssl_data->ssl_read_output + LIBUS_RECV_BUFFER_PADDING,
|
||||
read);
|
||||
if (us_socket_is_closed(0, &s->s)) {
|
||||
if (!s || us_socket_is_closed(0, &s->s)) {
|
||||
return s;
|
||||
}
|
||||
|
||||
@@ -476,7 +491,7 @@ restart:
|
||||
// emit data and restart
|
||||
s = context->on_data(
|
||||
s, loop_ssl_data->ssl_read_output + LIBUS_RECV_BUFFER_PADDING, read);
|
||||
if (us_socket_is_closed(0, &s->s)) {
|
||||
if (!s || us_socket_is_closed(0, &s->s)) {
|
||||
return s;
|
||||
}
|
||||
|
||||
@@ -484,7 +499,11 @@ restart:
|
||||
goto restart;
|
||||
}
|
||||
}
|
||||
|
||||
// we received the shutdown after reading so we close
|
||||
if (s->received_ssl_shutdown) {
|
||||
us_internal_ssl_socket_close(s, 0, NULL);
|
||||
return NULL;
|
||||
}
|
||||
// trigger writable if we failed last write with want read
|
||||
if (s->ssl_write_wants_read) {
|
||||
s->ssl_write_wants_read = 0;
|
||||
@@ -502,24 +521,11 @@ restart:
|
||||
}
|
||||
}
|
||||
|
||||
// check this then?
|
||||
if (SSL_get_shutdown(s->ssl) & SSL_RECEIVED_SHUTDOWN) {
|
||||
// printf("SSL_RECEIVED_SHUTDOWN\n");
|
||||
|
||||
// exit(-2);
|
||||
|
||||
// not correct anyways!
|
||||
s = us_internal_ssl_socket_close(s, 0, NULL);
|
||||
|
||||
// us_
|
||||
}
|
||||
|
||||
return s;
|
||||
}
|
||||
|
||||
struct us_internal_ssl_socket_t *
|
||||
ssl_on_writable(struct us_internal_ssl_socket_t *s) {
|
||||
|
||||
struct us_internal_ssl_socket_context_t *context =
|
||||
(struct us_internal_ssl_socket_context_t *)us_socket_context(0, &s->s);
|
||||
|
||||
@@ -940,7 +946,9 @@ const char *us_X509_error_code(long err) { // NOLINT(runtime/int)
|
||||
|
||||
long us_internal_verify_peer_certificate( // NOLINT(runtime/int)
|
||||
const SSL *ssl,
|
||||
long def) { // NOLINT(runtime/int)
|
||||
long def) { // NOLINT(runtime/int)
|
||||
if (!ssl)
|
||||
return def;
|
||||
long err = def; // NOLINT(runtime/int)
|
||||
X509 *peer_cert = SSL_get_peer_certificate(ssl);
|
||||
if (peer_cert) {
|
||||
@@ -964,8 +972,8 @@ long us_internal_verify_peer_certificate( // NOLINT(runtime/int)
|
||||
|
||||
struct us_bun_verify_error_t
|
||||
us_internal_verify_error(struct us_internal_ssl_socket_t *s) {
|
||||
|
||||
if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s)) {
|
||||
if (us_socket_is_closed(0, &s->s) || us_internal_ssl_socket_is_shut_down(s) ||
|
||||
s->received_ssl_shutdown) {
|
||||
return (struct us_bun_verify_error_t){
|
||||
.error = 0, .code = NULL, .reason = NULL};
|
||||
}
|
||||
@@ -1930,7 +1938,8 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_wrap_with_tls(
|
||||
socket->ssl = NULL;
|
||||
socket->ssl_write_wants_read = 0;
|
||||
socket->ssl_read_wants_write = 0;
|
||||
|
||||
socket->pending_handshake = 1;
|
||||
socket->received_ssl_shutdown = 0;
|
||||
return socket;
|
||||
}
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// clang-format off
|
||||
#include "libusockets.h"
|
||||
#include "internal/internal.h"
|
||||
#include <stdlib.h>
|
||||
@@ -380,7 +380,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int events)
|
||||
}
|
||||
|
||||
break;
|
||||
} while (1);
|
||||
} while (s);
|
||||
}
|
||||
|
||||
/* Such as epollerr epollhup */
|
||||
|
||||
@@ -8,8 +8,8 @@
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
@@ -19,7 +19,7 @@
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
// clang-format off
|
||||
#include "libuwebsockets.h"
|
||||
#include <string_view>
|
||||
#include "App.h"
|
||||
@@ -976,7 +976,7 @@ extern "C"
|
||||
return value.length();
|
||||
}
|
||||
#endif
|
||||
uws_try_end_result_t uws_res_try_end(int ssl, uws_res_t *res, const char *data, size_t length, uintmax_t total_size, bool close_connection)
|
||||
uws_try_end_result_t uws_res_try_end(int ssl, uws_res_t *res, const char *data, size_t length, uint64_t total_size, bool close_connection)
|
||||
{
|
||||
if (ssl)
|
||||
{
|
||||
@@ -1123,7 +1123,7 @@ extern "C"
|
||||
uWS::HttpResponse<false> *uwsRes = (uWS::HttpResponse<false> *)res;
|
||||
return uwsRes->write(std::string_view(data, length));
|
||||
}
|
||||
uintmax_t uws_res_get_write_offset(int ssl, uws_res_t *res)
|
||||
uint64_t uws_res_get_write_offset(int ssl, uws_res_t *res)
|
||||
{
|
||||
if (ssl)
|
||||
{
|
||||
@@ -1133,7 +1133,7 @@ extern "C"
|
||||
uWS::HttpResponse<false> *uwsRes = (uWS::HttpResponse<false> *)res;
|
||||
return uwsRes->getWriteOffset();
|
||||
}
|
||||
void uws_res_override_write_offset(int ssl, uws_res_t *res, uintmax_t offset)
|
||||
void uws_res_override_write_offset(int ssl, uws_res_t *res, uint64_t offset)
|
||||
{
|
||||
if (ssl)
|
||||
{
|
||||
@@ -1157,18 +1157,18 @@ extern "C"
|
||||
return uwsRes->hasResponded();
|
||||
}
|
||||
|
||||
void uws_res_on_writable(int ssl, uws_res_t *res, bool (*handler)(uws_res_t *res, uintmax_t, void *optional_data), void *optional_data)
|
||||
void uws_res_on_writable(int ssl, uws_res_t *res, bool (*handler)(uws_res_t *res, uint64_t, void *optional_data), void *optional_data)
|
||||
{
|
||||
if (ssl)
|
||||
{
|
||||
uWS::HttpResponse<true> *uwsRes = (uWS::HttpResponse<true> *)res;
|
||||
uwsRes->onWritable([handler, res, optional_data](uintmax_t a)
|
||||
uwsRes->onWritable([handler, res, optional_data](uint64_t a)
|
||||
{ return handler(res, a, optional_data); });
|
||||
}
|
||||
else
|
||||
{
|
||||
uWS::HttpResponse<false> *uwsRes = (uWS::HttpResponse<false> *)res;
|
||||
uwsRes->onWritable([handler, res, optional_data](uintmax_t a)
|
||||
uwsRes->onWritable([handler, res, optional_data](uint64_t a)
|
||||
{ return handler(res, a, optional_data); });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,16 +1,16 @@
/*
* Copyright 2022 Ciro Spaciari
*
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
@@ -19,7 +19,7 @@
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/

// clang-format off
#ifndef LIBUWS_CAPI_HEADER
#define LIBUWS_CAPI_HEADER

@@ -209,7 +209,7 @@ extern "C"

//Response
DLL_EXPORT void uws_res_end(int ssl, uws_res_t *res, const char *data, size_t length, bool close_connection);
DLL_EXPORT uws_try_end_result_t uws_res_try_end(int ssl, uws_res_t *res, const char *data, size_t length, uintmax_t total_size, bool close_connection);
DLL_EXPORT uws_try_end_result_t uws_res_try_end(int ssl, uws_res_t *res, const char *data, size_t length, uint64_t total_size, bool close_connection);
DLL_EXPORT void uws_res_cork(int ssl, uws_res_t *res, void(*callback)(uws_res_t *res, void* user_data) ,void* user_data);
DLL_EXPORT void uws_res_pause(int ssl, uws_res_t *res);
DLL_EXPORT void uws_res_resume(int ssl, uws_res_t *res);
@@ -220,10 +220,10 @@ extern "C"
DLL_EXPORT void uws_res_write_header_int(int ssl, uws_res_t *res, const char *key, size_t key_length, uint64_t value);
DLL_EXPORT void uws_res_end_without_body(int ssl, uws_res_t *res, bool close_connection);
DLL_EXPORT bool uws_res_write(int ssl, uws_res_t *res, const char *data, size_t length);
DLL_EXPORT uintmax_t uws_res_get_write_offset(int ssl, uws_res_t *res);
DLL_EXPORT void uws_res_override_write_offset(int ssl, uws_res_t *res, uintmax_t offset);
DLL_EXPORT uint64_t uws_res_get_write_offset(int ssl, uws_res_t *res);
DLL_EXPORT void uws_res_override_write_offset(int ssl, uws_res_t *res, uint64_t offset);
DLL_EXPORT bool uws_res_has_responded(int ssl, uws_res_t *res);
DLL_EXPORT void uws_res_on_writable(int ssl, uws_res_t *res, bool (*handler)(uws_res_t *res, uintmax_t, void *optional_data), void *user_data);
DLL_EXPORT void uws_res_on_writable(int ssl, uws_res_t *res, bool (*handler)(uws_res_t *res, uint64_t, void *optional_data), void *user_data);
DLL_EXPORT void uws_res_on_aborted(int ssl, uws_res_t *res, void (*handler)(uws_res_t *res, void *optional_data), void *optional_data);
DLL_EXPORT void uws_res_on_data(int ssl, uws_res_t *res, void (*handler)(uws_res_t *res, const char *chunk, size_t chunk_length, bool is_end, void *optional_data), void *optional_data);
DLL_EXPORT void uws_res_upgrade(int ssl, uws_res_t *res, void *data, const char *sec_web_socket_key, size_t sec_web_socket_key_length, const char *sec_web_socket_protocol, size_t sec_web_socket_protocol_length, const char *sec_web_socket_extensions, size_t sec_web_socket_extensions_length, uws_socket_context_t *ws);

@@ -16,7 +16,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

// clang-format off
#ifndef UWS_ASYNCSOCKET_H
#define UWS_ASYNCSOCKET_H

@@ -255,10 +255,8 @@ public:
if (asyncSocketData->buffer.length()) {
/* Write off as much as we can */
int written = us_socket_write(SSL, (us_socket_t *) this, asyncSocketData->buffer.data(), (int) asyncSocketData->buffer.length(), /*nextLength != 0 | */length);

/* On failure return, otherwise continue down the function */
if ((unsigned int) written < asyncSocketData->buffer.length()) {

/* Update buffering (todo: we can do better here if we keep track of what happens to this guy later on) */
asyncSocketData->buffer.erase((unsigned int) written);

@@ -268,7 +266,6 @@ public:
} else {
/* This path is horrible and points towards erroneous usage */
asyncSocketData->buffer.append(src, (unsigned int) length);

return {length, true};
}
}
@@ -310,7 +307,6 @@ public:
if (optionally) {
return {written, true};
}

/* Fall back to worst possible case (should be very rare for HTTP) */
/* At least we can reserve room for next chunk if we know it up front */
if (nextLength) {
@@ -344,7 +340,7 @@ public:
auto [written, failed] = write(loopData->corkBuffer, (int) loopData->corkOffset, false, length);
loopData->corkOffset = 0;

if (failed) {
if (failed && optionally) {
/* We do not need to care for buffering here, write does that */
return {0, true};
}

@@ -3,7 +3,7 @@ extern "C" {
}

#include "Http3ResponseData.h"

// clang-format off
namespace uWS {

/* Is a quic stream */
@@ -40,7 +40,7 @@ namespace uWS {
return this;
}

std::pair<bool, bool> tryEnd(std::string_view data, uintmax_t totalSize = 0) {
std::pair<bool, bool> tryEnd(std::string_view data, uint64_t totalSize = 0) {
Http3ResponseData *responseData = (Http3ResponseData *) us_quic_stream_ext((us_quic_stream_t *) this);

writeStatus("200 OK");
@@ -109,7 +109,7 @@ namespace uWS {
return this;
}

Http3Response *onWritable(MoveOnlyFunction<bool(uintmax_t)> &&handler) {
Http3Response *onWritable(MoveOnlyFunction<bool(uint64_t)> &&handler) {
Http3ResponseData *responseData = (Http3ResponseData *) us_quic_stream_ext((us_quic_stream_t *) this);

responseData->onWritable = std::move(handler);

@@ -1,22 +1,22 @@
#ifndef UWS_H3RESPONSEDATA_H
#define UWS_H3RESPONSEDATA_H

#include "MoveOnlyFunction.h"
#include "AsyncSocketData.h"
#include "MoveOnlyFunction.h"
#include <string_view>

// clang-format off
namespace uWS {
struct Http3ResponseData {

MoveOnlyFunction<void()> onAborted = nullptr;
MoveOnlyFunction<void(std::string_view, bool)> onData = nullptr;
MoveOnlyFunction<bool(uintmax_t)> onWritable = nullptr;
MoveOnlyFunction<bool(uint64_t)> onWritable = nullptr;

/* Status is always first header just like for h1 */
unsigned int headerOffset = 0;

/* Write offset */
uintmax_t offset = 0;
uint64_t offset = 0;

BackPressure backpressure;
};

@@ -374,9 +374,7 @@ private:
return s;
}

/* We don't want to fall through since we don't want to mess with timeout.
* It makes little sense to drain any backpressure when the user has registered onWritable. */
return s;
/* We need to drain any remaining buffered data if success == true*/
}

/* Drain any socket buffer, this might empty our backpressure and thus finish the request */

@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

// clang-format off
#ifndef UWS_HTTPRESPONSE_H
#define UWS_HTTPRESPONSE_H

@@ -87,7 +87,7 @@ public:

/* Returns true on success, indicating that it might be feasible to write more data.
* Will start timeout if stream reaches totalSize or write failure. */
bool internalEnd(std::string_view data, uintmax_t totalSize, bool optional, bool allowContentLength = true, bool closeConnection = false) {
bool internalEnd(std::string_view data, uint64_t totalSize, bool optional, bool allowContentLength = true, bool closeConnection = false) {
/* Write status if not already done */
writeStatus(HTTP_200_OK);

@@ -435,7 +435,7 @@ public:

/* Try and end the response. Returns [true, true] on success.
* Starts a timeout in some cases. Returns [ok, hasResponded] */
std::pair<bool, bool> tryEnd(std::string_view data, uintmax_t totalSize = 0, bool closeConnection = false) {
std::pair<bool, bool> tryEnd(std::string_view data, uint64_t totalSize = 0, bool closeConnection = false) {
return {internalEnd(data, totalSize, true, true, closeConnection), hasResponded()};
}

@@ -491,14 +491,14 @@ public:
}

/* Get the current byte write offset for this Http response */
uintmax_t getWriteOffset() {
uint64_t getWriteOffset() {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();

return httpResponseData->offset;
}

/* If you are messing around with sendfile you might want to override the offset. */
void overrideWriteOffset(uintmax_t offset) {
void overrideWriteOffset(uint64_t offset) {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();

httpResponseData->offset = offset;
@@ -566,13 +566,21 @@ public:
}

/* Attach handler for writable HTTP response */
HttpResponse *onWritable(MoveOnlyFunction<bool(uintmax_t)> &&handler) {
HttpResponse *onWritable(MoveOnlyFunction<bool(uint64_t)> &&handler) {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();

httpResponseData->onWritable = std::move(handler);
return this;
}

/* Remove handler for writable HTTP response */
HttpResponse *clearOnWritable() {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();

httpResponseData->onWritable = nullptr;
return this;
}

/* Attach handler for aborted HTTP request */
HttpResponse *onAborted(MoveOnlyFunction<void()> &&handler) {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
@@ -591,7 +599,7 @@ public:
}


void setWriteOffset(uintmax_t offset) {
void setWriteOffset(uint64_t offset) {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();

httpResponseData->offset = offset;

@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/

// clang-format off
#ifndef UWS_HTTPRESPONSEDATA_H
#define UWS_HTTPRESPONSEDATA_H

@@ -46,12 +46,12 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
}

/* Caller of onWritable. It is possible onWritable calls markDone so we need to borrow it. */
bool callOnWritable(uintmax_t offset) {
bool callOnWritable(uint64_t offset) {
/* Borrow real onWritable */
MoveOnlyFunction<bool(uintmax_t)> borrowedOnWritable = std::move(onWritable);
MoveOnlyFunction<bool(uint64_t)> borrowedOnWritable = std::move(onWritable);

/* Set onWritable to placeholder */
onWritable = [](uintmax_t) {return true;};
onWritable = [](uint64_t) {return true;};

/* Run borrowed onWritable */
bool ret = borrowedOnWritable(offset);
@@ -75,11 +75,11 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
};

/* Per socket event handlers */
MoveOnlyFunction<bool(uintmax_t)> onWritable;
MoveOnlyFunction<bool(uint64_t)> onWritable;
MoveOnlyFunction<void()> onAborted;
MoveOnlyFunction<void(std::string_view, bool)> inStream; // onData
/* Outgoing offset */
uintmax_t offset = 0;
uint64_t offset = 0;

/* Let's track number of bytes since last timeout reset in data handler */
unsigned int received_bytes_per_timeout = 0;

@@ -5,7 +5,7 @@ Push-Location (Join-Path $BUN_DEPS_DIR 'libarchive')
try {
Set-Location (mkdir -Force build)

Run cmake @CMAKE_FLAGS -DBUILD_SHARED_LIBS=OFF -DENABLE_TEST=OFF -DENABLE_INSTALL=OFF -DENABLE_WERROR=0 ..
Run cmake @CMAKE_FLAGS -DBUILD_SHARED_LIBS=OFF -DENABLE_TEST=OFF -DENABLE_INSTALL=OFF -DENABLE_WERROR=0 -DENABLE_ICONV=0 -DENABLE_LibGCC=0 -DENABLE_LZMA=0 -DENABLE_LZ4=0 -DENABLE_LIBXML2=0 -DENABLE_LIBB2=0 -DENABLE_OPENSSL=0 -DENABLE_CAT=0 ..
Run cmake --build . --clean-first --config Release

Copy-Item libarchive\archive_static.lib $BUN_DEPS_OUT_DIR\archive.lib

@@ -37,7 +37,6 @@ task() {
fi
}

task bun ./src/codegen/bundle-functions.ts --debug=OFF "$OUT"
task bun ./src/codegen/bundle-modules.ts --debug=OFF "$OUT"

rm -rf "$OUT/tmp_functions"

@@ -10,8 +10,6 @@ const Global = bun.Global;
const Environment = bun.Environment;
const Syscall = bun.sys;

const exe_suffix = bun.exe_suffix;

const w = std.os.windows;

pub const StandaloneModuleGraph = struct {
@@ -277,19 +275,16 @@ pub const StandaloneModuleGraph = struct {
}.toClean;

const cloned_executable_fd: bun.FileDescriptor = brk: {
var self_buf: [bun.MAX_PATH_BYTES + 1]u8 = undefined;
const self_exe = std.fs.selfExePath(&self_buf) catch |err| {
const self_exe = bun.selfExePath() catch |err| {
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to get self executable path: {s}", .{@errorName(err)});
Global.exit(1);
};
self_buf[self_exe.len] = 0;
const self_exeZ = self_buf[0..self_exe.len :0];

if (comptime Environment.isWindows) {
// copy self and then open it for writing

var in_buf: bun.WPathBuffer = undefined;
strings.copyU8IntoU16(&in_buf, self_exeZ);
strings.copyU8IntoU16(&in_buf, self_exe);
in_buf[self_exe.len] = 0;
const in = in_buf[0..self_exe.len :0];
var out_buf: bun.WPathBuffer = undefined;
@@ -301,7 +296,6 @@ pub const StandaloneModuleGraph = struct {
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to copy bun executable into temporary file: {s}", .{@errorName(err)});
Global.exit(1);
};

const file = bun.sys.openFileAtWindows(
bun.invalid_fd,
out,
@@ -322,7 +316,7 @@ pub const StandaloneModuleGraph = struct {
if (comptime Environment.isMac) {
// if we're on a mac, use clonefile() if we can
// failure is okay, clonefile is just a fast path.
if (Syscall.clonefile(self_exeZ, zname) == .result) {
if (Syscall.clonefile(self_exe, zname) == .result) {
switch (Syscall.open(zname, std.os.O.RDWR | std.os.O.CLOEXEC, 0)) {
.result => |res| break :brk res,
.err => {},
@@ -376,7 +370,7 @@ pub const StandaloneModuleGraph = struct {
};
const self_fd = brk2: {
for (0..3) |retry| {
switch (Syscall.open(self_exeZ, std.os.O.CLOEXEC | std.os.O.RDONLY, 0)) {
switch (Syscall.open(self_exe, std.os.O.CLOEXEC | std.os.O.RDONLY, 0)) {
.result => |res| break :brk2 res,
.err => |err| {
if (retry < 2) {
@@ -665,30 +659,32 @@ pub const StandaloneModuleGraph = struct {
return try StandaloneModuleGraph.fromBytes(allocator, to_read, offsets);
}

fn isBuiltInExe(argv0: []const u8) bool {
/// heuristic: `bun build --compile` won't be supported if the name is "bun", "bunx", or "node".
/// this is a cheap way to avoid the extra overhead of opening the executable, and also just makes sense.
fn isBuiltInExe(comptime T: type, argv0: []const T) bool {
if (argv0.len == 0) return false;

if (argv0.len == 3) {
if (bun.strings.eqlComptimeIgnoreLen(argv0, "bun" ++ exe_suffix)) {
if (bun.strings.eqlComptimeCheckLenWithType(T, argv0, bun.strings.literal(T, "bun"), false)) {
return true;
}
}

if (argv0.len == 4) {
if (bun.strings.eqlComptimeIgnoreLen(argv0, "bunx" ++ exe_suffix)) {
if (bun.strings.eqlComptimeCheckLenWithType(T, argv0, bun.strings.literal(T, "bunx"), false)) {
return true;
}

if (bun.strings.eqlComptimeIgnoreLen(argv0, "node" ++ exe_suffix)) {
if (bun.strings.eqlComptimeCheckLenWithType(T, argv0, bun.strings.literal(T, "node"), false)) {
return true;
}
}

if (comptime Environment.isDebug) {
if (bun.strings.eqlComptime(argv0, "bun-debug")) {
if (bun.strings.eqlComptimeCheckLenWithType(T, argv0, bun.strings.literal(T, "bun-debug"), true)) {
return true;
}
if (bun.strings.eqlComptime(argv0, "bun-debugx")) {
if (bun.strings.eqlComptimeCheckLenWithType(T, argv0, bun.strings.literal(T, "bun-debugx"), true)) {
return true;
}
}
@@ -697,13 +693,10 @@ pub const StandaloneModuleGraph = struct {
}

fn openSelf() std.fs.OpenSelfExeError!bun.FileDescriptor {
// heuristic: `bun build --compile` won't be supported if the name is "bun", "bunx", or "node".
// this is a cheap way to avoid the extra overhead
// of opening the executable and also just makes sense.
if (!Environment.isWindows) {
const argv = bun.argv();
if (argv.len > 0) {
if (isBuiltInExe(argv[0])) {
if (isBuiltInExe(u8, argv[0])) {
return error.FileNotFound;
}
}
@@ -733,10 +726,8 @@ pub const StandaloneModuleGraph = struct {
.mac => {
// Use of MAX_PATH_BYTES here is valid as the resulting path is immediately
// opened with no modification.
var buf: [bun.MAX_PATH_BYTES]u8 = undefined;
const self_exe_path = try std.fs.selfExePath(&buf);
buf[self_exe_path.len] = 0;
const file = try std.fs.openFileAbsoluteZ(buf[0..self_exe_path.len :0].ptr, .{});
const self_exe_path = try bun.selfExePath();
const file = try std.fs.openFileAbsoluteZ(self_exe_path.ptr, .{});
return bun.toFD(file.handle);
},
.windows => {
@@ -746,6 +737,13 @@ pub const StandaloneModuleGraph = struct {
var nt_path_buf: bun.WPathBuffer = undefined;
const nt_path = bun.strings.addNTPathPrefix(&nt_path_buf, image_path);

const basename_start = std.mem.lastIndexOfScalar(u16, nt_path, '\\') orelse
return error.FileNotFound;
const basename = nt_path[basename_start + 1 .. nt_path.len - ".exe".len];
if (isBuiltInExe(u16, basename)) {
return error.FileNotFound;
}

return bun.sys.openFileAtWindows(
bun.FileDescriptor.cwd(),
nt_path,

@@ -7,7 +7,7 @@ const bun = @import("root").bun;

pub fn isSliceInBufferT(comptime T: type, slice: []const T, buffer: []const T) bool {
return (@intFromPtr(buffer.ptr) <= @intFromPtr(slice.ptr) and
(@intFromPtr(slice.ptr) + slice.len) <= (@intFromPtr(buffer.ptr) + buffer.len));
(@intFromPtr(slice.ptr) + slice.len * @sizeOf(T)) <= (@intFromPtr(buffer.ptr) + buffer.len * @sizeOf(T)));
}

/// Checks if a slice's pointer is contained within another slice.

@@ -325,46 +325,19 @@ pub const FileWriter = Writer(std.fs.File);
|
||||
pub const Api = struct {
|
||||
pub const Loader = enum(u8) {
|
||||
_none,
|
||||
/// jsx
|
||||
jsx,
|
||||
|
||||
/// js
|
||||
js,
|
||||
|
||||
/// ts
|
||||
ts,
|
||||
|
||||
/// tsx
|
||||
tsx,
|
||||
|
||||
/// css
|
||||
css,
|
||||
|
||||
/// file
|
||||
file,
|
||||
|
||||
/// json
|
||||
json,
|
||||
|
||||
/// toml
|
||||
toml,
|
||||
|
||||
/// wasm
|
||||
wasm,
|
||||
|
||||
/// napi
|
||||
napi,
|
||||
|
||||
/// base64
|
||||
base64,
|
||||
|
||||
/// dataurl
|
||||
dataurl,
|
||||
|
||||
/// text
|
||||
text,
|
||||
|
||||
/// sqlite
|
||||
sqlite,
|
||||
|
||||
_,
|
||||
|
||||
@@ -398,19 +398,19 @@ pub const Closer = struct {
|
||||
|
||||
pub fn close(fd: uv.uv_file, loop: *uv.Loop) void {
|
||||
var closer = Closer.new(.{});
|
||||
|
||||
if (uv.uv_fs_close(loop, &closer.io_request, fd, &onClose).errEnum()) |err| {
|
||||
// data is not overridden by libuv when calling uv_fs_close, its ok to set it here
|
||||
closer.io_request.data = closer;
|
||||
if (uv.uv_fs_close(loop, &closer.io_request, fd, onClose).errEnum()) |err| {
|
||||
Output.debugWarn("libuv close() failed = {}", .{err});
|
||||
closer.destroy();
|
||||
return;
|
||||
}
|
||||
|
||||
closer.io_request.data = closer;
|
||||
}
|
||||
|
||||
fn onClose(req: *uv.fs_t) callconv(.C) void {
|
||||
var closer = @fieldParentPtr(Closer, "io_request", req);
|
||||
std.debug.assert(closer == @as(*Closer, @alignCast(@ptrCast(req.data.?))));
|
||||
bun.sys.syslog("uv_fs_close() = {}", .{req.result});
|
||||
bun.sys.syslog("uv_fs_close({}) = {}", .{ bun.toFD(req.file.fd), req.result });
|
||||
|
||||
if (comptime Environment.allow_assert) {
|
||||
if (closer.io_request.result.errEnum()) |err| {
|
||||
@@ -418,7 +418,7 @@ pub const Closer = struct {
|
||||
}
|
||||
}
|
||||
|
||||
uv.uv_fs_req_cleanup(req);
|
||||
req.deinit();
|
||||
closer.destroy();
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1216,13 +1216,23 @@ pub const Formatter = struct {
|
||||
const CellType = JSC.C.CellType;
|
||||
threadlocal var name_buf: [512]u8 = undefined;
|
||||
|
||||
/// https://console.spec.whatwg.org/#formatter
|
||||
const PercentTag = enum {
|
||||
s, // s
|
||||
i, // i or d
|
||||
f, // f
|
||||
o, // o
|
||||
O, // O
|
||||
c, // c
|
||||
};
|
||||
|
||||
fn writeWithFormatting(
|
||||
this: *ConsoleObject.Formatter,
|
||||
comptime Writer: type,
|
||||
writer_: Writer,
|
||||
comptime Slice: type,
|
||||
slice_: Slice,
|
||||
globalThis: *JSGlobalObject,
|
||||
global: *JSGlobalObject,
|
||||
comptime enable_ansi_colors: bool,
|
||||
) void {
|
||||
var writer = WrappedWriter(Writer){
|
||||
@@ -1246,12 +1256,13 @@ pub const Formatter = struct {
|
||||
if (i >= len)
|
||||
break;
|
||||
|
||||
const token = switch (slice[i]) {
|
||||
's' => Tag.String,
|
||||
'f' => Tag.Double,
|
||||
'o' => Tag.Undefined,
|
||||
'O' => Tag.Object,
|
||||
'd', 'i' => Tag.Integer,
|
||||
const token: PercentTag = switch (slice[i]) {
|
||||
's' => .s,
|
||||
'f' => .f,
|
||||
'o' => .o,
|
||||
'O' => .O,
|
||||
'd', 'i' => .i,
|
||||
'c' => .c,
|
||||
else => continue,
|
||||
};
|
||||
|
||||
@@ -1268,16 +1279,143 @@ pub const Formatter = struct {
|
||||
len = @as(u32, @truncate(slice.len));
|
||||
const next_value = this.remaining_values[0];
|
||||
this.remaining_values = this.remaining_values[1..];
|
||||
|
||||
// https://console.spec.whatwg.org/#formatter
|
||||
const max_before_e_notation = 1000000000000000000000;
|
||||
const min_before_e_notation = 0.000001;
|
||||
switch (token) {
|
||||
Tag.String => this.printAs(Tag.String, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors),
|
||||
Tag.Double => this.printAs(Tag.Double, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors),
|
||||
Tag.Object => this.printAs(Tag.Object, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors),
|
||||
Tag.Integer => this.printAs(Tag.Integer, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors),
|
||||
.s => this.printAs(Tag.String, Writer, writer_, next_value, next_value.jsType(), enable_ansi_colors),
|
||||
.i => {
|
||||
// 1. If Type(current) is Symbol, let converted be NaN
|
||||
// 2. Otherwise, let converted be the result of Call(%parseInt%, undefined, current, 10)
|
||||
const int: i64 = brk: {
|
||||
// This logic is convoluted because %parseInt% will coerce the argument to a string
|
||||
// first. As an optimization, we can check if the argument is a number and
|
||||
// skip such coercion.
|
||||
if (next_value.isInt32()) {
|
||||
// Already an int, parseInt will parse to itself.
|
||||
break :brk next_value.asInt32();
|
||||
}
|
||||
|
||||
// undefined is overloaded to mean the '%o" field
|
||||
Tag.Undefined => this.format(Tag.get(next_value, globalThis), Writer, writer_, next_value, globalThis, enable_ansi_colors),
|
||||
if (next_value.isNumber() or !next_value.isSymbol()) double_convert: {
|
||||
var value = next_value.coerceToDouble(global);
|
||||
|
||||
else => unreachable,
|
||||
if (!std.math.isFinite(value)) {
|
||||
// for NaN and the string Infinity and -Infinity, parseInt returns NaN
|
||||
break :double_convert;
|
||||
}
|
||||
|
||||
// simulate parseInt, which converts the argument to a string and
|
||||
// then back to a number, without converting it to a string
|
||||
if (value == 0) {
|
||||
break :brk 0;
|
||||
}
|
||||
|
||||
const sign: i64 = if (value < 0) -1 else 1;
|
||||
value = @abs(value);
|
||||
if (value >= max_before_e_notation) {
|
||||
// toString prints 1.000+e0, which parseInt will stop at
|
||||
// the '.' or the '+', this gives us a single digit value.
|
||||
while (value >= 10) value /= 10;
|
||||
break :brk @as(i64, @intFromFloat(@floor(value))) * sign;
|
||||
} else if (value < min_before_e_notation) {
|
||||
// toString prints 1.000-e0, which parseInt will stop at
|
||||
// the '.' or the '-', this gives us a single digit value.
|
||||
while (value < 1) value *= 10;
|
||||
break :brk @as(i64, @intFromFloat(@floor(value))) * sign;
|
||||
}
|
||||
|
||||
// parsing stops at '.', so this is equal to @floor
|
||||
break :brk @as(i64, @intFromFloat(@floor(value))) * sign;
|
||||
}
|
||||
|
||||
// for NaN and the string Infinity and -Infinity, parseInt returns NaN
|
||||
this.addForNewLine("NaN".len);
|
||||
writer.print("NaN", .{});
|
||||
continue;
|
||||
};
|
||||
|
||||
if (int < std.math.maxInt(u32)) {
|
||||
const is_negative = int < 0;
|
||||
const digits = if (i != 0)
|
||||
bun.fmt.fastDigitCount(@as(u64, @intCast(@abs(int)))) + @as(u64, @intFromBool(is_negative))
|
||||
else
|
||||
1;
|
||||
this.addForNewLine(digits);
|
||||
} else {
|
||||
this.addForNewLine(bun.fmt.count("{d}", .{int}));
|
||||
}
|
||||
writer.print("{d}", .{int});
|
||||
},
|
||||
|
||||
.f => {
|
||||
// 1. If Type(current) is Symbol, let converted be NaN
|
||||
// 2. Otherwise, let converted be the result of Call(%parseFloat%, undefined, [current]).
|
||||
const converted: f64 = brk: {
|
||||
if (next_value.isInt32()) {
|
||||
const int = next_value.asInt32();
|
||||
const is_negative = int < 0;
|
||||
const digits = if (i != 0)
|
||||
bun.fmt.fastDigitCount(@as(u64, @intCast(@abs(int)))) + @as(u64, @intFromBool(is_negative))
|
||||
else
|
||||
1;
|
||||
this.addForNewLine(digits);
|
||||
writer.print("{d}", .{int});
|
||||
continue;
|
||||
}
|
||||
if (next_value.isNumber()) {
|
||||
break :brk next_value.asNumber();
|
||||
}
|
||||
if (next_value.isSymbol()) {
|
||||
break :brk std.math.nan(f64);
|
||||
}
|
||||
// TODO: this is not perfectly emulating parseFloat,
|
||||
// because spec says to convert the value to a string
|
||||
// and then parse as a number, but we are just coercing
|
||||
// a number.
|
||||
break :brk next_value.coerceToDouble(global);
|
||||
};
|
||||
|
||||
const abs = @abs(converted);
|
||||
if (abs < max_before_e_notation and abs >= min_before_e_notation) {
|
||||
this.addForNewLine(bun.fmt.count("{d}", .{converted}));
|
||||
writer.print("{d}", .{converted});
|
||||
} else if (std.math.isNan(converted)) {
|
||||
this.addForNewLine("NaN".len);
|
||||
writer.writeAll("NaN");
|
||||
} else if (std.math.isInf(converted)) {
|
||||
this.addForNewLine("Infinity".len + @as(usize, @intFromBool(converted < 0)));
|
||||
if (converted < 0) {
|
||||
writer.writeAll("-");
|
||||
}
|
||||
writer.writeAll("Infinity");
|
||||
} else {
|
||||
var buf: [124]u8 = undefined;
|
||||
const formatted = bun.fmt.FormatDouble.dtoa(&buf, converted);
|
||||
this.addForNewLine(formatted.len);
|
||||
writer.print("{s}", .{formatted});
|
||||
}
|
||||
},
|
||||
|
||||
inline .o, .O => |t| {
|
||||
if (t == .o) {
|
||||
// TODO: Node.js applies the following extra formatter options.
|
||||
//
|
||||
// this.max_depth = 4;
|
||||
// this.show_proxy = true;
|
||||
// this.show_hidden = true;
|
||||
//
|
||||
// Spec defines %o as:
|
||||
// > An object with optimally useful formatting is an
|
||||
// > implementation-specific, potentially-interactive representation
|
||||
// > of an object judged to be maximally useful and informative.
|
||||
}
|
||||
this.format(Tag.get(next_value, global), Writer, writer_, next_value, global, enable_ansi_colors);
|
||||
},
|
||||
|
||||
.c => {
|
||||
// TODO: Implement %c
|
||||
},
|
||||
}
|
||||
if (this.remaining_values.len == 0) break;
|
||||
},
|
||||
@@ -1523,7 +1661,7 @@ pub const Formatter = struct {
|
||||
parent: JSValue,
|
||||
const enable_ansi_colors = enable_ansi_colors_;
|
||||
pub fn handleFirstProperty(this: *@This(), globalThis: *JSC.JSGlobalObject, value: JSValue) void {
|
||||
if (!value.jsType().isFunction()) {
|
||||
if (value.isCell() and !value.jsType().isFunction()) {
|
||||
var writer = WrappedWriter(Writer){
|
||||
.ctx = this.writer,
|
||||
.failed = false,
|
||||
@@ -1735,6 +1873,7 @@ pub const Formatter = struct {
|
||||
this.writeWithFormatting(Writer, writer_, @TypeOf(slice), slice, this.globalThis, enable_ansi_colors);
|
||||
},
|
||||
.String => {
|
||||
// This is called from the '%s' formatter, so it can actually be any value
|
||||
const str: bun.String = bun.String.tryFromJS(value, this.globalThis) orelse {
|
||||
writer.failed = true;
|
||||
return;
|
||||
@@ -1849,8 +1988,10 @@ pub const Formatter = struct {
|
||||
this.addForNewLine("NaN".len);
|
||||
writer.print(comptime Output.prettyFmt("<r><yellow>NaN<r>", enable_ansi_colors), .{});
|
||||
} else {
|
||||
this.addForNewLine(std.fmt.count("{d}", .{num}));
|
||||
writer.print(comptime Output.prettyFmt("<r><yellow>{d}<r>", enable_ansi_colors), .{num});
|
||||
var buf: [124]u8 = undefined;
|
||||
const formatted = bun.fmt.FormatDouble.dtoaWithNegativeZero(&buf, num);
|
||||
this.addForNewLine(formatted.len);
|
||||
writer.print(comptime Output.prettyFmt("<r><yellow>{s}<r>", enable_ansi_colors), .{formatted});
|
||||
}
|
||||
},
|
||||
.Undefined => {
|
||||
@@ -2735,6 +2876,7 @@ pub const Formatter = struct {
|
||||
writer.writeAll(" />");
|
||||
},
|
||||
.Object => {
|
||||
std.debug.assert(value.isCell());
|
||||
const prev_quote_strings = this.quote_strings;
|
||||
this.quote_strings = true;
|
||||
defer this.quote_strings = prev_quote_strings;
|
||||
|
||||
Submodule src/bun.js/WebKit updated: 089023cc90...e3a2d89a0b
@@ -8,19 +8,13 @@
|
||||
/// - Run "make dev"
|
||||
pub const BunObject = struct {
|
||||
// --- Callbacks ---
|
||||
pub const DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump = dump_mimalloc;
|
||||
pub const _Os = Bun._Os;
|
||||
pub const _Path = Bun._Path;
|
||||
pub const allocUnsafe = Bun.allocUnsafe;
|
||||
pub const build = Bun.JSBundler.buildFn;
|
||||
pub const connect = JSC.wrapStaticMethod(JSC.API.Listener, "connect", false);
|
||||
pub const deflateSync = JSC.wrapStaticMethod(JSZlib, "deflateSync", true);
|
||||
pub const file = WebCore.Blob.constructBunFile;
|
||||
pub const fs = Bun.fs;
|
||||
pub const gc = Bun.runGC;
|
||||
pub const generateHeapSnapshot = Bun.generateHeapSnapshot;
|
||||
pub const getImportedStyles = Bun.getImportedStyles;
|
||||
pub const getPublicPath = Bun.getPublicPathJS;
|
||||
pub const gunzipSync = JSC.wrapStaticMethod(JSZlib, "gunzipSync", true);
|
||||
pub const gzipSync = JSC.wrapStaticMethod(JSZlib, "gzipSync", true);
|
||||
pub const indexOfLine = Bun.indexOfLine;
|
||||
@@ -42,8 +36,6 @@ pub const BunObject = struct {
|
||||
pub const which = Bun.which;
|
||||
pub const write = JSC.WebCore.Blob.writeFile;
|
||||
pub const stringWidth = Bun.stringWidth;
|
||||
pub const shellParse = Bun.shellParse;
|
||||
pub const shellLex = Bun.shellLex;
|
||||
pub const braces = Bun.braces;
|
||||
pub const shellEscape = Bun.shellEscape;
|
||||
// --- Callbacks ---
|
||||
@@ -128,19 +120,14 @@ pub const BunObject = struct {
|
||||
// --- Getters --
|
||||
|
||||
// -- Callbacks --
|
||||
@export(BunObject.DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump, .{ .name = callbackName("DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump") });
|
||||
@export(BunObject._Os, .{ .name = callbackName("_Os") });
|
||||
@export(BunObject._Path, .{ .name = callbackName("_Path") });
|
||||
@export(BunObject.allocUnsafe, .{ .name = callbackName("allocUnsafe") });
|
||||
@export(BunObject.braces, .{ .name = callbackName("braces") });
|
||||
@export(BunObject.build, .{ .name = callbackName("build") });
|
||||
@export(BunObject.connect, .{ .name = callbackName("connect") });
|
||||
@export(BunObject.deflateSync, .{ .name = callbackName("deflateSync") });
|
||||
@export(BunObject.file, .{ .name = callbackName("file") });
|
||||
@export(BunObject.fs, .{ .name = callbackName("fs") });
|
||||
@export(BunObject.gc, .{ .name = callbackName("gc") });
|
||||
@export(BunObject.generateHeapSnapshot, .{ .name = callbackName("generateHeapSnapshot") });
|
||||
@export(BunObject.getImportedStyles, .{ .name = callbackName("getImportedStyles") });
|
||||
@export(BunObject.gunzipSync, .{ .name = callbackName("gunzipSync") });
|
||||
@export(BunObject.gzipSync, .{ .name = callbackName("gzipSync") });
|
||||
@export(BunObject.indexOfLine, .{ .name = callbackName("indexOfLine") });
|
||||
@@ -162,8 +149,6 @@ pub const BunObject = struct {
|
||||
@export(BunObject.which, .{ .name = callbackName("which") });
|
||||
@export(BunObject.write, .{ .name = callbackName("write") });
|
||||
@export(BunObject.stringWidth, .{ .name = callbackName("stringWidth") });
|
||||
@export(BunObject.shellParse, .{ .name = callbackName("shellParse") });
|
||||
@export(BunObject.shellLex, .{ .name = callbackName("shellLex") });
|
||||
@export(BunObject.shellEscape, .{ .name = callbackName("shellEscape") });
|
||||
// -- Callbacks --
|
||||
}
|
||||
@@ -303,163 +288,6 @@ pub fn getCSSImports() []ZigString {
|
||||
return css_imports_list_strings[0..tail];
|
||||
}
|
||||
|
||||
pub fn shellLex(
|
||||
globalThis: *JSC.JSGlobalObject,
|
||||
callframe: *JSC.CallFrame,
|
||||
) callconv(.C) JSC.JSValue {
|
||||
const arguments_ = callframe.arguments(1);
|
||||
var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice());
|
||||
const string_args = arguments.nextEat() orelse {
|
||||
globalThis.throw("shell_parse: expected 2 arguments, got 0", .{});
|
||||
return JSC.JSValue.jsUndefined();
|
||||
};
|
||||
|
||||
var arena = std.heap.ArenaAllocator.init(bun.default_allocator);
|
||||
defer arena.deinit();
|
||||
|
||||
const template_args = callframe.argumentsPtr()[1..callframe.argumentsCount()];
|
||||
var stack_alloc = std.heap.stackFallback(@sizeOf(bun.String) * 4, arena.allocator());
|
||||
var jsstrings = std.ArrayList(bun.String).initCapacity(stack_alloc.get(), 4) catch {
|
||||
globalThis.throwOutOfMemory();
|
||||
return .undefined;
|
||||
};
|
||||
defer {
|
||||
for (jsstrings.items[0..]) |bunstr| {
|
||||
bunstr.deref();
|
||||
}
|
||||
jsstrings.deinit();
|
||||
}
|
||||
var jsobjs = std.ArrayList(JSValue).init(arena.allocator());
|
||||
defer {
|
||||
for (jsobjs.items) |jsval| {
|
||||
jsval.unprotect();
|
||||
}
|
||||
}
|
||||
|
||||
var script = std.ArrayList(u8).init(arena.allocator());
|
||||
if (!(bun.shell.shellCmdFromJS(globalThis, string_args, template_args, &jsobjs, &jsstrings, &script) catch {
|
||||
globalThis.throwOutOfMemory();
|
||||
return JSValue.undefined;
|
||||
})) {
|
||||
return .undefined;
|
||||
}
|
||||
|
||||
const lex_result = brk: {
|
||||
if (bun.strings.isAllASCII(script.items[0..])) {
|
||||
var lexer = Shell.LexerAscii.new(arena.allocator(), script.items[0..], jsstrings.items[0..]);
|
||||
lexer.lex() catch |err| {
|
||||
globalThis.throwError(err, "failed to lex shell");
|
||||
return JSValue.undefined;
|
||||
};
|
||||
break :brk lexer.get_result();
|
||||
}
|
||||
var lexer = Shell.LexerUnicode.new(arena.allocator(), script.items[0..], jsstrings.items[0..]);
|
||||
lexer.lex() catch |err| {
|
||||
globalThis.throwError(err, "failed to lex shell");
|
||||
return JSValue.undefined;
|
||||
};
|
||||
break :brk lexer.get_result();
|
||||
};
|
||||
|
||||
if (lex_result.errors.len > 0) {
|
||||
const str = lex_result.combineErrors(arena.allocator());
|
||||
globalThis.throwPretty("{s}", .{str});
|
||||
return .undefined;
|
||||
}
|
||||
|
||||
var test_tokens = std.ArrayList(Shell.Test.TestToken).initCapacity(arena.allocator(), lex_result.tokens.len) catch {
|
||||
globalThis.throwOutOfMemory();
|
||||
return JSValue.undefined;
|
||||
};
|
||||
for (lex_result.tokens) |tok| {
|
||||
const test_tok = Shell.Test.TestToken.from_real(tok, lex_result.strpool);
|
||||
test_tokens.append(test_tok) catch {
|
||||
globalThis.throwOutOfMemory();
|
||||
return JSValue.undefined;
|
||||
};
|
||||
}
|
||||
|
||||
const str = std.json.stringifyAlloc(globalThis.bunVM().allocator, test_tokens.items[0..], .{}) catch {
|
||||
globalThis.throwOutOfMemory();
|
||||
return JSValue.undefined;
|
||||
};
|
||||
|
||||
defer globalThis.bunVM().allocator.free(str);
|
||||
var bun_str = bun.String.fromBytes(str);
|
||||
return bun_str.toJS(globalThis);
|
||||
}
|
||||
|
||||
pub fn shellParse(
|
||||
globalThis: *JSC.JSGlobalObject,
|
||||
callframe: *JSC.CallFrame,
|
||||
) callconv(.C) JSC.JSValue {
|
||||
const arguments_ = callframe.arguments(1);
|
||||
var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice());
|
||||
const string_args = arguments.nextEat() orelse {
|
||||
globalThis.throw("shell_parse: expected 2 arguments, got 0", .{});
|
||||
return JSC.JSValue.jsUndefined();
|
||||
};
|
||||
|
||||
var arena = bun.ArenaAllocator.init(bun.default_allocator);
|
||||
defer arena.deinit();
|
||||
|
||||
const template_args = callframe.argumentsPtr()[1..callframe.argumentsCount()];
|
||||
var stack_alloc = std.heap.stackFallback(@sizeOf(bun.String) * 4, arena.allocator());
|
||||
var jsstrings = std.ArrayList(bun.String).initCapacity(stack_alloc.get(), 4) catch {
|
||||
globalThis.throwOutOfMemory();
|
||||
return .undefined;
|
||||
};
|
||||
defer {
|
||||
for (jsstrings.items[0..]) |bunstr| {
|
||||
bunstr.deref();
|
||||
}
|
||||
jsstrings.deinit();
|
||||
}
|
||||
var jsobjs = std.ArrayList(JSValue).init(arena.allocator());
|
||||
defer {
|
||||
for (jsobjs.items) |jsval| {
|
||||
jsval.unprotect();
|
||||
}
|
||||
}
|
||||
var script = std.ArrayList(u8).init(arena.allocator());
|
||||
if (!(bun.shell.shellCmdFromJS(globalThis, string_args, template_args, &jsobjs, &jsstrings, &script) catch {
|
||||
globalThis.throwOutOfMemory();
|
||||
return JSValue.undefined;
|
||||
})) {
|
||||
return .undefined;
|
||||
}
|
||||
|
||||
var out_parser: ?bun.shell.Parser = null;
|
||||
var out_lex_result: ?bun.shell.LexResult = null;
|
||||
|
||||
const script_ast = bun.shell.Interpreter.parse(&arena, script.items[0..], jsobjs.items[0..], jsstrings.items[0..], &out_parser, &out_lex_result) catch |err| {
|
||||
if (err == bun.shell.ParseError.Lex) {
|
||||
std.debug.assert(out_lex_result != null);
|
||||
const str = out_lex_result.?.combineErrors(arena.allocator());
|
||||
globalThis.throwPretty("{s}", .{str});
|
||||
return .undefined;
|
||||
}
|
||||
|
||||
if (out_parser) |*p| {
|
||||
const errstr = p.combineErrors();
|
||||
globalThis.throwPretty("{s}", .{errstr});
|
||||
return .undefined;
|
||||
}
|
||||
|
||||
globalThis.throwError(err, "failed to lex/parse shell");
|
||||
return .undefined;
|
||||
};
|
||||
|
||||
const str = std.json.stringifyAlloc(globalThis.bunVM().allocator, script_ast, .{}) catch {
|
||||
globalThis.throwOutOfMemory();
|
||||
return JSValue.undefined;
|
||||
};
|
||||
|
||||
defer globalThis.bunVM().allocator.free(str);
|
||||
var bun_str = bun.String.fromBytes(str);
|
||||
return bun_str.toJS(globalThis);
|
||||
}
|
||||
|
||||
const ShellTask = struct {
|
||||
arena: std.heap.Arena,
|
||||
script: std.ArrayList(u8),
|
||||
@@ -1383,6 +1211,19 @@ export fn Bun__resolveSync(
|
||||
};
|
||||
}
|
||||
|
||||
export fn Bun__resolveSyncWithStrings(
|
||||
global: *JSGlobalObject,
|
||||
specifier: *bun.String,
|
||||
source: *bun.String,
|
||||
is_esm: bool,
|
||||
) JSC.JSValue {
|
||||
Output.scoped(.importMetaResolve, false)("source: {s}, specifier: {s}", .{ source.*, specifier.* });
|
||||
var exception = [1]JSC.JSValueRef{null};
|
||||
return doResolveWithArgs(global, specifier.*, source.*, &exception, is_esm, true) orelse {
|
||||
return JSC.JSValue.fromRef(exception[0]);
|
||||
};
|
||||
}
|
||||
|
||||
export fn Bun__resolveSyncWithSource(
|
||||
global: *JSGlobalObject,
|
||||
specifier: JSValue,
|
||||
@@ -1399,14 +1240,6 @@ export fn Bun__resolveSyncWithSource(
|
||||
};
|
||||
}
|
||||
|
||||
comptime {
|
||||
if (!is_bindgen) {
|
||||
_ = Bun__resolve;
|
||||
_ = Bun__resolveSync;
|
||||
_ = Bun__resolveSyncWithSource;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn getPublicPathJS(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
|
||||
const arguments = callframe.arguments(1).slice();
|
||||
if (arguments.len < 1) {
|
||||
@@ -1423,41 +1256,9 @@ pub fn getPublicPathJS(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFr
|
||||
return ZigString.init(stream.buffer[0..stream.pos]).toValueGC(globalObject);
|
||||
}
|
||||
|
||||
fn fs(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) callconv(.C) JSC.JSValue {
|
||||
var module = JSC.Node.NodeJSFS.new(.{
|
||||
.node_fs = .{
|
||||
.vm = globalObject.bunVM(),
|
||||
},
|
||||
});
|
||||
|
||||
return module.toJS(globalObject);
|
||||
}
|
||||
|
||||
fn _Os(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) callconv(.C) JSC.JSValue {
|
||||
return Node.Os.create(globalObject);
|
||||
}
|
||||
|
||||
fn _Path(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
|
||||
const arguments = callframe.arguments(1);
|
||||
const args = arguments.slice();
|
||||
const is_windows = args.len == 1 and args[0].toBoolean();
|
||||
return Node.Path.create(globalObject, is_windows);
|
||||
}
|
||||
|
||||
/// @deprecated
|
||||
fn getImportedStyles(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) callconv(.C) JSC.JSValue {
|
||||
defer flushCSSImports();
|
||||
const styles = getCSSImports();
|
||||
if (styles.len == 0) {
|
||||
return JSC.JSValue.createEmptyArray(globalObject, 0);
|
||||
}
|
||||
|
||||
return JSValue.createStringArray(globalObject, styles.ptr, styles.len, true);
|
||||
}
|
||||
|
||||
extern fn dump_zone_malloc_stats() void;
|
||||
|
||||
pub fn dump_mimalloc(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) callconv(.C) JSC.JSValue {
|
||||
export fn dump_mimalloc(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) callconv(.C) JSC.JSValue {
|
||||
globalObject.bunVM().arena.dumpStats();
|
||||
if (comptime bun.is_heap_breakdown_enabled) {
|
||||
dump_zone_malloc_stats();
|
||||
@@ -3495,7 +3296,7 @@ pub fn getShellConstructor(
|
||||
globalThis: *JSC.JSGlobalObject,
|
||||
_: *JSC.JSObject,
|
||||
) callconv(.C) JSC.JSValue {
|
||||
return JSC.API.Shell.eval.Interpreter.getConstructor(globalThis);
|
||||
return JSC.API.Shell.Interpreter.getConstructor(globalThis);
|
||||
}
|
||||
|
||||
pub fn getGlobConstructor(
|
||||
@@ -3526,6 +3327,7 @@ const UnsafeObject = struct {
|
||||
.gcAggressionLevel = &gcAggressionLevel,
|
||||
.segfault = &__debug__doSegfault,
|
||||
.arrayBufferToString = &arrayBufferToString,
|
||||
.mimallocDump = &dump_mimalloc,
|
||||
};
|
||||
inline for (comptime std.meta.fieldNames(@TypeOf(fields))) |name| {
|
||||
object.put(
|
||||
@@ -5476,9 +5278,6 @@ const InternalTestingAPIs = struct {
|
||||
};
|
||||
|
||||
comptime {
|
||||
if (!JSC.is_bindgen) {
|
||||
_ = Crypto.JSPasswordObject.JSPasswordObject__create;
|
||||
BunObject.exportAll();
|
||||
@export(InternalTestingAPIs.BunInternalFunction__syntaxHighlighter, .{ .name = "BunInternalFunction__syntaxHighlighter" });
|
||||
}
|
||||
_ = Crypto.JSPasswordObject.JSPasswordObject__create;
|
||||
BunObject.exportAll();
|
||||
}
|
||||
|
||||
@@ -86,7 +86,7 @@ const TranspilerOptions = struct {
|
||||
// This is going to be hard to not leak
|
||||
pub const TransformTask = struct {
|
||||
input_code: JSC.Node.StringOrBuffer = JSC.Node.StringOrBuffer{ .buffer = .{} },
|
||||
output_code: ZigString = ZigString.init(""),
|
||||
output_code: bun.String = bun.String.empty,
|
||||
bundler: Bundler.Bundler = undefined,
|
||||
log: logger.Log,
|
||||
err: ?anyerror = null,
|
||||
@@ -96,12 +96,13 @@ pub const TransformTask = struct {
|
||||
global: *JSGlobalObject,
|
||||
replace_exports: Runtime.Features.ReplaceableExport.Map = .{},
|
||||
|
||||
pub usingnamespace bun.New(@This());
|
||||
|
||||
pub const AsyncTransformTask = JSC.ConcurrentPromiseTask(TransformTask);
|
||||
pub const AsyncTransformEventLoopTask = AsyncTransformTask.EventLoopTask;
|
||||
|
||||
pub fn create(transpiler: *Transpiler, input_code: bun.JSC.Node.StringOrBuffer, globalThis: *JSGlobalObject, loader: Loader) !*AsyncTransformTask {
|
||||
var transform_task = try bun.default_allocator.create(TransformTask);
|
||||
transform_task.* = .{
|
||||
var transform_task = TransformTask.new(.{
|
||||
.input_code = input_code,
|
||||
.bundler = undefined,
|
||||
.global = globalThis,
|
||||
@@ -110,7 +111,7 @@ pub const TransformTask = struct {
|
||||
.log = logger.Log.init(bun.default_allocator),
|
||||
.loader = loader,
|
||||
.replace_exports = transpiler.transpiler_options.runtime.replace_exports,
|
||||
};
|
||||
});
|
||||
transform_task.log.level = transpiler.transpiler_options.log.level;
|
||||
transform_task.bundler = transpiler.bundler;
|
||||
transform_task.bundler.linker.resolver = &transform_task.bundler.resolver;
|
||||
@@ -124,21 +125,36 @@ pub const TransformTask = struct {
|
||||
const name = this.loader.stdinName();
|
||||
const source = logger.Source.initPathString(name, this.input_code.slice());
|
||||
|
||||
JSAst.Stmt.Data.Store.create(bun.default_allocator);
|
||||
JSAst.Expr.Data.Store.create(bun.default_allocator);
|
||||
const prev_memory_allocators = .{ JSAst.Stmt.Data.Store.memory_allocator, JSAst.Expr.Data.Store.memory_allocator };
|
||||
defer {
|
||||
JSAst.Stmt.Data.Store.memory_allocator = prev_memory_allocators[0];
|
||||
JSAst.Expr.Data.Store.memory_allocator = prev_memory_allocators[1];
|
||||
}
|
||||
|
||||
var arena = Mimalloc.Arena.init() catch unreachable;
|
||||
|
||||
const allocator = arena.allocator();
|
||||
|
||||
var ast_memory_allocator = allocator.create(JSAst.ASTMemoryAllocator) catch bun.outOfMemory();
|
||||
ast_memory_allocator.* = .{
|
||||
.allocator = allocator,
|
||||
};
|
||||
ast_memory_allocator.reset();
|
||||
JSAst.Stmt.Data.Store.memory_allocator = ast_memory_allocator;
|
||||
JSAst.Expr.Data.Store.memory_allocator = ast_memory_allocator;
|
||||
JSAst.Stmt.Data.Store.create(bun.default_allocator);
|
||||
JSAst.Expr.Data.Store.create(bun.default_allocator);
|
||||
|
||||
defer {
|
||||
this.input_code.deinitAndUnprotect();
|
||||
JSAst.Stmt.Data.Store.reset();
|
||||
JSAst.Expr.Data.Store.reset();
|
||||
arena.deinit();
|
||||
}
|
||||
|
||||
this.bundler.setAllocator(allocator);
|
||||
this.bundler.setLog(&this.log);
|
||||
this.log.msgs.allocator = bun.default_allocator;
|
||||
|
||||
const jsx = if (this.tsconfig != null)
|
||||
this.tsconfig.?.mergeJSX(this.bundler.options.jsx)
|
||||
else
|
||||
@@ -163,16 +179,15 @@ pub const TransformTask = struct {
|
||||
};
|
||||
|
||||
if (parse_result.empty) {
|
||||
this.output_code = ZigString.init("");
|
||||
this.output_code = bun.String.empty;
|
||||
return;
|
||||
}
|
||||
|
||||
const global_allocator = arena.backingAllocator();
|
||||
var buffer_writer = JSPrinter.BufferWriter.init(global_allocator) catch |err| {
|
||||
var buffer_writer = JSPrinter.BufferWriter.init(allocator) catch |err| {
|
||||
this.err = err;
|
||||
return;
|
||||
};
|
||||
buffer_writer.buffer.list.ensureTotalCapacity(global_allocator, 512) catch unreachable;
|
||||
buffer_writer.buffer.list.ensureTotalCapacity(allocator, 512) catch unreachable;
|
||||
buffer_writer.reset();
|
||||
|
||||
// defer {
|
||||
@@ -188,12 +203,9 @@ pub const TransformTask = struct {
|
||||
if (printed > 0) {
|
||||
buffer_writer = printer.ctx;
|
||||
buffer_writer.buffer.list.items = buffer_writer.written;
|
||||
|
||||
var output = JSC.ZigString.init(buffer_writer.written);
|
||||
output.mark();
|
||||
this.output_code = output;
|
||||
this.output_code = bun.String.createLatin1(buffer_writer.written);
|
||||
} else {
|
||||
this.output_code = ZigString.init("");
|
||||
this.output_code = bun.String.empty;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -219,28 +231,25 @@ pub const TransformTask = struct {
|
||||
return;
|
||||
}
|
||||
|
||||
finish(this.output_code, this.global, promise);
|
||||
|
||||
const global = this.global;
|
||||
const code = this.output_code;
|
||||
this.output_code = bun.String.empty;
|
||||
this.deinit();
|
||||
|
||||
finish(code, global, promise);
|
||||
}
|
||||
|
||||
noinline fn finish(code: ZigString, global: *JSGlobalObject, promise: *JSC.JSPromise) void {
|
||||
promise.resolve(global, code.toValueGC(global));
|
||||
noinline fn finish(code: bun.String, global: *JSGlobalObject, promise: *JSC.JSPromise) void {
|
||||
promise.resolve(global, code.toJS(global));
|
||||
code.deref();
|
||||
}
|
||||
|
||||
pub fn deinit(this: *TransformTask) void {
|
||||
var should_cleanup = false;
|
||||
defer if (should_cleanup) bun.Global.mimalloc_cleanup(false);
|
||||
|
||||
this.log.deinit();
|
||||
this.input_code.deinitAndUnprotect();
|
||||
this.output_code.deref();
|
||||
|
||||
if (this.output_code.isGloballyAllocated()) {
|
||||
should_cleanup = this.output_code.len > 512_000;
|
||||
this.output_code.deinitGlobal();
|
||||
}
|
||||
|
||||
bun.default_allocator.destroy(this);
|
||||
this.destroy();
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1008,7 +1017,7 @@ pub fn transform(
|
||||
return .zero;
|
||||
};
|
||||
|
||||
var code = JSC.Node.StringOrBuffer.fromJS(globalThis, this.arena.allocator(), code_arg) orelse {
|
||||
var code = JSC.Node.StringOrBuffer.fromJSWithEncodingMaybeAsync(globalThis, bun.default_allocator, code_arg, .utf8, true) orelse {
|
||||
globalThis.throwInvalidArgumentType("transform", "code", "string or Uint8Array");
|
||||
return .zero;
|
||||
};
|
||||
@@ -1024,17 +1033,23 @@ pub fn transform(
|
||||
};
|
||||
|
||||
if (exception.* != null) {
|
||||
code.deinit();
|
||||
globalThis.throwValue(JSC.JSValue.c(exception.*));
|
||||
return .zero;
|
||||
}
|
||||
|
||||
code.toThreadSafe();
|
||||
if (code == .buffer) {
|
||||
code_arg.protect();
|
||||
}
|
||||
var task = TransformTask.create(
|
||||
this,
|
||||
code,
|
||||
globalThis,
|
||||
loader orelse this.transpiler_options.default_loader,
|
||||
) catch {
|
||||
if (code == .buffer) {
|
||||
code_arg.unprotect();
|
||||
}
|
||||
globalThis.throw("Out of memory", .{});
|
||||
return .zero;
|
||||
};
|
||||
|
||||
@@ -261,7 +261,7 @@ const SingleValueHeaders = bun.ComptimeStringMap(void, .{
|
||||
.{"x-content-type-options"},
|
||||
});
|
||||
|
||||
pub export fn BUN__HTTP2__getUnpackedSettings(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
|
||||
pub fn jsGetUnpackedSettings(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
|
||||
JSC.markBinding(@src());
|
||||
var settings: FullSettingsPayload = .{};
|
||||
|
||||
@@ -296,7 +296,7 @@ pub export fn BUN__HTTP2__getUnpackedSettings(globalObject: *JSC.JSGlobalObject,
|
||||
}
|
||||
}
|
||||
|
||||
pub export fn BUN__HTTP2_getPackedSettings(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JSValue {
|
||||
pub fn jsGetPackedSettings(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSValue {
|
||||
var settings: FullSettingsPayload = .{};
|
||||
const args_list = callframe.arguments(1);
|
||||
|
||||
@@ -408,11 +408,6 @@ pub export fn BUN__HTTP2_getPackedSettings(globalObject: *JSC.JSGlobalObject, ca
|
||||
return binary_type.toJS(bytes, globalObject);
|
||||
}
|
||||
|
||||
comptime {
|
||||
_ = BUN__HTTP2__getUnpackedSettings;
|
||||
_ = BUN__HTTP2_getPackedSettings;
|
||||
}
|
||||
|
||||
const Handlers = struct {
|
||||
onError: JSC.JSValue = .zero,
|
||||
onWrite: JSC.JSValue = .zero,
|
||||
@@ -2541,3 +2536,11 @@ pub const H2FrameParser = struct {
|
||||
this.deinit();
|
||||
}
|
||||
};
|
||||
|
||||
pub fn createNodeHttp2Binding(global: *JSC.JSGlobalObject) JSC.JSValue {
|
||||
return JSC.JSArray.create(global, &.{
|
||||
H2FrameParser.getConstructor(global),
|
||||
JSC.JSFunction.create(global, "getPackedSettings", jsGetPackedSettings, 0, .{}),
|
||||
JSC.JSFunction.create(global, "getUnpackedSettings", jsGetUnpackedSettings, 0, .{}),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -2888,6 +2888,9 @@ fn NewSocket(comptime ssl: bool) type {
|
||||
// open is not immediately called because we need to set bunSocketInternal
|
||||
tls.markActive();
|
||||
|
||||
// we're unrefing the original instance and refing the TLS instance
|
||||
tls.poll_ref.ref(this.handlers.vm);
|
||||
|
||||
// mark both instances on socket data
|
||||
new_socket.ext(WrappedSocket).?.* = .{ .tcp = raw, .tls = tls };
|
||||
|
||||
|
||||
@@ -409,9 +409,10 @@ pub const Stdio = union(enum) {
|
||||
}
|
||||
}
|
||||
} else if (value.asArrayBuffer(globalThis)) |array_buffer| {
|
||||
if (array_buffer.slice().len == 0) {
|
||||
globalThis.throwInvalidArguments("ArrayBuffer cannot be empty", .{});
|
||||
return false;
|
||||
// Change in Bun v1.0.34: don't throw for empty ArrayBuffer
|
||||
if (array_buffer.byteSlice().len == 0) {
|
||||
out_stdio.* = .{ .ignore = {} };
|
||||
return true;
|
||||
}
|
||||
|
||||
out_stdio.* = .{
|
||||
@@ -475,6 +476,12 @@ pub const Stdio = union(enum) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Instead of writing an empty blob, lets just make it /dev/null
|
||||
if (blob.fastSize() == 0) {
|
||||
stdio.* = .{ .ignore = {} };
|
||||
return true;
|
||||
}
|
||||
|
||||
stdio.* = .{ .blob = blob };
|
||||
return true;
|
||||
}
|
||||
|
||||

@@ -173,9 +173,9 @@ pub const Subprocess = struct {
has_pending_activity: std.atomic.Value(bool) = std.atomic.Value(bool).init(true),
this_jsvalue: JSC.JSValue = .zero,

ipc_mode: IPCMode,
/// `null` indicates all of the IPC data is uninitialized.
ipc_data: ?IPC.IPCData,
ipc_callback: JSC.Strong = .{},
ipc: IPC.IPCData,
flags: Flags = .{},

weak_file_sink_stdin_ptr: ?*JSC.WebCore.FileSink = null,

@@ -188,10 +188,14 @@ pub const Subprocess = struct {

pub const SignalCode = bun.SignalCode;

pub const IPCMode = enum {
none,
bun,
// json,
pub const Poll = union(enum) {
poll_ref: ?*Async.FilePoll,
wait_thread: WaitThreadPoll,
};

pub const WaitThreadPoll = struct {
ref_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0),
poll_ref: Async.KeepAlive = .{},
};

pub fn resourceUsage(

@@ -234,7 +238,7 @@ pub const Subprocess = struct {
}

pub fn hasPendingActivityNonThreadsafe(this: *const Subprocess) bool {
if (this.ipc_mode != .none) {
if (this.ipc_data != null) {
return true;
}

@@ -648,10 +652,14 @@ pub const Subprocess = struct {
}

pub fn doSend(this: *Subprocess, global: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) callconv(.C) JSValue {
if (this.ipc_mode == .none) {
global.throw("Subprocess.send() can only be used if an IPC channel is open.", .{});
const ipc_data = &(this.ipc_data orelse {
if (this.hasExited()) {
global.throw("Subprocess.send() cannot be used after the process has exited.", .{});
} else {
global.throw("Subprocess.send() can only be used if an IPC channel is open.", .{});
}
return .zero;
}
});

if (callFrame.argumentsCount() == 0) {
global.throwInvalidArguments("Subprocess.send() requires one argument", .{});

@@ -660,16 +668,16 @@ pub const Subprocess = struct {

const value = callFrame.argument(0);

const success = this.ipc.serializeAndSend(global, value);
const success = ipc_data.serializeAndSend(global, value);
if (!success) return .zero;

return JSC.JSValue.jsUndefined();
}

pub fn disconnect(this: *Subprocess) void {
if (this.ipc_mode == .none) return;
this.ipc.socket.close(0, null);
this.ipc_mode = .none;
const ipc_data = this.ipc_data orelse return;
ipc_data.socket.close(0, null);
this.ipc_data = null;
}

pub fn pid(this: *const Subprocess) i32 {

@@ -701,7 +709,7 @@ pub const Subprocess = struct {

this.observable_getters.insert(.stdio);
var pipes = this.stdio_pipes.items;
if (this.ipc_mode != .none) {
if (this.ipc_data != null) {
array.push(global, .null);
pipes = pipes[@min(1, pipes.len)..];
}

@@ -1577,7 +1585,7 @@ pub const Subprocess = struct {
var cmd_value = JSValue.zero;
var detached = false;
var args = args_;
var ipc_mode = IPCMode.none;
var maybe_ipc_mode: if (is_sync) void else ?IPC.Mode = if (is_sync) {} else null;
var ipc_callback: JSValue = .zero;
var extra_fds = std.ArrayList(bun.spawn.SpawnOptions.Stdio).init(bun.default_allocator);
var argv0: ?[*:0]const u8 = null;

@@ -1690,18 +1698,36 @@ pub const Subprocess = struct {
if (args != .zero and args.isObject()) {

// This must run before the stdio parsing happens
if (args.getTruthy(globalThis, "ipc")) |val| {
if (val.isCell() and val.isCallable(globalThis.vm())) {
// In the future, we should add a way to use a different IPC serialization format, specifically `json`.
// but the only use case this has is doing interop with node.js IPC and other programs.
ipc_mode = .bun;
ipc_callback = val.withAsyncContextIfNeeded(globalThis);

if (Environment.isPosix) {
extra_fds.append(.{ .buffer = {} }) catch {
globalThis.throwOutOfMemory();
return .zero;
if (!is_sync) {
if (args.getTruthy(globalThis, "ipc")) |val| {
if (val.isCell() and val.isCallable(globalThis.vm())) {
maybe_ipc_mode = ipc_mode: {
if (args.get(globalThis, "serialization")) |mode_val| {
if (mode_val.isString()) {
const mode_str = mode_val.toBunString(globalThis);
defer mode_str.deref();
const slice = mode_str.toUTF8(bun.default_allocator);
defer slice.deinit();
break :ipc_mode IPC.Mode.fromString(slice.slice()) orelse {
globalThis.throwInvalidArguments("serialization must be \"json\" or \"advanced\"", .{});
return .zero;
};
} else {
globalThis.throwInvalidArguments("serialization must be a 'string'", .{});
return .zero;
}
}
break :ipc_mode .advanced;
};

ipc_callback = val.withAsyncContextIfNeeded(globalThis);

if (Environment.isPosix) {
extra_fds.append(.{ .buffer = {} }) catch {
globalThis.throwOutOfMemory();
return .zero;
};
}
}
}
}

@@ -1871,13 +1897,8 @@ pub const Subprocess = struct {
}
}

var windows_ipc_env_buf: if (Environment.isWindows) ["BUN_INTERNAL_IPC_FD=\\\\.\\pipe\\BUN_IPC_00000000-0000-0000-0000-000000000000\x00".len]u8 else void = undefined;
if (ipc_mode != .none) {
if (comptime is_sync) {
globalThis.throwInvalidArguments("IPC is not supported in Bun.spawnSync", .{});
return .zero;
}

var windows_ipc_env_buf: if (Environment.isWindows) ["NODE_CHANNEL_FD=\\\\.\\pipe\\BUN_IPC_00000000-0000-0000-0000-000000000000\x00".len]u8 else void = undefined;
if (!is_sync) if (maybe_ipc_mode) |ipc_mode| {
// IPC is currently implemented in a very limited way.
//
// Node lets you pass as many fds as you want, they all become be sockets; then, IPC is just a special

@@ -1890,17 +1911,25 @@ pub const Subprocess = struct {
// behavior, where this workaround suffices.
//
// When Bun.spawn() is given an `.ipc` callback, it enables IPC as follows:
env_array.ensureUnusedCapacity(allocator, 2) catch |err| return globalThis.handleError(err, "in Bun.spawn");
env_array.ensureUnusedCapacity(allocator, 3) catch |err| return globalThis.handleError(err, "in Bun.spawn");
if (Environment.isPosix) {
env_array.appendAssumeCapacity("BUN_INTERNAL_IPC_FD=3");
env_array.appendAssumeCapacity("NODE_CHANNEL_FD=3");
} else {
const uuid = globalThis.bunVM().rareData().nextUUID();
const pipe_env = std.fmt.bufPrintZ(&windows_ipc_env_buf, "BUN_INTERNAL_IPC_FD=\\\\.\\pipe\\BUN_IPC_{s}", .{uuid}) catch |err| switch (err) {
const pipe_env = std.fmt.bufPrintZ(
&windows_ipc_env_buf,
"NODE_CHANNEL_FD=\\\\.\\pipe\\BUN_IPC_{s}",
.{uuid},
) catch |err| switch (err) {
error.NoSpaceLeft => unreachable, // upper bound for this string is known
};
env_array.appendAssumeCapacity(pipe_env);
}
}

env_array.appendAssumeCapacity(switch (ipc_mode) {
inline else => |t| "NODE_CHANNEL_SERIALIZATION_MODE=" ++ @tagName(t),
});
};

env_array.append(allocator, null) catch {
globalThis.throwOutOfMemory();

@@ -1968,8 +1997,8 @@ pub const Subprocess = struct {
};

var posix_ipc_info: if (Environment.isPosix) IPC.Socket else void = undefined;
if (Environment.isPosix) {
if (ipc_mode != .none) {
if (Environment.isPosix and !is_sync) {
if (maybe_ipc_mode != null) {
posix_ipc_info = .{
// we initialize ext later in the function
.socket = uws.us_socket_from_fd(

@@ -2022,32 +2051,41 @@ pub const Subprocess = struct {
),
.stdio_pipes = spawned.extra_pipes.moveToUnmanaged(),
.on_exit_callback = if (on_exit_callback != .zero) JSC.Strong.create(on_exit_callback, globalThis) else .{},
.ipc_mode = ipc_mode,
// will be assigned in the block below
.ipc = if (Environment.isWindows) .{} else .{ .socket = posix_ipc_info },
.ipc_callback = if (ipc_callback != .zero) JSC.Strong.create(ipc_callback, globalThis) else undefined,
.ipc_data = if (!is_sync)
if (maybe_ipc_mode) |ipc_mode|
if (Environment.isWindows) .{
.mode = ipc_mode,
} else .{
.socket = posix_ipc_info,
.mode = ipc_mode,
}
else
null
else
null,
.ipc_callback = if (ipc_callback != .zero) JSC.Strong.create(ipc_callback, globalThis) else .{},
.flags = .{
.is_sync = is_sync,
},
};
subprocess.process.setExitHandler(subprocess);

if (ipc_mode != .none) {
if (subprocess.ipc_data) |*ipc_data| {
if (Environment.isPosix) {
const ptr = posix_ipc_info.ext(*Subprocess);
ptr.?.* = subprocess;
} else {
if (subprocess.ipc.configureServer(
if (ipc_data.configureServer(
Subprocess,
subprocess,
windows_ipc_env_buf["BUN_INTERNAL_IPC_FD=".len..],
windows_ipc_env_buf["NODE_CHANNEL_FD=".len..],
).asErr()) |err| {
process_allocator.destroy(subprocess);
globalThis.throwValue(err.toJSC(globalThis));
return .zero;
}
}
subprocess.ipc.writeVersionPacket();
ipc_data.writeVersionPacket();
}

if (subprocess.stdin == .pipe) {

@@ -2180,9 +2218,13 @@ pub const Subprocess = struct {
}

pub fn handleIPCClose(this: *Subprocess) void {
this.ipc_mode = .none;
this.ipc_data = null;
this.updateHasPendingActivity();
}

pub fn ipc(this: *Subprocess) *IPC.IPCData {
return &this.ipc_data.?;
}

pub const IPCHandler = IPC.NewIPCHandler(Subprocess);
};

@@ -231,9 +231,8 @@ pub const WalkTask = struct {
};

fn globWalkResultToJS(globWalk: *GlobWalker, globalThis: *JSGlobalObject) JSValue {
// if (globWalk.matchedPaths.items.len >= 0) {
if (globWalk.matchedPaths.items.len == 0) {
return JSC.JSArray.from(globalThis, &[_]JSC.JSValue{});
return JSC.JSValue.createEmptyArray(globalThis, 0);
}

return BunString.toJSArray(globalThis, globWalk.matchedPaths.items[0..]);

@@ -1571,7 +1571,9 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
}
}

pub fn onWritableResponseBuffer(this: *RequestContext, _: c_ulong, resp: *App.Response) callconv(.C) bool {
pub fn onWritableResponseBuffer(this: *RequestContext, _: u64, resp: *App.Response) callconv(.C) bool {
ctxLog("onWritableResponseBuffer", .{});

std.debug.assert(this.resp == resp);
if (this.flags.aborted) {
this.finalizeForAbort();

@@ -1583,7 +1585,8 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
}

// TODO: should we cork?
pub fn onWritableCompleteResponseBufferAndMetadata(this: *RequestContext, write_offset: c_ulong, resp: *App.Response) callconv(.C) bool {
pub fn onWritableCompleteResponseBufferAndMetadata(this: *RequestContext, write_offset: u64, resp: *App.Response) callconv(.C) bool {
ctxLog("onWritableCompleteResponseBufferAndMetadata", .{});
std.debug.assert(this.resp == resp);

if (this.flags.aborted) {

@@ -1604,7 +1607,8 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
return this.sendWritableBytesForCompleteResponseBuffer(this.response_buf_owned.items, write_offset, resp);
}

pub fn onWritableCompleteResponseBuffer(this: *RequestContext, write_offset: c_ulong, resp: *App.Response) callconv(.C) bool {
pub fn onWritableCompleteResponseBuffer(this: *RequestContext, write_offset: u64, resp: *App.Response) callconv(.C) bool {
ctxLog("onWritableCompleteResponseBuffer", .{});
std.debug.assert(this.resp == resp);
if (this.flags.aborted) {
this.finalizeForAbort();

@@ -1945,7 +1949,8 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
return true;
}

pub fn onWritableBytes(this: *RequestContext, write_offset: c_ulong, resp: *App.Response) callconv(.C) bool {
pub fn onWritableBytes(this: *RequestContext, write_offset: u64, resp: *App.Response) callconv(.C) bool {
ctxLog("onWritableBytes", .{});
std.debug.assert(this.resp == resp);
if (this.flags.aborted) {
this.finalizeForAbort();

@@ -1960,7 +1965,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
return true;
}

pub fn sendWritableBytesForBlob(this: *RequestContext, bytes_: []const u8, write_offset_: c_ulong, resp: *App.Response) bool {
pub fn sendWritableBytesForBlob(this: *RequestContext, bytes_: []const u8, write_offset_: u64, resp: *App.Response) bool {
std.debug.assert(this.resp == resp);
const write_offset: usize = write_offset_;

@@ -1975,7 +1980,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
}
}

pub fn sendWritableBytesForCompleteResponseBuffer(this: *RequestContext, bytes_: []const u8, write_offset_: c_ulong, resp: *App.Response) bool {
pub fn sendWritableBytesForCompleteResponseBuffer(this: *RequestContext, bytes_: []const u8, write_offset_: u64, resp: *App.Response) bool {
const write_offset: usize = write_offset_;
std.debug.assert(this.resp == resp);

@@ -1991,7 +1996,8 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
return true;
}

pub fn onWritableSendfile(this: *RequestContext, _: c_ulong, _: *App.Response) callconv(.C) bool {
pub fn onWritableSendfile(this: *RequestContext, _: u64, _: *App.Response) callconv(.C) bool {
ctxLog("onWritableSendfile", .{});
return this.onSendfile();
}

@@ -39,6 +39,14 @@ function source(name) {
isClosed: {
getter: "getIsClosedFromJS",
},
...(name === "File"
? {
setRawMode: {
fn: "setRawModeFromJS",
length: 1,
},
}
: {}),
},
klass: {},
values: ["pendingPromise", "onCloseCallback", "onDrainCallback"],

@@ -1,7 +1,7 @@
#include "root.h"
#include "ZigGlobalObject.h"
#include "AsyncContextFrame.h"
#include <JavaScriptCore/InternalFieldTuple.h>
#include <JavaScriptCore/Bun_InternalFieldTuple.h>

using namespace JSC;
using namespace WebCore;

@@ -10,11 +10,11 @@ const ClassInfo AsyncContextFrame::s_info = { "AsyncContextFrame"_s, &Base::s_in

AsyncContextFrame* AsyncContextFrame::create(VM& vm, JSC::Structure* structure, JSValue callback, JSValue context)
{
AsyncContextFrame* asyncContextData = new (NotNull, allocateCell<AsyncContextFrame>(vm)) AsyncContextFrame(vm, structure);
asyncContextData->finishCreation(vm);
asyncContextData->callback.set(vm, asyncContextData, callback);
asyncContextData->context.set(vm, asyncContextData, context);
return asyncContextData;
AsyncContextFrame* asyncContextFrame = new (NotNull, allocateCell<AsyncContextFrame>(vm)) AsyncContextFrame(vm, structure);
asyncContextFrame->finishCreation(vm);
asyncContextFrame->callback.set(vm, asyncContextFrame, callback);
asyncContextFrame->context.set(vm, asyncContextFrame, context);
return asyncContextFrame;
}

AsyncContextFrame* AsyncContextFrame::create(JSGlobalObject* global, JSValue callback, JSValue context)

@@ -22,11 +22,11 @@ AsyncContextFrame* AsyncContextFrame::create(JSGlobalObject* global, JSValue cal
auto& vm = global->vm();
ASSERT(callback.isCallable());
auto* structure = jsCast<Zig::GlobalObject*>(global)->AsyncContextFrameStructure();
AsyncContextFrame* asyncContextData = new (NotNull, allocateCell<AsyncContextFrame>(vm)) AsyncContextFrame(vm, structure);
asyncContextData->finishCreation(vm);
asyncContextData->callback.set(vm, asyncContextData, callback);
asyncContextData->context.set(vm, asyncContextData, context);
return asyncContextData;
AsyncContextFrame* asyncContextFrame = new (NotNull, allocateCell<AsyncContextFrame>(vm)) AsyncContextFrame(vm, structure);
asyncContextFrame->finishCreation(vm);
asyncContextFrame->callback.set(vm, asyncContextFrame, callback);
asyncContextFrame->context.set(vm, asyncContextFrame, context);
return asyncContextFrame;
}

JSC::Structure* AsyncContextFrame::createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject)

@@ -36,10 +36,10 @@ JSC::Structure* AsyncContextFrame::createStructure(JSC::VM& vm, JSC::JSGlobalObj

JSValue AsyncContextFrame::withAsyncContextIfNeeded(JSGlobalObject* globalObject, JSValue callback)
{
JSValue context = globalObject->m_asyncContextData.get()->getInternalField(0);
JSValue asyncContextData = globalObject->asyncContextTuple()->getInternalField(0);

// If there is no async context, do not snapshot the callback.
if (context.isUndefined()) {
if (asyncContextData.isUndefined()) {
return callback;
}

@@ -49,7 +49,7 @@ JSValue AsyncContextFrame::withAsyncContextIfNeeded(JSGlobalObject* globalObject
vm,
jsCast<Zig::GlobalObject*>(globalObject)->AsyncContextFrameStructure(),
callback,
context);
asyncContextData);
}

template<typename Visitor>

@@ -73,17 +73,17 @@ extern "C" JSC::EncodedJSValue AsyncContextFrame__withAsyncContextIfNeeded(JSGlo
if (!functionObject.isCell()) \
return jsUndefined(); \
auto& vm = global->vm(); \
JSValue restoreAsyncContext; \
InternalFieldTuple* asyncContextData = nullptr; \
JSValue oldAsyncContextData; \
InternalFieldTuple* asyncContextTuple = nullptr; \
if (auto* wrapper = jsDynamicCast<AsyncContextFrame*>(functionObject)) { \
functionObject = jsCast<JSC::JSObject*>(wrapper->callback.get()); \
asyncContextData = global->m_asyncContextData.get(); \
restoreAsyncContext = asyncContextData->getInternalField(0); \
asyncContextData->putInternalField(vm, 0, wrapper->context.get()); \
asyncContextTuple = global->asyncContextTuple(); \
oldAsyncContextData = asyncContextTuple->getInternalField(0); \
asyncContextTuple->putInternalField(vm, 0, wrapper->context.get()); \
} \
auto result = JSC::profiledCall(__VA_ARGS__); \
if (asyncContextData) { \
asyncContextData->putInternalField(vm, 0, restoreAsyncContext); \
if (asyncContextTuple) { \
asyncContextTuple->putInternalField(vm, 0, oldAsyncContextData); \
} \
return result;

@@ -180,7 +180,7 @@ public:
Zig::GlobalObject* global = reinterpret_cast<Zig::GlobalObject*>(&globalObject);
Vector<BunInspectorConnection*, 8> connections;
{
WTF::LockHolder locker(inspectorConnectionsLock);
Locker<Lock> locker(inspectorConnectionsLock);
connections.appendVector(inspectorConnections->get(global->scriptExecutionContext()->identifier()));
}

@@ -236,7 +236,7 @@ public:
WTF::Vector<WTF::String, 12> messages;

{
WTF::LockHolder locker(jsThreadMessagesLock);
Locker<Lock> locker(jsThreadMessagesLock);
this->jsThreadMessages.swap(messages);
}

@@ -276,7 +276,7 @@ public:
WTF::Vector<WTF::String, 12> messages;

{
WTF::LockHolder locker(debuggerThreadMessagesLock);
Locker<Lock> locker(debuggerThreadMessagesLock);
this->debuggerThreadMessages.swap(messages);
}

@@ -297,7 +297,7 @@ public:
void sendMessageToDebuggerThread(WTF::String&& inputMessage)
{
{
WTF::LockHolder locker(debuggerThreadMessagesLock);
Locker<Lock> locker(debuggerThreadMessagesLock);
debuggerThreadMessages.append(inputMessage);
}

@@ -311,7 +311,7 @@ public:
void sendMessageToInspectorFromDebuggerThread(const WTF::String& inputMessage)
{
{
WTF::LockHolder locker(jsThreadMessagesLock);
Locker<Lock> locker(jsThreadMessagesLock);
jsThreadMessages.append(inputMessage);
}

@@ -432,7 +432,7 @@ const JSC::ClassInfo JSBunInspectorConnection::s_info = { "BunInspectorConnectio
extern "C" unsigned int Bun__createJSDebugger(Zig::GlobalObject* globalObject)
{
{
WTF::LockHolder locker(inspectorConnectionsLock);
Locker<Lock> locker(inspectorConnectionsLock);
if (inspectorConnections == nullptr) {
inspectorConnections = new WTF::HashMap<ScriptExecutionContextIdentifier, Vector<BunInspectorConnection*, 8>>();
}

@@ -474,7 +474,7 @@ extern "C" void BunDebugger__willHotReload()
}

debuggerScriptExecutionContext->postTaskConcurrently([](ScriptExecutionContext& context) {
WTF::LockHolder locker(inspectorConnectionsLock);
Locker<Lock> locker(inspectorConnectionsLock);
for (auto& connections : *inspectorConnections) {
for (auto* connection : connections.value) {
connection->sendMessageToFrontend("{\"method\":\"Bun.canReload\"}"_s);

@@ -502,7 +502,7 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionCreateConnection, (JSGlobalObject * globalObj
targetContext->jsGlobalObject(), shouldRef);

{
WTF::LockHolder locker(inspectorConnectionsLock);
Locker<Lock> locker(inspectorConnectionsLock);
auto connections = inspectorConnections->get(targetContext->identifier());
connections.append(connection);
inspectorConnections->set(targetContext->identifier(), connections);

@@ -67,7 +67,7 @@ public:

void onMessage(std::string_view message)
{
WTF::String messageString = WTF::String::fromUTF8(message.data(), message.length());
WTF::String messageString = WTF::String::fromUTF8(std::span { message.data(), message.length() });
Inspector::JSGlobalObjectDebugger* debugger = reinterpret_cast<Inspector::JSGlobalObjectDebugger*>(this->globalObject->debugger());
if (debugger) {
debugger->runWhilePausedCallback = [](JSC::JSGlobalObject& globalObject, bool& done) -> void {

@@ -33,9 +33,6 @@

// --- Callbacks ---
#define FOR_EACH_CALLBACK(macro) \
macro(DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump) \
macro(_Os) \
macro(_Path) \
macro(allocUnsafe) \
macro(braces) \
macro(build) \

@@ -45,7 +42,6 @@
macro(fs) \
macro(gc) \
macro(generateHeapSnapshot) \
macro(getImportedStyles) \
macro(gunzipSync) \
macro(gzipSync) \
macro(indexOfLine) \

@@ -67,8 +63,6 @@
macro(which) \
macro(write) \
macro(stringWidth) \
macro(shellParse) \
macro(shellLex) \
macro(shellEscape) \

#define DECLARE_ZIG_BUN_OBJECT_CALLBACK(name) extern "C" JSC::EncodedJSValue BunObject_callback_##name(JSC::JSGlobalObject*, JSC::CallFrame*);

@@ -1,36 +1,39 @@

#include "root.h"
#include "ZigGlobalObject.h"
#include "JavaScriptCore/ArgList.h"
#include "JSDOMURL.h"
#include "headers.h"
#include "helpers.h"
#include "IDLTypes.h"
#include "DOMURL.h"
#include <JavaScriptCore/JSPromise.h>
#include <JavaScriptCore/JSBase.h>

#include <JavaScriptCore/ArgList.h>
#include <JavaScriptCore/BuiltinNames.h>
#include "ScriptExecutionContext.h"
#include "WebCoreJSClientData.h"
#include <JavaScriptCore/JSFunction.h>
#include <JavaScriptCore/DateInstance.h>
#include <JavaScriptCore/InternalFunction.h>
#include <JavaScriptCore/FunctionPrototype.h>
#include <JavaScriptCore/JSBase.h>
#include <JavaScriptCore/JSFunction.h>
#include <JavaScriptCore/JSObject.h>
#include <JavaScriptCore/JSPromise.h>
#include <JavaScriptCore/LazyClassStructure.h>
#include <JavaScriptCore/LazyClassStructureInlines.h>
#include <JavaScriptCore/FunctionPrototype.h>
#include <JavaScriptCore/DateInstance.h>
#include <JavaScriptCore/ObjectConstructor.h>
#include "headers.h"
#include <wtf/Compiler.h>

#include "JSDOMURL.h"
#include "BunObject.h"
#include "WebCoreJSBuiltins.h"
#include <JavaScriptCore/JSObject.h>

#include "BunObject+exports.h"
#include "DOMJITIDLConvert.h"
#include "DOMJITIDLType.h"
#include "DOMJITIDLTypeFilter.h"
#include "DOMURL.h"
#include "Exception.h"
#include "BunObject+exports.h"
#include "JSDOMException.h"
#include "IDLTypes.h"
#include "JSDOMConvert.h"
#include "wtf/Compiler.h"
#include "JSDOMException.h"
#include "JSDOMURL.h"
#include "PathInlines.h"
#include "ScriptExecutionContext.h"
#include "WebCoreJSBuiltins.h"
#include "WebCoreJSClientData.h"
#include "ZigGlobalObject.h"

namespace Bun {

@@ -247,20 +250,6 @@ static JSValue constructBunShell(VM& vm, JSObject* bunObject)
bunShell->putDirectNativeFunction(vm, globalObject, Identifier::fromString(vm, "braces"_s), 1, BunObject_callback_braces, ImplementationVisibility::Public, NoIntrinsic, JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly | 0);
bunShell->putDirectNativeFunction(vm, globalObject, Identifier::fromString(vm, "escape"_s), 1, BunObject_callback_shellEscape, ImplementationVisibility::Public, NoIntrinsic, JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly | 0);

// auto mainShellFunc = JSFunction::create(vm, globalObject, 2, String("$"_s), BunObject_callback_$, ImplementationVisibility::Public);
// auto mainShellFunc = JSFunction::create(vm, globalObject, 2, String("$"_s), BunObject_callback_$, ImplementationVisibility::Public);
// auto mainShellFunc = shellShellCodeGenerator;
if (has_bun_garbage_collector_flag_enabled) {
auto parseIdent
= Identifier::fromString(vm, String("parse"_s));
auto parseFunc = JSFunction::create(vm, globalObject, 2, String("shellParse"_s), BunObject_callback_shellParse, ImplementationVisibility::Private);
bunShell->putDirect(vm, parseIdent, parseFunc);

auto lexIdent = Identifier::fromString(vm, String("lex"_s));
auto lexFunc = JSFunction::create(vm, globalObject, 2, String("lex"_s), BunObject_callback_shellLex, ImplementationVisibility::Private);
bunShell->putDirect(vm, lexIdent, lexFunc);
}

return bunShell;
}

@@ -535,25 +524,11 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj
return JSC::JSValue::encode(JSC::jsString(vm, fileSystemPath));
}

JSC_DEFINE_HOST_FUNCTION(functionHashCode,
(JSC::JSGlobalObject * globalObject, JSC::CallFrame* callFrame))
{
JSC::JSValue stringToHash = callFrame->argument(0);
JSC::JSString* str = stringToHash.toStringOrNull(globalObject);
if (!str) {
return JSC::JSValue::encode(jsNumber(0));
}

auto view = str->value(globalObject);
return JSC::JSValue::encode(jsNumber(view.hash()));
}

/* Source for BunObject.lut.h
@begin bunObjectTable
$ constructBunShell ReadOnly|DontDelete|PropertyCallback
ArrayBufferSink BunObject_getter_wrap_ArrayBufferSink DontDelete|PropertyCallback
CryptoHasher BunObject_getter_wrap_CryptoHasher DontDelete|PropertyCallback
DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump BunObject_callback_DO_NOT_USE_OR_YOU_WILL_BE_FIRED_mimalloc_dump DontEnum|DontDelete|Function 1
FFI BunObject_getter_wrap_FFI DontDelete|PropertyCallback
FileSystemRouter BunObject_getter_wrap_FileSystemRouter DontDelete|PropertyCallback
Glob BunObject_getter_wrap_Glob DontDelete|PropertyCallback

@@ -567,11 +542,8 @@ JSC_DEFINE_HOST_FUNCTION(functionHashCode,
SHA512_256 BunObject_getter_wrap_SHA512_256 DontDelete|PropertyCallback
TOML BunObject_getter_wrap_TOML DontDelete|PropertyCallback
Transpiler BunObject_getter_wrap_Transpiler DontDelete|PropertyCallback
_Os BunObject_callback__Os DontEnum|DontDelete|Function 1
_Path BunObject_callback__Path DontEnum|DontDelete|Function 1
allocUnsafe BunObject_callback_allocUnsafe DontDelete|Function 1
argv BunObject_getter_wrap_argv DontDelete|PropertyCallback
assetPrefix BunObject_getter_wrap_assetPrefix DontEnum|DontDelete|PropertyCallback
build BunObject_callback_build DontDelete|Function 1
concatArrayBuffers functionConcatTypedArrays DontDelete|Function 1
connect BunObject_callback_connect DontDelete|Function 1

@@ -586,10 +558,8 @@ JSC_DEFINE_HOST_FUNCTION(functionHashCode,
fetch Bun__fetch ReadOnly|DontDelete|Function 1
file BunObject_callback_file DontDelete|Function 1
fileURLToPath functionFileURLToPath DontDelete|Function 1
fs BunObject_callback_fs DontEnum|DontDelete|Function 1
gc BunObject_callback_gc DontDelete|Function 1
generateHeapSnapshot BunObject_callback_generateHeapSnapshot DontDelete|Function 1
getImportedStyles BunObject_callback_getImportedStyles DontEnum|DontDelete|Function 1
gunzipSync BunObject_callback_gunzipSync DontDelete|Function 1
gzipSync BunObject_callback_gzipSync DontDelete|Function 1
hash BunObject_getter_wrap_hash DontDelete|PropertyCallback

@@ -629,7 +599,6 @@ JSC_DEFINE_HOST_FUNCTION(functionHashCode,
stderr BunObject_getter_wrap_stderr DontDelete|PropertyCallback
stdin BunObject_getter_wrap_stdin DontDelete|PropertyCallback
stdout BunObject_getter_wrap_stdout DontDelete|PropertyCallback
stringHashCode functionHashCode DontDelete|Function 1
stringWidth BunObject_callback_stringWidth DontDelete|Function 2
unsafe BunObject_getter_wrap_unsafe DontDelete|PropertyCallback
version constructBunVersion ReadOnly|DontDelete|PropertyCallback

@@ -1,5 +1,5 @@
#include "BunProcess.h"
#include <JavaScriptCore/InternalFieldTuple.h>
#include <JavaScriptCore/Bun_InternalFieldTuple.h>
#include <JavaScriptCore/JSMicrotask.h>
#include <JavaScriptCore/ObjectConstructor.h>
#include <JavaScriptCore/NumberPrototype.h>

@@ -103,6 +103,7 @@ extern "C" uint8_t Bun__setExitCode(void*, uint8_t);
extern "C" void* Bun__getVM();
extern "C" Zig::GlobalObject* Bun__getDefaultGlobal();
extern "C" bool Bun__GlobalObject__hasIPC(JSGlobalObject*);
extern "C" bool Bun__ensureProcessIPCInitialized(JSGlobalObject*);
extern "C" const char* Bun__githubURL;
extern "C" JSC_DECLARE_HOST_FUNCTION(Bun__Process__send);
extern "C" JSC_DECLARE_HOST_FUNCTION(Bun__Process__disconnect);

@@ -195,7 +196,7 @@ static JSValue constructProcessReleaseObject(VM& vm, JSObject* processObject)
release->putDirect(vm, Identifier::fromString(vm, "name"_s), jsString(vm, WTF::String("node"_s)), 0);

release->putDirect(vm, Identifier::fromString(vm, "lts"_s), jsBoolean(false), 0);
release->putDirect(vm, Identifier::fromString(vm, "sourceUrl"_s), jsString(vm, WTF::String(Bun__githubURL, strlen(Bun__githubURL))), 0);
release->putDirect(vm, Identifier::fromString(vm, "sourceUrl"_s), jsString(vm, WTF::String(std::span { Bun__githubURL, strlen(Bun__githubURL) })), 0);
release->putDirect(vm, Identifier::fromString(vm, "headersUrl"_s), jsEmptyString(vm), 0);
release->putDirect(vm, Identifier::fromString(vm, "libUrl"_s), jsEmptyString(vm), 0);

@@ -278,6 +279,21 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionDlopen,
}

WTF::String filename = callFrame->uncheckedArgument(1).toWTFString(globalObject);
if (filename.isEmpty()) {
JSC::throwTypeError(globalObject, scope, "dlopen requires a non-empty string as the second argument"_s);
return JSC::JSValue::encode(JSC::JSValue {});
}

if (filename.startsWith("file://"_s)) {
WTF::URL fileURL = WTF::URL(filename);
if (!fileURL.isValid() || !fileURL.protocolIsFile()) {
JSC::throwTypeError(globalObject, scope, "invalid file: URL passed to dlopen"_s);
return JSC::JSValue::encode(JSC::JSValue {});
}

filename = fileURL.fileSystemPath();
}

// Support embedded .node files
// See StandaloneModuleGraph.zig for what this "$bunfs" thing is
#if OS(WINDOWS)

@@ -307,7 +323,7 @@ JSC_DEFINE_HOST_FUNCTION(Process_functionDlopen,
LPWSTR messageBuffer = nullptr;
size_t size = FormatMessageW(FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS,
NULL, errorId, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), (LPWSTR)&messageBuffer, 0, NULL);
WTF::String msg = makeString("LoadLibrary failed: ", WTF::StringView((UCHAR*)messageBuffer, size));
WTF::String msg = makeString("LoadLibrary failed: ", WTF::StringView(std::span { (UCHAR*)messageBuffer, size }));
LocalFree(messageBuffer);
#else
WTF::String msg = WTF::String::fromUTF8(dlerror());

@@ -733,8 +749,10 @@ static void onDidChangeListeners(EventEmitter& eventEmitter, const Identifier& e
// IPC handlers
if (eventName.string() == "message"_s) {
if (isAdded) {
if (Bun__GlobalObject__hasIPC(eventEmitter.scriptExecutionContext()->jsGlobalObject())
auto* global = eventEmitter.scriptExecutionContext()->jsGlobalObject();
if (Bun__GlobalObject__hasIPC(global)
&& eventEmitter.listenerCount(eventName) == 1) {
Bun__ensureProcessIPCInitialized(global);
eventEmitter.scriptExecutionContext()->refEventLoop();
eventEmitter.m_hasIPCRef = true;
}

@@ -1361,7 +1379,7 @@ static JSValue constructReportObjectComplete(VM& vm, Zig::GlobalObject* globalOb
char cwd[PATH_MAX] = { 0 };
getcwd(cwd, PATH_MAX);

header->putDirect(vm, JSC::Identifier::fromString(vm, "cwd"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(reinterpret_cast<const LChar*>(cwd), strlen(cwd))), 0);
header->putDirect(vm, JSC::Identifier::fromString(vm, "cwd"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(std::span { reinterpret_cast<const LChar*>(cwd), strlen(cwd) })), 0);
}

header->putDirect(vm, JSC::Identifier::fromString(vm, "commandLine"_s), JSValue::decode(Bun__Process__getExecArgv(globalObject)), 0);

@@ -1377,10 +1395,10 @@ static JSValue constructReportObjectComplete(VM& vm, Zig::GlobalObject* globalOb
struct utsname buf;
uname(&buf);

header->putDirect(vm, JSC::Identifier::fromString(vm, "osName"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(reinterpret_cast<const LChar*>(buf.sysname), strlen(buf.sysname))), 0);
header->putDirect(vm, JSC::Identifier::fromString(vm, "osRelease"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(reinterpret_cast<const LChar*>(buf.release), strlen(buf.release))), 0);
header->putDirect(vm, JSC::Identifier::fromString(vm, "osVersion"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(reinterpret_cast<const LChar*>(buf.version), strlen(buf.version))), 0);
header->putDirect(vm, JSC::Identifier::fromString(vm, "osMachine"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(reinterpret_cast<const LChar*>(buf.machine), strlen(buf.machine))), 0);
header->putDirect(vm, JSC::Identifier::fromString(vm, "osName"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(std::span { reinterpret_cast<const LChar*>(buf.sysname), strlen(buf.sysname) })), 0);
header->putDirect(vm, JSC::Identifier::fromString(vm, "osRelease"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(std::span { reinterpret_cast<const LChar*>(buf.release), strlen(buf.release) })), 0);
header->putDirect(vm, JSC::Identifier::fromString(vm, "osVersion"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(std::span { reinterpret_cast<const LChar*>(buf.version), strlen(buf.version) })), 0);
header->putDirect(vm, JSC::Identifier::fromString(vm, "osMachine"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(std::span { reinterpret_cast<const LChar*>(buf.machine), strlen(buf.machine) })), 0);
}

// host

@@ -1389,7 +1407,7 @@ static JSValue constructReportObjectComplete(VM& vm, Zig::GlobalObject* globalOb
char host[1024] = { 0 };
gethostname(host, 1024);

header->putDirect(vm, JSC::Identifier::fromString(vm, "host"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(reinterpret_cast<const LChar*>(host), strlen(host))), 0);
header->putDirect(vm, JSC::Identifier::fromString(vm, "host"_s), JSC::jsString(vm, String::fromUTF8ReplacingInvalidSequences(std::span { reinterpret_cast<const LChar*>(host), strlen(host) })), 0);
}

#if OS(LINUX)
JSC::JSArray* resultObject = JSC::jsCast<JSC::JSArray*>(result);
|
||||
|
||||
#if OS(WINDOWS)
|
||||
Zig::GlobalObject* globalThis = jsCast<Zig::GlobalObject*>(globalObject);
|
||||
// Node.js docs - https://nodejs.org/api/process.html#a-note-on-process-io
|
||||
// > Files: synchronous on Windows and POSIX
|
||||
// > TTYs (Terminals): asynchronous on Windows, synchronous on POSIX
|
||||
// > Pipes (and sockets): synchronous on Windows, asynchronous on POSIX
|
||||
// > Synchronous writes avoid problems such as output written with console.log() or console.error() being unexpectedly interleaved, or not written at all if process.exit() is called before an asynchronous write completes. See process.exit() for more information.
|
||||
Bun__ForceFileSinkToBeSynchronousOnWindows(globalThis, JSValue::encode(resultObject->getIndex(globalObject, 1)));
|
||||
Zig::GlobalObject* globalThis = jsCast<Zig::GlobalObject*>(globalObject);
|
||||
// Node.js docs - https://nodejs.org/api/process.html#a-note-on-process-io
|
||||
// > Files: synchronous on Windows and POSIX
|
||||
// > TTYs (Terminals): asynchronous on Windows, synchronous on POSIX
|
||||
// > Pipes (and sockets): synchronous on Windows, asynchronous on POSIX
|
||||
// > Synchronous writes avoid problems such as output written with console.log() or console.error() being unexpectedly interleaved, or not written at all if process.exit() is called before an asynchronous write completes. See process.exit() for more information.
|
||||
Bun__ForceFileSinkToBeSynchronousOnWindows(globalThis, JSValue::encode(resultObject->getIndex(globalObject, 1)));
|
||||
#endif
|
||||
|
||||
return resultObject->getIndex(globalObject, 0);
|
||||
|
||||
@@ -226,7 +226,7 @@ extern "C" BunString BunString__fromUTF8(const char* bytes, size_t length)
|
||||
return { BunStringTag::WTFStringImpl, { .wtf = &impl.leakRef() } };
|
||||
}
|
||||
|
||||
auto str = WTF::String::fromUTF8ReplacingInvalidSequences(reinterpret_cast<const LChar*>(bytes), length);
|
||||
auto str = WTF::String::fromUTF8ReplacingInvalidSequences(std::span { reinterpret_cast<const LChar*>(bytes), length });
|
||||
str.impl()->ref();
|
||||
return Bun::toString(str);
|
||||
}
|
||||
@@ -234,7 +234,7 @@ extern "C" BunString BunString__fromUTF8(const char* bytes, size_t length)
|
||||
extern "C" BunString BunString__fromLatin1(const char* bytes, size_t length)
|
||||
{
|
||||
ASSERT(length > 0);
|
||||
return { BunStringTag::WTFStringImpl, { .wtf = &WTF::StringImpl::create(bytes, length).leakRef() } };
|
||||
return { BunStringTag::WTFStringImpl, { .wtf = &WTF::StringImpl::create(std::span { bytes, length }).leakRef() } };
|
||||
}
|
||||
|
||||
extern "C" BunString BunString__fromUTF16ToLatin1(const char16_t* bytes, size_t length)
|
||||
@@ -256,7 +256,7 @@ extern "C" BunString BunString__fromUTF16ToLatin1(const char16_t* bytes, size_t
|
||||
extern "C" BunString BunString__fromUTF16(const char16_t* bytes, size_t length)
|
||||
{
|
||||
ASSERT(length > 0);
|
||||
return { BunStringTag::WTFStringImpl, { .wtf = &WTF::StringImpl::create(bytes, length).leakRef() } };
|
||||
return { BunStringTag::WTFStringImpl, { .wtf = &WTF::StringImpl::create(std::span { bytes, length }).leakRef() } };
|
||||
}
|
||||
|
||||
extern "C" BunString BunString__fromBytes(const char* bytes, size_t length)
|
||||
|
||||
@@ -29,49 +29,46 @@
|
||||
* different value. In that case, it will have a stale value.
|
||||
*/
|
||||
|
||||
#include "headers.h"
|
||||
#include "JavaScriptCore/JSCast.h"
|
||||
#include <JavaScriptCore/JSMapInlines.h>
|
||||
#include "root.h"
|
||||
#include "JavaScriptCore/SourceCode.h"
|
||||
#include "headers-handwritten.h"
|
||||
#include "ZigGlobalObject.h"
|
||||
#include "headers.h"
|
||||
|
||||
#include <JavaScriptCore/Completion.h>
|
||||
#include <JavaScriptCore/DFGAbstractHeap.h>
|
||||
#include <JavaScriptCore/FunctionPrototype.h>
|
||||
#include <JavaScriptCore/GetterSetter.h>
|
||||
#include <JavaScriptCore/HeapAnalyzer.h>
|
||||
#include <JavaScriptCore/Identifier.h>
|
||||
#include <JavaScriptCore/JSCast.h>
|
||||
#include <JavaScriptCore/JSMap.h>
|
||||
#include <JavaScriptCore/JSMapInlines.h>
|
||||
#include <JavaScriptCore/JSModuleNamespaceObject.h>
|
||||
#include <JavaScriptCore/JSSourceCode.h>
|
||||
#include <JavaScriptCore/JSString.h>
|
||||
#include <JavaScriptCore/ObjectConstructor.h>
|
||||
#include <JavaScriptCore/OptionsList.h>
|
||||
#include <JavaScriptCore/ParserError.h>
|
||||
#include <JavaScriptCore/ScriptExecutable.h>
|
||||
#include <JavaScriptCore/SourceOrigin.h>
|
||||
#include <JavaScriptCore/StackFrame.h>
|
||||
#include <JavaScriptCore/StackVisitor.h>
|
||||
#include "BunClientData.h"
|
||||
#include <JavaScriptCore/Identifier.h>
|
||||
#include "ImportMetaObject.h"
|
||||
|
||||
#include <JavaScriptCore/TypedArrayInlines.h>
|
||||
#include <JavaScriptCore/PropertyNameArray.h>
|
||||
#include <JavaScriptCore/JSWeakMap.h>
|
||||
#include <JavaScriptCore/JSWeakMapInlines.h>
|
||||
#include <JavaScriptCore/JSWithScope.h>
|
||||
|
||||
#include <JavaScriptCore/DFGAbstractHeap.h>
|
||||
#include <JavaScriptCore/Completion.h>
|
||||
#include "ModuleLoader.h"
|
||||
#include <JavaScriptCore/JSMap.h>
|
||||
|
||||
#include <JavaScriptCore/JSMapInlines.h>
|
||||
#include <JavaScriptCore/GetterSetter.h>
|
||||
#include "ZigSourceProvider.h"
|
||||
#include <JavaScriptCore/FunctionPrototype.h>
|
||||
#include "CommonJSModuleRecord.h"
|
||||
#include <JavaScriptCore/JSModuleNamespaceObject.h>
|
||||
#include <JavaScriptCore/JSSourceCode.h>
|
||||
#include <JavaScriptCore/LazyPropertyInlines.h>
|
||||
#include <JavaScriptCore/HeapAnalyzer.h>
|
||||
#include <JavaScriptCore/ObjectConstructor.h>
|
||||
#include <JavaScriptCore/OptionsList.h>
|
||||
#include <JavaScriptCore/ParserError.h>
|
||||
#include <JavaScriptCore/PropertyNameArray.h>
|
||||
#include <JavaScriptCore/ScriptExecutable.h>
|
||||
#include <JavaScriptCore/SourceCode.h>
|
||||
#include <JavaScriptCore/SourceOrigin.h>
|
||||
#include <JavaScriptCore/StackFrame.h>
|
||||
#include <JavaScriptCore/StackVisitor.h>
|
||||
#include <JavaScriptCore/TypedArrayInlines.h>
|
||||
#include <wtf/NakedPtr.h>
|
||||
#include <wtf/URL.h>
|
||||
|
||||
#include "ModuleLoader.h"
|
||||
#include "CommonJSModuleRecord.h"
|
||||
|
||||
#include "BunClientData.h"
|
||||
#include "ImportMetaObject.h"
|
||||
#include "PathInlines.h"
|
||||
#include "wtf/NakedPtr.h"
|
||||
#include "wtf/URL.h"
|
||||
#include "ZigGlobalObject.h"
|
||||
#include "ZigSourceProvider.h"
|
||||
|
||||
extern "C" bool Bun__isBunMain(JSC::JSGlobalObject* global, const BunString*);
|
||||
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
#pragma once
|
||||
#include "JavaScriptCore/JSGlobalObject.h"
|
||||
#include "root.h"
|
||||
#include "headers-handwritten.h"
|
||||
#include "wtf/NakedPtr.h"
|
||||
|
||||
#include <JavaScriptCore/JSGlobalObject.h>
|
||||
#include <wtf/NakedPtr.h>
|
||||
|
||||
namespace Zig {
|
||||
class GlobalObject;
|
||||
|
||||
@@ -1,23 +1,20 @@
|
||||
#include "root.h"
|
||||
|
||||
#include "JavaScriptCore/ArgList.h"
|
||||
#include "headers.h"
|
||||
#include "ConsoleObject.h"
|
||||
|
||||
#include <JavaScriptCore/ArgList.h>
|
||||
#include <JavaScriptCore/ConsoleClient.h>
|
||||
#include <JavaScriptCore/ConsoleMessage.h>
|
||||
#include <JavaScriptCore/InspectorConsoleAgent.h>
|
||||
#include <JavaScriptCore/InspectorDebuggerAgent.h>
|
||||
#include <JavaScriptCore/InspectorScriptProfilerAgent.h>
|
||||
#include <JavaScriptCore/JSGlobalObjectDebuggable.h>
|
||||
#include <JavaScriptCore/JSGlobalObjectInspectorController.h>
|
||||
#include <JavaScriptCore/JSString.h>
|
||||
#include <JavaScriptCore/ScriptArguments.h>
|
||||
#include <wtf/text/WTFString.h>
|
||||
|
||||
#include <JavaScriptCore/JSGlobalObjectInspectorController.h>
|
||||
#include <JavaScriptCore/JSGlobalObjectDebuggable.h>
|
||||
#include <JavaScriptCore/ConsoleClient.h>
|
||||
|
||||
#include "ConsoleObject.h"
|
||||
#include "GCDefferalContext.h"
|
||||
#include <JavaScriptCore/InspectorScriptProfilerAgent.h>
|
||||
#include <JavaScriptCore/InspectorDebuggerAgent.h>
|
||||
#include <JavaScriptCore/InspectorConsoleAgent.h>
|
||||
|
||||
namespace Bun {
|
||||
using namespace JSC;
|
||||
|
||||
11
src/bun.js/bindings/DoubleFormatter.cpp
Normal file
@@ -0,0 +1,11 @@
#include "root.h"
#include "wtf/dtoa.h"
#include <cstring>

/// Must be called with a buffer of exactly 124
/// Find the length by scanning for the 0
extern "C" void WTF__dtoa(char* buf_124_bytes, double number)
{
NumberToStringBuffer& buf = *reinterpret_cast<NumberToStringBuffer*>(buf_124_bytes);
WTF::numberToString(number, buf);
}

@@ -293,13 +293,8 @@ bool JSCStackFrame::calculateSourcePositions()
* Note that we're using m_codeBlock->unlinkedCodeBlock()->expressionRangeForBytecodeOffset rather than m_codeBlock->expressionRangeForBytecodeOffset
* in order get the "raw" offsets and avoid the CodeBlock's expressionRangeForBytecodeOffset modifications to the line and column numbers,
* (we don't need the column number from it, and we'll calculate the line "fixes" ourselves). */
unsigned startOffset = 0;
unsigned endOffset = 0;
unsigned divotPoint = 0;
unsigned line = 0;
unsigned unusedColumn = 0;
m_codeBlock->unlinkedCodeBlock()->expressionRangeForBytecodeIndex(bytecodeIndex, divotPoint, startOffset, endOffset, line, unusedColumn);
divotPoint += m_codeBlock->sourceOffset();
ExpressionInfo::Entry info = m_codeBlock->unlinkedCodeBlock()->expressionInfoForBytecodeIndex(bytecodeIndex);
info.divot += m_codeBlock->sourceOffset();

/* On the first line of the source code, it seems that we need to "fix" the column with the starting
* offset. We currently use codeBlock->source()->startPosition().m_column.oneBasedInt() as the

@@ -307,15 +302,15 @@ bool JSCStackFrame::calculateSourcePositions()
* (and what CodeBlock::expressionRangeForBytecodeOffset does). This is because firstLineColumnOffset
* values seems different from what we expect (according to v8's tests) and I haven't dove into the
* relevant parts in JSC (yet) to figure out why. */
unsigned columnOffset = line ? 0 : m_codeBlock->source().startColumn().zeroBasedInt();
unsigned columnOffset = info.lineColumn.line ? 0 : m_codeBlock->source().startColumn().zeroBasedInt();

// "Fix" the line number
JSC::ScriptExecutable* executable = m_codeBlock->ownerExecutable();
line = executable->overrideLineNumber(m_vm).value_or(line + executable->firstLine());
info.lineColumn.line = executable->overrideLineNumber(m_vm).value_or(info.lineColumn.line + executable->firstLine());

// Calculate the staring\ending offsets of the entire expression
int expressionStart = divotPoint - startOffset;
int expressionStop = divotPoint + endOffset;
int expressionStart = info.divot - info.startOffset;
int expressionStop = info.divot + info.endOffset;

// Make sure the range is valid
StringView sourceString = m_codeBlock->source().provider()->source();

@@ -345,7 +340,7 @@ bool JSCStackFrame::calculateSourcePositions()
*/
m_sourcePositions.expressionStart = WTF::OrdinalNumber::fromZeroBasedInt(expressionStart);
m_sourcePositions.expressionStop = WTF::OrdinalNumber::fromZeroBasedInt(expressionStop);
m_sourcePositions.line = WTF::OrdinalNumber::fromZeroBasedInt(static_cast<int>(line));
m_sourcePositions.line = WTF::OrdinalNumber::fromZeroBasedInt(static_cast<int>(info.lineColumn.line));
m_sourcePositions.startColumn = WTF::OrdinalNumber::fromZeroBasedInt((expressionStart - lineStart) + columnOffset);
m_sourcePositions.endColumn = WTF::OrdinalNumber::fromZeroBasedInt(m_sourcePositions.startColumn.zeroBasedInt() + (expressionStop - expressionStart));
m_sourcePositions.lineStart = WTF::OrdinalNumber::fromZeroBasedInt(static_cast<int>(lineStart));

@@ -345,55 +345,96 @@ JSC_DEFINE_HOST_FUNCTION(functionImportMeta__resolve,
JSC::VM& vm = globalObject->vm();
auto scope = DECLARE_THROW_SCOPE(globalObject->vm());

switch (callFrame->argumentCount()) {
case 0: {
// not "requires" because "require" could be confusing
JSC::throwTypeError(globalObject, scope, "import.meta.resolve needs 1 argument (a string)"_s);
scope.release();
return JSC::JSValue::encode(JSC::JSValue {});
}
default: {
JSC::JSValue moduleName = callFrame->argument(0);
auto thisValue = callFrame->thisValue();
auto specifierValue = callFrame->argument(0);
// 1. Set specifier to ? ToString(specifier).
auto specifier = specifierValue.toWTFString(globalObject);
RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode(JSC::JSValue {}));

if (moduleName.isUndefinedOrNull()) {
auto scope = DECLARE_THROW_SCOPE(globalObject->vm());
JSC::throwTypeError(globalObject, scope, "import.meta.resolve expects a string"_s);
scope.release();
return JSC::JSValue::encode(JSC::JSValue {});
}
// Node.js allows a second argument for parent
JSValue from = {};

JSC__JSValue from;
if (callFrame->argumentCount() >= 2) {
JSValue fromValue = callFrame->uncheckedArgument(1);

if (callFrame->argumentCount() > 1 && callFrame->argument(1).isString()) {
from = JSC::JSValue::encode(callFrame->argument(1));
} else {
JSC::JSObject* thisObject = JSC::jsDynamicCast<JSC::JSObject*>(callFrame->thisValue());
if (UNLIKELY(!thisObject)) {
auto scope = DECLARE_THROW_SCOPE(globalObject->vm());
JSC::throwTypeError(globalObject, scope, "import.meta.resolve must be bound to an import.meta object"_s);
return JSC::JSValue::encode(JSC::JSValue {});
}

auto clientData = WebCore::clientData(vm);

from = JSC::JSValue::encode(thisObject->getIfPropertyExists(globalObject, clientData->builtinNames().pathPublicName()));
RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode(JSC::JSValue {}));
}

if (globalObject->onLoadPlugins.hasVirtualModules()) {
if (moduleName.isString()) {
auto moduleString = moduleName.toWTFString(globalObject);
if (auto resolvedString = globalObject->onLoadPlugins.resolveVirtualModule(moduleString, JSValue::decode(from).toWTFString(globalObject))) {
if (moduleString == resolvedString.value())
return JSC::JSValue::encode(JSPromise::resolvedPromise(globalObject, moduleName));
return JSC::JSValue::encode(JSPromise::resolvedPromise(globalObject, jsString(vm, resolvedString.value())));
if (!fromValue.isUndefinedOrNull() && fromValue.isObject()) {
if (JSValue pathsObject = fromValue.getObject()->getIfPropertyExists(globalObject, JSC::Identifier::fromString(vm, "paths"_s))) {
if (pathsObject.isCell() && pathsObject.asCell()->type() == JSC::JSType::ArrayType) {
auto* pathsArray = JSC::jsCast<JSC::JSArray*>(pathsObject);
if (pathsArray->length() > 0) {
fromValue = pathsArray->getIndex(globalObject, 0);
RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode(JSC::JSValue {}));
}
}
}
}

return Bun__resolve(globalObject, JSC::JSValue::encode(moduleName), from, true);
if (fromValue.isString()) {
from = fromValue;
}
}

if (!from) {
auto* thisObject = JSC::jsDynamicCast<JSC::JSObject*>(thisValue);
if (UNLIKELY(!thisObject)) {
auto scope = DECLARE_THROW_SCOPE(globalObject->vm());
JSC::throwTypeError(globalObject, scope, "import.meta.resolve must be bound to an import.meta object"_s);
RELEASE_AND_RETURN(scope, JSC::JSValue::encode(JSC::JSValue {}));
}

auto clientData = WebCore::clientData(vm);
JSValue pathProperty = thisObject->getIfPropertyExists(globalObject, clientData->builtinNames().pathPublicName());

if (LIKELY(pathProperty && pathProperty.isString())) {
from = pathProperty;
} else {
auto scope = DECLARE_THROW_SCOPE(globalObject->vm());
JSC::throwTypeError(globalObject, scope, "import.meta.resolve must be bound to an import.meta object"_s);
RELEASE_AND_RETURN(scope, JSC::JSValue::encode(JSC::JSValue {}));
}
}
ASSERT(from);

// from.toWTFString() *should* always be the fast case, since above we check that it's a string.
auto fromWTFString = from.toWTFString(globalObject);
RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode(JSC::JSValue {}));

// Try to resolve it to a relative file path. This path is not meant to throw module resolution errors.
if (specifier.startsWith("./"_s) || specifier.startsWith("../"_s) || specifier.startsWith("/"_s) || specifier.startsWith("file://"_s)
#if OS(WINDOWS)
|| specifier.startsWith(".\\"_s) || specifier.startsWith("..\\"_s) || specifier.startsWith("\\"_s)
#endif
) {
auto fromURL = fromWTFString.startsWith("file://"_s) ? WTF::URL(fromWTFString) : WTF::URL::fileURLWithFileSystemPath(fromWTFString);
if (!fromURL.isValid()) {
JSC::throwTypeError(globalObject, scope, "`parent` is not a valid Filepath / URL"_s);
RELEASE_AND_RETURN(scope, JSC::JSValue::encode(JSC::JSValue {}));
}

WTF::URL url(fromURL, specifier);
RELEASE_AND_RETURN(scope, JSValue::encode(jsString(vm, url.string())));
}

// In Node.js, `node:doesnotexist` resolves to `node:doesnotexist`
if (UNLIKELY(specifier.startsWith("node:"_s)) || UNLIKELY(specifier.startsWith("bun:"_s))) {
return JSValue::encode(jsString(vm, specifier));
}

// Run it through the module resolver, errors at this point are actual errors.
auto a = Bun::toString(specifier);
auto b = Bun::toString(fromWTFString);
auto result = JSValue::decode(Bun__resolveSyncWithStrings(globalObject, &a, &b, true));
if (!result.isString()) {
JSC::throwException(globalObject, scope, result);
return JSC::JSValue::encode(JSC::JSValue {});
}

auto resultString = result.toWTFString(globalObject);
if (isAbsolutePath(resultString)) {
// file path -> url
RELEASE_AND_RETURN(scope, JSValue::encode(jsString(vm, WTF::URL::fileURLWithFileSystemPath(resultString).string())));
}
return JSValue::encode(result);
}

JSC_DEFINE_CUSTOM_GETTER(jsImportMetaObjectGetter_url, (JSGlobalObject * globalObject, JSC::EncodedJSValue thisValue, PropertyName propertyName))

Some files were not shown because too many files have changed in this diff.