Compare commits


2 Commits

Author         SHA1         Message                          Date
Dylan Conway   b185cd134b   always set exit code             2023-01-18 16:20:12 -08:00
Dylan Conway   37f72ef632   set exit code if signal is 42    2023-01-18 16:08:20 -08:00
357 changed files with 26816 additions and 39855 deletions

.github/workflows/bun-homebrew.yml (vendored; new file, 30 lines)

@@ -0,0 +1,30 @@
name: bun-homebrew
on:
release:
types:
- published
- edited
jobs:
homebrew:
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh' && github.event.release.published_at != null
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ env.HOMEBREW_TOKEN }}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: '2.6'
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ github.event.release.tag_name }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_message: Release ${{ github.event.release.tag_name }}


@@ -1,165 +0,0 @@
name: bun-linux
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
TEST_TAG: bun-test'
on:
push:
branches:
- main
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
CPU_TARGET=${{matrix.cpu}}
WEBKIT_URL=${{matrix.webkit_url}}
GIT_SHA=${{github.sha}}
WEBKIT_BASENAME=${{matrix.webkit_basename}}
platforms: linux/${{matrix.build_arch}}
target: artifact
outputs: type=local,dest=${{runner.temp}}/release
- name: Zip
run: |
# if zip is not found
if [ ! -x "$(command -v zip)" ]; then
sudo apt-get update && sudo apt-get install -y zip --no-install-recommends
fi
if [ ! -x "$(command -v strip)" ]; then
sudo apt-get update && sudo apt-get install -y binutils --no-install-recommends
fi
cd ${{runner.temp}}/release
chmod +x bun-profile bun
mkdir bun-${{matrix.tag}}-profile
mkdir bun-${{matrix.tag}}
strip bun
mv bun-profile bun-${{matrix.tag}}-profile/bun-profile
mv bun bun-${{matrix.tag}}/bun
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
# - name: Sign Release
# id: sign-release
# if: |
# github.repository_owner == 'oven-sh'
# && github.ref == 'refs/heads/main'
# env:
# GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
# run: |
# echo "$GPG_PASSPHRASE" | bun run .scripts/sign-release.ts
# - name: Release Checksum
# id: release-checksum
# uses: ncipollo/release-action@v1
# if: |
# github.repository_owner == 'oven-sh'
# && github.ref == 'refs/heads/main'
# with:
# prerelease: true
# body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
# allowUpdates: true
# replacesArtifacts: true
# generateReleaseNotes: true
# artifactErrorsFailBuild: true
# token: ${{ secrets.GITHUB_TOKEN }}
# name: "Canary (${{github.sha}})"
# tag: "canary"
# artifacts: "SHASUMS256.txt,SHASUMS256.txt.asc"


@@ -32,25 +32,32 @@ jobs:
runs-on: ${{matrix.runner}}
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
include:
- cpu: haswell
tag: linux-x64
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-linux-amd64-lto.tar.gz"
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: nehalem
- cpu: westmere
tag: linux-x64-baseline
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-linux-amd64-lto.tar.gz"
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v3
@@ -121,20 +128,10 @@ jobs:
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
@@ -146,3 +143,11 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies


@@ -27,11 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
runs-on: zig-object
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
@@ -104,13 +104,13 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell
@@ -119,16 +119,16 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell
@@ -137,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native
@@ -145,7 +145,7 @@ jobs:
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true
@@ -242,14 +242,14 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -257,14 +257,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -370,9 +370,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"


@@ -27,11 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
runs-on: zig-object
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
@@ -104,13 +104,13 @@ jobs:
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell
@@ -119,16 +119,16 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell
@@ -137,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native
@@ -145,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -243,14 +243,14 @@ jobs:
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -258,14 +258,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -374,9 +374,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"


@@ -27,11 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
runs-on: zig-object
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
- cpu: haswell
@@ -104,13 +104,13 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell
@@ -119,16 +119,16 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell
@@ -137,7 +137,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native
@@ -145,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -245,14 +245,14 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
@@ -260,14 +260,14 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -376,9 +376,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"


@@ -1,116 +0,0 @@
name: bun-release-canary
concurrency: release-canary
on:
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
jobs:
npm:
name: Release to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- canary publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- id: bun-install
name: Install Dependencies
run: |
bun upgrade --canary
bun install
- id: setup-env
name: Setup Environment
run: |
SHA=$(git rev-parse --short "$GITHUB_SHA")
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date '+%Y%m%d').1+${SHA}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: canary
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}


@@ -1,176 +0,0 @@
name: bun-release
concurrency: release
on:
release:
types:
- published
workflow_dispatch:
inputs:
tag:
type: string
description: The tag to publish
required: true
jobs:
npm:
name: Release to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- "${{ env.TAG }}" publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- id: bun-install
name: Install Dependencies
run: |
bun upgrade --canary
bun install
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: environment
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: |
type=match,pattern=(bun-v)?(\d.\d.\d),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d.\d),group=2,value=${{ env.TAG }}
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
homebrew:
name: Release to Homebrew
runs-on: ubuntu-latest
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: "2.6"
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ env.TAG }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_options: --gpg-sign=${{ steps.setup-gpg.outputs.keyid }}
commit_message: Bun v${{ env.TAG }}
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>


@@ -1,43 +0,0 @@
name: bun-test
on:
push:
branches:
- main
- "test/*"
paths:
- "src/**/*"
- "test/**/*"
pull_request:
branches:
- main
- "test/*"
paths:
- "src/**/*"
- "test/**/*"
workflow_dispatch:
inputs:
release:
type: string
default: canary
jobs:
bun:
name: Bun
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-test
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: ${{ github.event.inputs.release || 'canary' }}
- id: setup-dependencies
name: Setup Dependencies
run: bun install
- id: test
name: Test
run: bun run test


@@ -0,0 +1,60 @@
name: Release bun-types@canary
on:
push:
branches: [main]
paths:
- 'packages/bun-types/**'
jobs:
tests:
name: Build, test, publish canary
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Install Node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install dependencies
run: bun install
- name: Generate package
run: bun run build
- name: Tests
run: bun run test
- name: Set temp version
working-directory: packages/bun-types/dist
run: |
git_hash=$(git rev-parse --short "$GITHUB_SHA")
new_pkg_version="$(bun --version)-canary.${git_hash}"
echo "new_pkg_version"
echo "${new_pkg_version}"
npm version ${new_pkg_version} --no-git-tag-version
- name: Publish to NPM
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_BUN_TYPES_TOKEN }}
# dry-run: true
tag: canary
# - name: Publish on NPM
# working-directory: packages/bun-types/dist
# run: npm publish --access public --tag canary # --dry-run
# env:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# NODE_AUTH_TOKEN: ${{ secrets.NPM_BUN_TYPES_TOKEN }}
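For reference, the `Set temp version` step in the workflow above derives the canary package version from the current Bun version and the short commit SHA before `npm version` is run. A minimal TypeScript sketch of that string construction (the `bunVersion` and `gitHash` inputs are assumed here, e.g. taken from `bun --version` and `git rev-parse --short "$GITHUB_SHA"`):

```ts
// Sketch of the version string computed by the "Set temp version" step.
// bunVersion and gitHash are assumed inputs, not Bun APIs.
function canaryVersion(bunVersion: string, gitHash: string): string {
  // e.g. "0.5.1" and "37f72ef" -> "0.5.1-canary.37f72ef"
  return `${bunVersion}-canary.${gitHash}`;
}

console.log(canaryVersion("0.5.1", "37f72ef"));
```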

.github/workflows/bun-types-release.yml (vendored; new file, 135 lines)

@@ -0,0 +1,135 @@
name: Release bun-types
on:
workflow_dispatch:
jobs:
test-build:
name: Test & Build
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install dependencies
run: bun upgrade --canary; bun install
- name: Build package
run: bun run build
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist/*
if-no-files-found: error
publish-npm:
name: Publish to NPM
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install Node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: "https://registry.npmjs.org"
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist
- name: Publish to NPM
working-directory: packages/bun-types/dist
run: npm publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
publish-gpr:
name: Publish on GPR
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: "https://npm.pkg.github.com/"
scope: "@oven-sh"
- name: Install bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist
- name: Add scope to name
run: bun scripts/gpr.ts
- name: Publish on GPR
run: cd dist/ && npm publish --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# no need for separate releases now
# create-release:
# name: Create Release
# runs-on: ubuntu-latest
# needs: [test-build]
# defaults:
# run:
# working-directory: packages/bun-types
# if: github.repository_owner == 'oven-sh'
# steps:
# - name: Download all artifacts
# uses: actions/download-artifact@v3
# with:
# name: bun-types
# path: packages/bun-types/dist
# - name: Set version
# run: echo "version=$(jq --raw-output '.version' dist/package.json)" >> $GITHUB_ENV
# - name: Create Release
# uses: softprops/action-gh-release@v0.1.14
# with:
# tag_name: "v${{ env.version }}"
# body: "This is the release of bun-types that corresponds to the commit [${{ github.sha }}]"
# token: ${{ secrets.GITHUB_TOKEN }}
# files: |
# dist/*

.gitignore (vendored; 4 changed lines)

@@ -1,6 +1,7 @@
.DS_Store
zig-cache
packages/*/*.wasm
*.wasm
*.o
*.a
profile.json
@@ -109,4 +110,3 @@ misctools/machbench
bun-webkit
src/deps/c-ares/build
src/bun.js/debug-bindings-obj


@@ -1,6 +1,7 @@
{
"arrowParens": "avoid",
"printWidth": 120,
"trailingComma": "all",
"useTabs": false
"tabWidth": 2,
"useTabs": false,
"singleQuote": false,
"bracketSpacing": true,
"trailingComma": "all"
}


@@ -12,7 +12,7 @@ ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=jul27-2
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.11.0-dev.1393+38eebf3c4"
ARG ZIG_VERSION="0.11.0-dev.1314+9856bea34"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}


@@ -8,7 +8,7 @@ ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
ARG BUN_PACKAGES_DIR=${BUN_DIR}/packages
ARG ZIG_VERSION="0.11.0-dev.1393+38eebf3c4"
ARG ZIG_VERSION="0.11.0-dev.1314+9856bea34"
ARG ZIG_FOLDERNAME=zig-linux-${ARCH}-${ZIG_VERSION}
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
@@ -99,7 +99,7 @@ RUN tar -xf ${ZIG_FILENAME} && \
mv ${ZIG_FOLDERNAME} ${ZIG_PATH};
RUN cd $GITHUB_WORKSPACE && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-linux-$BUILDARCH.tar.gz && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-$BUILDARCH.tar.gz && \
tar -xzf bun-webkit-linux-$BUILDARCH.tar.gz && \
rm bun-webkit-linux-$BUILDARCH.tar.gz && \
cat $WEBKIT_OUT_DIR/include/cmakeconfig.h > /dev/null


@@ -35,7 +35,7 @@ DOCKER_BUILDARCH = amd64
BREW_PREFIX_PATH = /usr/local
DEFAULT_MIN_MACOS_VERSION = 10.14
MARCH_NATIVE = -march=$(CPU_TARGET) -mtune=$(CPU_TARGET)
NATIVE_OR_OLD_MARCH = -march=nehalem
NATIVE_OR_OLD_MARCH = -march=westmere
endif
MIN_MACOS_VERSION ?= $(DEFAULT_MIN_MACOS_VERSION)
@@ -509,9 +509,6 @@ npm-install:
$(NPM_CLIENT) install --ignore-scripts --production
print-% : ; @echo $* = $($*)
get-% : ; @echo $($*)
print-version:
@echo $(PACKAGE_JSON_VERSION)
@@ -1394,19 +1391,10 @@ bindings: $(DEBUG_OBJ_DIR) $(DEBUG_OBJ_FILES) $(DEBUG_WEBCORE_OBJ_FILES) $(DEBUG
.PHONY: jsc-bindings-mac
jsc-bindings-mac: bindings
# lInux only
MIMALLOC_VALGRIND_ENABLED_FLAG =
ifeq ($(OS_NAME),linux)
MIMALLOC_VALGRIND_ENABLED_FLAG = -DMI_VALGRIND=ON
endif
.PHONY: mimalloc-debug
mimalloc-debug:
rm -rf $(BUN_DEPS_DIR)/mimalloc/CMakeCache* $(BUN_DEPS_DIR)/mimalloc/CMakeFiles
cd $(BUN_DEPS_DIR)/mimalloc; make clean || echo ""; \
CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS_WITHOUT_RELEASE) ${MIMALLOC_OVERRIDE_FLAG} ${MIMALLOC_VALGRIND_ENABLED_FLAG} \
CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS_WITHOUT_RELEASE) ${MIMALLOC_OVERRIDE_FLAG} \
-DCMAKE_BUILD_TYPE=Debug \
-DMI_DEBUG_FULL=1 \
-DMI_SKIP_COLLECT_ON_EXIT=1 \


@@ -29,12 +29,6 @@ Native: (macOS x64 & Silicon, Linux x64, Windows Subsystem for Linux)
curl -fsSL https://bun.sh/install | bash
```
npm:
```sh
npm install -g bun
```
Homebrew: (MacOS and Linux)
```sh
@@ -422,21 +416,6 @@ Assuming a package.json with a `"clean"` command in `"scripts"`:
}
```
## Using bun as a WebAssembly runner
Bun v0.5.2 added experimental support for the [WebAssembly System Interface](https://github.com/WebAssembly/WASI) (WASI). This means you can run WebAssembly binaries in Bun.
To run a WASI binary, use `bun run`:
```bash
bun run ./my-wasm-app.wasm
# you can omit "run" if the filename ends with .wasm
bun ./my-wasm-app.wasm
```
WASI support is based on [wasi-js](https://github.com/sagemathinc/cowasm/tree/main/packages/wasi-js). Currently, it only supports WASI binaries that use the `wasi_snapshot_preview1` or `wasi_unstable` APIs. Bun's implementation is not optimized for performance, but if this feature gets popular, we'll definitely invest time in making it faster.
## Creating a Discord bot with Bun
### Application Commands
@@ -602,6 +581,7 @@ You can see [Bun's Roadmap](https://github.com/oven-sh/bun/issues/159), but here
| ------------------------------------------------------------------------------------- | -------------- |
| Web Streams with Fetch API | bun.js |
| Web Streams with HTMLRewriter | bun.js |
| Package hoisting that matches npm behavior | bun install |
| Source Maps (unbundled is supported) | JS Bundler |
| Source Maps | CSS |
| JavaScript Minifier | JS Transpiler |
@@ -1275,7 +1255,7 @@ bun install --backend copyfile
**`symlink`** is typically only used for `file:` dependencies (and eventually `link:`) internally. To prevent infinite loops, it skips symlinking the `node_modules` folder.
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own node_modules folder or you pass `--preserve-symlinks` to `node`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has it's own node_modules folder or you pass `--preserve-symlinks` to `node`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
```bash
rm -rf node_modules
@@ -1296,7 +1276,7 @@ buns usage of `Cache-Control` ignores `Age`. This improves performance, but m
### `bun run`
`bun run` is a fast `package.json` script runner and executable runner. Instead of waiting 170ms for your npm client to start every time, you wait 6ms for bun.
`bun run` is a fast `package.json` script runner. Instead of waiting 170ms for your npm client to start every time, you wait 6ms for bun.
By default, `bun run` prints the script that will be invoked:
@@ -1311,7 +1291,7 @@ You can disable that with `--silent`
bun run --silent clean
```
`bun run ${script-name}` runs the equivalent of `npm run script-name`, `npx bin-name`, and `node file-name` all in one command. For example, `bun run dev` runs the `dev` script in `package.json`, which may sometimes spin up non-bun processes.
`bun run ${script-name}` runs the equivalent of `npm run script-name`. For example, `bun run dev` runs the `dev` script in `package.json`, which may sometimes spin up non-bun processes.
`bun run ${javascript-file.js}` will run it with bun, as long as the file doesn't have a node shebang.
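To illustrate the `bun run` behaviour described above, a script name resolves to the matching `package.json` entry and the script's exit code is propagated. A hedged TypeScript sketch that shells out to `bun run` (this assumes `Bun.spawnSync` is available in the Bun version in use; `clean` is a hypothetical script name):

```ts
// Run a package.json script via `bun run`, mirroring `npm run clean`.
// --silent suppresses the echoed script line mentioned above.
const result = Bun.spawnSync(["bun", "run", "--silent", "clean"], {
  stdout: "inherit",
  stderr: "inherit",
});

// Propagate the script's exit code, as `bun run` itself does.
process.exit(result.exitCode ?? 1);
```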
@@ -2867,24 +2847,21 @@ console.log(address); // "2606:2800:220:1:248:1893:25c8:1946"
```
Bun supports three backends for DNS resolution:
- `c-ares` - This is the default on Linux, and it uses the [c-ares](https://c-ares.org/) library to perform DNS resolution.
- `system` - Uses the system's non-blocking DNS resolver, if available. Otherwise, falls back to `getaddrinfo`. This is the default on macOS, and the same as `getaddrinfo` on Linux.
- `getaddrinfo` - Uses the POSIX standard `getaddrinfo` function, which may cause performance issues under concurrent load.
- `c-ares` - This is the default on Linux, and it uses the [c-ares](https://c-ares.org/) library to perform DNS resolution.
- `system` - Uses the system's non-blocking DNS resolver, if available. Otherwise, falls back to `getaddrinfo`. This is the default on macOS, and the same as `getaddrinfo` on Linux.
- `getaddrinfo` - Uses the POSIX standard `getaddrinfo` function, which may cause performance issues under concurrent load.
You can choose a particular backend by specifying `backend` as an option.
```ts
import { dns } from "bun";
const [{ address, ttl }] = await dns.lookup("example.com", {
backend: "c-ares",
});
const [{ address, ttl }] = await dns.lookup("example.com", { backend: "c-ares" });
console.log(address); // "93.184.216.34"
console.log(ttl); // 21237
```
Note: the `ttl` property is only accurate when the `backend` is c-ares. Otherwise, `ttl` will be `0`.
Note: the `ttl` property is only accurate when the `backend` is c-ares. Otherwise, `ttl` will be `0`.
This was added in Bun v0.5.0.
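To make the backend differences above concrete, here is a small sketch that resolves the same host with each backend and prints the returned `ttl` (which, as noted, is only accurate with `c-ares`). It assumes the `dns.lookup` API exactly as shown in the snippet above; `example.com` is a placeholder host:

```ts
import { dns } from "bun";

// Resolve the same host with each backend; ttl is only meaningful for "c-ares".
for (const backend of ["c-ares", "system", "getaddrinfo"] as const) {
  const [{ address, ttl }] = await dns.lookup("example.com", { backend });
  console.log(`${backend}: ${address} (ttl=${ttl})`);
}
```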
@@ -4646,7 +4623,7 @@ It will check the lockfile for the version. If the lockfile doesn't have a versi
Lowlights:
- TypeScript type support isn't implemented yet
- TypeScript type support isn't implmented yet
- patch package support isn't implemented yet
#### Resolving packages
@@ -5007,7 +4984,6 @@ bun also statically links these libraries:
- [`c-ares`](https://github.com/c-ares/c-ares), which is MIT licensed
- `libicu` 72, which can be found here: <https://github.com/unicode-org/icu/blob/main/icu4c/LICENSE>
- A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets), which is Apache 2.0 licensed
- WASI implementation from [`wasi-js`](https://github.com/sagemathinc/cowasm/tree/main/packages/wasi-js), which is BSD 3 clause licensed. Note that wasi-js is originally based on [wasmer-js](https://github.com/wasmerio/wasmer-js), which is MIT licensed. wasmer-js was based on [node-wasi](https://github.com/devsnek/node-wasi) by Gus Caplan (also MIT licensed). You can [read more about the history here](https://github.com/sagemathinc/cowasm/tree/main/packages/wasi-js#history).
For compatibility reasons, these NPM packages are embedded into buns binary and injected if imported.


@@ -1,49 +1,9 @@
import { bench, run } from "mitata";
const N = parseInt(process.argv.slice(2).at(0) || "10", 10);
var isBuffer = new Buffer(0);
var isNOtBuffer = "not a buffer";
bench("Buffer.isBuffer(buffer)", () => {
return Buffer.isBuffer(isBuffer);
});
{
var j = 0;
j += 1;
j += eval("'ok'");
bench("Buffer.isBuffer(string)", () => {
return Buffer.isBuffer(j);
});
}
bench("Buffer.from('short string')", () => {
return Buffer.from("short string");
});
const loooong = "long string".repeat(9999).split("").join(" ");
bench("Buffer.byteLength('long string'.repeat(9999))", () => {
return Buffer.byteLength(loooong);
});
var hundred = new ArrayBuffer(100);
bench("Buffer.from(ArrayBuffer(100))", () => {
return Buffer.from(hundred);
});
var hundredArray = new Uint8Array(100);
bench("Buffer.from(Uint8Array(100))", () => {
return Buffer.from(hundredArray);
});
var empty = new Uint8Array(0);
bench("Buffer.from(Uint8Array(0))", () => {
return Buffer.from(empty);
});
bench("new Buffer(Uint8Array(0))", () => {
return new Buffer(empty);
bench("new Buffer(0)", () => {
return new Buffer(0);
});
bench(`new Buffer(${N})`, () => {
@@ -66,4 +26,4 @@ bench("Buffer.alloc(24_000)", () => {
return Buffer.alloc(24_000);
});
await run({});
await run();


@@ -1,81 +1,71 @@
const EventEmitterNative = require("events").EventEmitter;
const TypedEmitter = require("tiny-typed-emitter").TypedEmitter;
const EventEmitter3 = require("eventemitter3").EventEmitter;
const EventEmitter = require("events").EventEmitter;
import { bench, run } from "mitata";
const event = new Event("hello");
var id = 0;
for (let [EventEmitter, className] of [
[EventEmitterNative, "EventEmitter"],
[TypedEmitter, "TypedEmitter"],
[EventEmitter3, "EventEmitter3"],
]) {
const emitter = new EventEmitter();
emitter.on("hello", (event) => {
event.preventDefault();
const emitter = new EventEmitter();
const event = new Event("hello");
emitter.on("hello", (event) => {
event.preventDefault();
});
var id = 0;
bench("EventEmitter.emit", () => {
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
var monkey = Object.assign({}, EventEmitter.prototype);
monkey.on("hello", (event) => {
event.preventDefault();
});
bench("[monkey] EventEmitter.emit", () => {
var called = false;
monkey.emit("hello", {
preventDefault() {
id++;
called = true;
},
});
bench(`${className}.emit`, () => {
emitter.emit("hello", {
if (!called) {
throw new Error("monkey failed");
}
});
bench("EventEmitter.on x 10_000 (handler)", () => {
var cb = (event) => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
emitter.off("hey", cb);
if (!called) throw new Error("not called");
});
bench("[monkey] EventEmitter.on x 10_000 (handler)", () => {
var cb = () => {
event.preventDefault();
};
monkey.on("hey", cb);
for (let i = 0; i < 10_000; i++)
monkey.emit("hey", {
preventDefault() {
id++;
},
});
});
bench(`${className}.on x 10_000 (handler)`, () => {
var cb = (event) => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
emitter.off("hey", cb);
if (!called) throw new Error("not called");
});
if (EventEmitter !== EventEmitter3) {
var monkey = Object.assign({}, EventEmitter.prototype);
monkey.on("hello", (event) => {
event.preventDefault();
});
bench(`[monkey] ${className}.emit`, () => {
var called = false;
monkey.emit("hello", {
preventDefault() {
id++;
called = true;
},
});
if (!called) {
throw new Error("monkey failed");
}
});
bench(`[monkey] ${className}.on x 10_000 (handler)`, () => {
var cb = () => {
event.preventDefault();
};
monkey.on("hey", cb);
for (let i = 0; i < 10_000; i++)
monkey.emit("hey", {
preventDefault() {
id++;
},
});
monkey.off("hey", cb);
});
}
}
monkey.off("hey", cb);
});
var target = new EventTarget();
target.addEventListener("hello", (event) => {});


@@ -1,25 +0,0 @@
import { bench, run } from "mitata";
// These are no-op C++ functions that are exported to JS.
const lazy = globalThis[Symbol.for("Bun.lazy")];
const noop = lazy("noop");
const fn = noop.function;
const regular = noop.functionRegular;
bench("C++ fn regular", () => {
regular();
});
bench("C++ fn", () => {
fn();
});
bench("C++ getter", () => {
return noop.getterSetter;
});
bench("C++ setter", () => {
noop.getterSetter = 1;
});
run();


@@ -1,6 +0,0 @@
{
"dependencies": {
"eventemitter3": "^5.0.0",
"tiny-typed-emitter": "latest"
}
}


@@ -1,102 +1,58 @@
import { readFileSync } from "fs";
import { dirname } from "path";
import { fileURLToPath } from "url";
import { bench, run, group } from "mitata";
import { createRequire } from "module";
const require = createRequire(import.meta.url);
const esbuild_ = require("esbuild/lib/main");
const swc_ = require("@swc/core");
const babel_ = require("@babel/core");
const code = readFileSync(
dirname(fileURLToPath(import.meta.url)) +
"/../../src/test/fixtures/simple.jsx",
"utf-8",
);
async function getWithName(name) {
let transformSync;
let transform;
let opts;
if (name === "bun") {
const transpiler = new Bun.Transpiler({ loader: "jsx" });
transformSync = transpiler.transformSync.bind(transpiler);
transform = transpiler.transform.bind(transpiler);
opts = "jsx";
} else if (name === "esbuild") {
try {
transformSync = esbuild_.transformSync;
transform = esbuild_.transform;
opts = { loader: "jsx" };
} catch (exception) {
throw exception;
}
} else if (name === "swc") {
try {
transformSync = swc_.transformSync;
transform = swc_.transform;
opts = {
sourceMaps: false,
inlineSourcesContent: false,
jsc: {
target: "es2022",
parser: {
jsx: true,
},
},
};
} catch (exception) {
throw exception;
}
} else if (name === "babel") {
try {
transformSync = babel_.transformSync;
transform = babel_.transform;
opts = {
sourceMaps: false,
presets: ["@babel/preset-react"],
};
} catch (exception) {
throw exception;
}
var transformSync;
var transform;
var opts;
if (process.isBun) {
const transpiler = new Bun.Transpiler({ loader: "jsx" });
transformSync = transpiler.transformSync.bind(transpiler);
transform = transpiler.transform.bind(transpiler);
opts = "jsx";
} else if (process.env["esbuild"]) {
try {
const esbuild = await import("esbuild");
transformSync = esbuild.transformSync;
transform = esbuild.transform;
opts = { loader: "jsx" };
} catch (exception) {
throw exception;
}
} else if (process.env["swc"]) {
try {
const swc = await import("@swc/core");
transformSync = swc.transformSync;
transform = swc.transform;
opts = {
sourceMaps: false,
inlineSourcesContent: false,
jsc: {
target: "es2022",
parser: {
jsx: true,
},
},
};
} catch (exception) {
throw exception;
}
} else if (process.env["babel"]) {
try {
const swc = await import("@babel/core");
transformSync = swc.transformSync;
transform = swc.transform;
opts = {
sourceMaps: false,
presets: [(await import("@babel/preset-react")).default],
};
} catch (exception) {
throw exception;
}
return {
transformSync,
transform,
opts,
name,
};
}
const bun = process.isBun ? await getWithName("bun") : null;
const esbuild = await getWithName("esbuild");
const swc = await getWithName("swc");
const babel = await getWithName("babel");
const code = readFileSync("src/test/fixtures/simple.jsx", "utf8");
const transpilers = [bun, esbuild, swc, babel].filter(Boolean);
group("transformSync (" + ((code.length / 1024) | 0) + " KB jsx file)", () => {
for (let { name, transformSync, opts } of transpilers) {
bench(name, () => {
transformSync(code, opts);
});
}
});
group("tranform x 5", () => {
for (let { name, transform, opts } of transpilers) {
bench(name, async () => {
return Promise.all([
transform(code, opts),
transform(code + "\n", opts),
transform("\n" + code + "\n", opts),
transform("\n" + code + "\n\n", opts),
transform("\n\n" + code + "\n\n", opts),
]);
});
}
});
await run();
if (process.env.ASYNC) {
console.log(await transform(code, opts));
} else {
console.log(transformSync(code, opts));
}

bun.lockb (binary file; not shown)


@@ -1,84 +0,0 @@
# https://hub.docker.com/_/debian
# https://hub.docker.com/_/ubuntu
ARG IMAGE=debian:bullseye-slim
FROM $IMAGE AS base
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest
RUN apt-get update -qq \
&& apt-get install -qq --no-install-recommends \
ca-certificates \
curl \
dirmngr \
gpg \
gpg-agent \
unzip \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& arch="$(dpkg --print-architecture)" \
&& case "${arch##*-}" in \
amd64) build="x64-baseline";; \
arm64) build="aarch64";; \
*) echo "error: unsupported architecture: ($arch)"; exit 1 ;; \
esac \
&& version="$BUN_VERSION" \
&& case "$version" in \
latest | canary | bun-v*) tag="$version"; ;; \
v*) tag="bun-$version"; ;; \
*) tag="bun-v$version"; ;; \
esac \
&& case "$tag" in \
latest) release="latest/download"; ;; \
*) release="download/$tag"; ;; \
esac \
&& curl "https://github.com/oven-sh/bun/releases/$release/bun-linux-$build.zip" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: unknown release: ($tag)" && exit 1) \
&& for key in \
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
; do \
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& gpg --update-trustdb \
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
-fsSLO \
--compressed \
--retry 5 \
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& unzip "bun-linux-$build.zip" \
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
&& chmod +x /usr/local/bin/bun \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& which bun \
&& which bunx \
&& bun --version
FROM $IMAGE
RUN groupadd bun \
--gid 1000 \
&& useradd bun \
--uid 1000 \
--gid bun \
--shell /bin/sh \
--create-home
USER 1000:1000
COPY --from=base --chown=1000:1000 /usr/local/bin/bun /usr/local/bin
COPY --from=base --chown=1000:1000 /usr/local/bin/bunx /usr/local/bin
RUN which bun \
&& which bunx \
&& bun --version
WORKDIR /home/bun/app
CMD ["bun"]


@@ -1,76 +0,0 @@
import { file, serve } from "bun";
import { existsSync, statSync } from "fs";
serve({
fetch(req: Request) {
let pathname = new URL(req.url).pathname.substring(1);
if (pathname == "") {
pathname = import.meta.url.replace("file://", "");
}
if (!existsSync(pathname)) {
return new Response(null, { status: 404 });
}
const stats = statSync(pathname);
// https://github.com/gornostay25/svelte-adapter-bun/blob/master/src/sirv.js
const headers = new Headers({
"Content-Length": "" + stats.size,
"Last-Modified": stats.mtime.toUTCString(),
ETag: `W/"${stats.size}-${stats.mtime.getTime()}"`,
});
if (req.headers.get("if-none-match") === headers.get("ETag")) {
return new Response(null, { status: 304 });
}
const opts = { code: 200, start: 0, end: Infinity, range: false };
if (req.headers.has("range")) {
opts.code = 206;
let [x, y] = req.headers.get("range").replace("bytes=", "").split("-");
let end = (opts.end = parseInt(y, 10) || stats.size - 1);
let start = (opts.start = parseInt(x, 10) || 0);
if (start >= stats.size || end >= stats.size) {
headers.set("Content-Range", `bytes */${stats.size}`);
return new Response(null, {
headers: headers,
status: 416,
});
}
headers.set("Content-Range", `bytes ${start}-${end}/${stats.size}`);
headers.set("Content-Length", "" + (end - start + 1));
headers.set("Accept-Ranges", "bytes");
opts.range = true;
}
if (opts.range) {
return new Response(file(pathname).slice(opts.start, opts.end), {
headers,
status: opts.code,
});
}
return new Response(file(pathname), { headers, status: opts.code });
},
// this is called when fetch() throws or rejects
// error(err: Error) {
// return new Response("uh oh! :(" + String(err.toString()), { status: 500 });
// },
// this boolean enables the bun's default error handler
// sometime after the initial release, it will auto reload as well
development: process.env.NODE_ENV !== "production",
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
// SSL is enabled if these two are set
// certFile: './cert.pem',
// keyFile: './key.pem',
port: 3000, // number or string
hostname: "localhost", // defaults to 0.0.0.0
});


@@ -1,24 +0,0 @@
// A simple way to connect FileSystemRouter to Bun#serve
// run with `bun run index.tsx`
import { renderToReadableStream } from 'react-dom/server'
import { FileSystemRouter } from 'bun'
export default {
port: 3000,
async fetch(request: Request) {
const router = new FileSystemRouter({
dir: process.cwd() + "/pages",
style: "nextjs"
})
const route = router.match(request)
const { default: Root } = await import(route.filePath)
return new Response(
await renderToReadableStream(
<Root {...route.params} />
)
)
}
}


@@ -1,14 +0,0 @@
{
"name": "react-routes",
"module": "index.tsx",
"type": "module",
"devDependencies": {
"@types/react": "^18.0.27",
"@types/react-dom": "^18.0.10",
"bun-types": "^0.4.0"
},
"dependencies": {
"react": "^18.2.0",
"react-dom": "^18.2.0"
}
}


@@ -1,17 +0,0 @@
// reachable from http://localhost:3000/
export default () => (
<html>
<head>
<title>index</title>
</head>
<body>
<h1>
<a href="/one" >one</a>
</h1>
<h1>
<a href="/two" >two</a>
</h1>
</body>
</html>
)


@@ -1,12 +0,0 @@
// reachable from http://localhost:3000/one
export default () => (
<html>
<head>
<title>one</title>
</head>
<body>
<p>one</p>
</body>
</html>
)


@@ -1,12 +0,0 @@
// reachable from http://localhost:3000/two
export default () => (
<html>
<head>
<title>two</title>
</head>
<body>
<p>two</p>
</body>
</html>
)


@@ -1,20 +0,0 @@
{
"compilerOptions": {
"lib": [
"ESNext"
],
"module": "esnext",
"target": "esnext",
"moduleResolution": "nodenext",
"strict": false,
"downlevelIteration": true,
"skipLibCheck": true,
"jsx": "preserve",
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"types": [
"bun-types" // add Bun global
]
}
}


@@ -1,12 +1,14 @@
{
"dependencies": {
"eslint": "^8.20.0",
"eslint-config-prettier": "^8.5.0",
"express": "^4.18.2",
"mitata": "^0.1.3",
"peechy": "latest",
"prettier": "^2.4.1",
"react": "next",
"react-dom": "next",
"eslint": "^8.20.0",
"eslint-config-prettier": "^8.5.0",
"prettier": "^2.4.1",
"svelte": "^3.52.0",
"typescript": "latest"
},
"private": true,
@@ -23,7 +25,7 @@
"@types/react": "^18.0.25",
"@typescript-eslint/eslint-plugin": "^5.31.0",
"@typescript-eslint/parser": "^5.31.0",
"bun-webkit": "0.0.1-dd868651f5c801985460c1128ae196d1edca9925"
"bun-webkit": "latest"
},
"version": "0.0.0"
}

packages/bun-error/package-lock.json (generated; new file, 230 lines)

@@ -0,0 +1,230 @@
{
"name": "bun-error",
"version": "1.0.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"@types/prop-types": {
"version": "15.7.5",
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz",
"integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==",
"dev": true
},
"@types/react": {
"version": "17.0.47",
"resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.47.tgz",
"integrity": "sha512-mk0BL8zBinf2ozNr3qPnlu1oyVTYq+4V7WA76RgxUAtf0Em/Wbid38KN6n4abEkvO4xMTBWmnP1FtQzgkEiJoA==",
"dev": true,
"requires": {
"@types/prop-types": "*",
"@types/scheduler": "*",
"csstype": "^3.0.2"
}
},
"@types/scheduler": {
"version": "0.16.2",
"resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz",
"integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==",
"dev": true
},
"csstype": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.0.tgz",
"integrity": "sha512-uX1KG+x9h5hIJsaKR9xHUeUraxf8IODOwq9JLNPq6BwB04a/xgpq3rcx47l5BZu5zBPlgD342tdke3Hom/nJRA==",
"dev": true
},
"esbuild": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.48.tgz",
"integrity": "sha512-w6N1Yn5MtqK2U1/WZTX9ZqUVb8IOLZkZ5AdHkT6x3cHDMVsYWC7WPdiLmx19w3i4Rwzy5LqsEMtVihG3e4rFzA==",
"requires": {
"esbuild-android-64": "0.14.48",
"esbuild-android-arm64": "0.14.48",
"esbuild-darwin-64": "0.14.48",
"esbuild-darwin-arm64": "0.14.48",
"esbuild-freebsd-64": "0.14.48",
"esbuild-freebsd-arm64": "0.14.48",
"esbuild-linux-32": "0.14.48",
"esbuild-linux-64": "0.14.48",
"esbuild-linux-arm": "0.14.48",
"esbuild-linux-arm64": "0.14.48",
"esbuild-linux-mips64le": "0.14.48",
"esbuild-linux-ppc64le": "0.14.48",
"esbuild-linux-riscv64": "0.14.48",
"esbuild-linux-s390x": "0.14.48",
"esbuild-netbsd-64": "0.14.48",
"esbuild-openbsd-64": "0.14.48",
"esbuild-sunos-64": "0.14.48",
"esbuild-windows-32": "0.14.48",
"esbuild-windows-64": "0.14.48",
"esbuild-windows-arm64": "0.14.48"
}
},
"esbuild-android-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.14.48.tgz",
"integrity": "sha512-3aMjboap/kqwCUpGWIjsk20TtxVoKck8/4Tu19rubh7t5Ra0Yrpg30Mt1QXXlipOazrEceGeWurXKeFJgkPOUg==",
"optional": true
},
"esbuild-android-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.14.48.tgz",
"integrity": "sha512-vptI3K0wGALiDq+EvRuZotZrJqkYkN5282iAfcffjI5lmGG9G1ta/CIVauhY42MBXwEgDJkweiDcDMRLzBZC4g==",
"optional": true
},
"esbuild-darwin-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.14.48.tgz",
"integrity": "sha512-gGQZa4+hab2Va/Zww94YbshLuWteyKGD3+EsVon8EWTWhnHFRm5N9NbALNbwi/7hQ/hM1Zm4FuHg+k6BLsl5UA==",
"optional": true
},
"esbuild-darwin-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.48.tgz",
"integrity": "sha512-bFjnNEXjhZT+IZ8RvRGNJthLWNHV5JkCtuOFOnjvo5pC0sk2/QVk0Qc06g2PV3J0TcU6kaPC3RN9yy9w2PSLEA==",
"optional": true
},
"esbuild-freebsd-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.48.tgz",
"integrity": "sha512-1NOlwRxmOsnPcWOGTB10JKAkYSb2nue0oM1AfHWunW/mv3wERfJmnYlGzL3UAOIUXZqW8GeA2mv+QGwq7DToqA==",
"optional": true
},
"esbuild-freebsd-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.48.tgz",
"integrity": "sha512-gXqKdO8wabVcYtluAbikDH2jhXp+Klq5oCD5qbVyUG6tFiGhrC9oczKq3vIrrtwcxDQqK6+HDYK8Zrd4bCA9Gw==",
"optional": true
},
"esbuild-linux-32": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.14.48.tgz",
"integrity": "sha512-ghGyDfS289z/LReZQUuuKq9KlTiTspxL8SITBFQFAFRA/IkIvDpnZnCAKTCjGXAmUqroMQfKJXMxyjJA69c/nQ==",
"optional": true
},
"esbuild-linux-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.48.tgz",
"integrity": "sha512-vni3p/gppLMVZLghI7oMqbOZdGmLbbKR23XFARKnszCIBpEMEDxOMNIKPmMItQrmH/iJrL1z8Jt2nynY0bE1ug==",
"optional": true
},
"esbuild-linux-arm": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.14.48.tgz",
"integrity": "sha512-+VfSV7Akh1XUiDNXgqgY1cUP1i2vjI+BmlyXRfVz5AfV3jbpde8JTs5Q9sYgaoq5cWfuKfoZB/QkGOI+QcL1Tw==",
"optional": true
},
"esbuild-linux-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.48.tgz",
"integrity": "sha512-3CFsOlpoxlKPRevEHq8aAntgYGYkE1N9yRYAcPyng/p4Wyx0tPR5SBYsxLKcgPB9mR8chHEhtWYz6EZ+H199Zw==",
"optional": true
},
"esbuild-linux-mips64le": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.48.tgz",
"integrity": "sha512-cs0uOiRlPp6ymknDnjajCgvDMSsLw5mST2UXh+ZIrXTj2Ifyf2aAP3Iw4DiqgnyYLV2O/v/yWBJx+WfmKEpNLA==",
"optional": true
},
"esbuild-linux-ppc64le": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.48.tgz",
"integrity": "sha512-+2F0vJMkuI0Wie/wcSPDCqXvSFEELH7Jubxb7mpWrA/4NpT+/byjxDz0gG6R1WJoeDefcrMfpBx4GFNN1JQorQ==",
"optional": true
},
"esbuild-linux-riscv64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.48.tgz",
"integrity": "sha512-BmaK/GfEE+5F2/QDrIXteFGKnVHGxlnK9MjdVKMTfvtmudjY3k2t8NtlY4qemKSizc+QwyombGWTBDc76rxePA==",
"optional": true
},
"esbuild-linux-s390x": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.48.tgz",
"integrity": "sha512-tndw/0B9jiCL+KWKo0TSMaUm5UWBLsfCKVdbfMlb3d5LeV9WbijZ8Ordia8SAYv38VSJWOEt6eDCdOx8LqkC4g==",
"optional": true
},
"esbuild-netbsd-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.48.tgz",
"integrity": "sha512-V9hgXfwf/T901Lr1wkOfoevtyNkrxmMcRHyticybBUHookznipMOHoF41Al68QBsqBxnITCEpjjd4yAos7z9Tw==",
"optional": true
},
"esbuild-openbsd-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.48.tgz",
"integrity": "sha512-+IHf4JcbnnBl4T52egorXMatil/za0awqzg2Vy6FBgPcBpisDWT2sVz/tNdrK9kAqj+GZG/jZdrOkj7wsrNTKA==",
"optional": true
},
"esbuild-sunos-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.14.48.tgz",
"integrity": "sha512-77m8bsr5wOpOWbGi9KSqDphcq6dFeJyun8TA+12JW/GAjyfTwVtOnN8DOt6DSPUfEV+ltVMNqtXUeTeMAxl5KA==",
"optional": true
},
"esbuild-windows-32": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.14.48.tgz",
"integrity": "sha512-EPgRuTPP8vK9maxpTGDe5lSoIBHGKO/AuxDncg5O3NkrPeLNdvvK8oywB0zGaAZXxYWfNNSHskvvDgmfVTguhg==",
"optional": true
},
"esbuild-windows-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.14.48.tgz",
"integrity": "sha512-YmpXjdT1q0b8ictSdGwH3M8VCoqPpK1/UArze3X199w6u8hUx3V8BhAi1WjbsfDYRBanVVtduAhh2sirImtAvA==",
"optional": true
},
"esbuild-windows-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.48.tgz",
"integrity": "sha512-HHaOMCsCXp0rz5BT2crTka6MPWVno121NKApsGs/OIW5QC0ggC69YMGs1aJct9/9FSUF4A1xNE/cLvgB5svR4g==",
"optional": true
},
"js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
},
"loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
"integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
"requires": {
"js-tokens": "^3.0.0 || ^4.0.0"
}
},
"object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
"integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM="
},
"react": {
"version": "17.0.2",
"resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz",
"integrity": "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==",
"requires": {
"loose-envify": "^1.1.0",
"object-assign": "^4.1.1"
}
},
"react-dom": {
"version": "17.0.2",
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz",
"integrity": "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==",
"requires": {
"loose-envify": "^1.1.0",
"object-assign": "^4.1.1",
"scheduler": "^0.20.2"
}
},
"scheduler": {
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz",
"integrity": "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==",
"requires": {
"loose-envify": "^1.1.0",
"object-assign": "^4.1.1"
}
}
}
}

View File

@@ -1,6 +0,0 @@
.DS_Store
.env
node_modules
/npm/**/bin
/npm/**/*.js
/npm/**/.npmrc

View File

@@ -1,2 +0,0 @@
@oven:registry=https://registry.npmjs.org/
//registry.npmjs.org/:_authToken=${NPM_TOKEN}

View File

@@ -1,15 +0,0 @@
# bun-release
Scripts that release Bun to npm, Docker Hub, Homebrew, etc.
### Running
```sh
bun run npm # build assets for the latest release
bun run npm -- <release> # build assets for the provided release
bun run npm -- <release> [dry-run|publish] # build and publish assets to npm
```
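For example, a dry-run for a specific release tag might look like this (the tag value below is illustrative):
```sh
# Build the npm packages for the bun-v0.5.3 release, then run `npm publish --dry-run`
bun run npm -- 0.5.3 dry-run
```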
### Credits
- [esbuild](https://github.com/evanw/esbuild), for its npm scripts, on which this was largely based.

Binary file not shown.

View File

@@ -1,3 +0,0 @@
# Bun
This is the macOS arm64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-darwin-aarch64",
"version": "0.5.3",
"description": "This is the macOS arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"darwin"
],
"cpu": [
"arm64"
]
}

View File

@@ -1,5 +0,0 @@
# Bun
This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh
_Note: "Baseline" builds are for machines that do not support [AVX2](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions) instructions._

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-darwin-x64-baseline",
"version": "0.5.3",
"description": "This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"darwin"
],
"cpu": [
"x64"
]
}

View File

@@ -1,3 +0,0 @@
# Bun
This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-darwin-x64",
"version": "0.5.3",
"description": "This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"darwin"
],
"cpu": [
"x64"
]
}

View File

@@ -1,3 +0,0 @@
# Bun
This is the Linux arm64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-linux-aarch64",
"version": "0.5.3",
"description": "This is the Linux arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"linux"
],
"cpu": [
"arm64"
]
}

View File

@@ -1,5 +0,0 @@
# Bun
This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh
_Note: "Baseline" builds are for machines that do not support [AVX2](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions) instructions._

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-linux-x64-baseline",
"version": "0.5.3",
"description": "This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"linux"
],
"cpu": [
"x64"
]
}

View File

@@ -1,3 +0,0 @@
# Bun
This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-linux-x64",
"version": "0.5.3",
"description": "This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"linux"
],
"cpu": [
"x64"
]
}

View File

@@ -1,31 +0,0 @@
# Bun
Bun is a fast all-in-one JavaScript runtime. https://bun.sh
### Install
```sh
npm install -g bun
```
### Upgrade
```sh
bun upgrade
```
### Supported Platforms
- [macOS, arm64 (Apple Silicon)](https://www.npmjs.com/package/@oven/bun-darwin-aarch64)
- [macOS, x64](https://www.npmjs.com/package/@oven/bun-darwin-x64)
- [macOS, x64 (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-darwin-x64-baseline)
- [Linux, arm64](https://www.npmjs.com/package/@oven/bun-linux-aarch64)
- [Linux, x64](https://www.npmjs.com/package/@oven/bun-linux-x64)
- [Linux, x64 (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-linux-x64-baseline)
- [Windows (using Windows Subsystem for Linux, aka. "WSL")](https://relatablecode.com/how-to-set-up-bun-on-a-windows-machine)
### Future Platforms
- [Windows](https://github.com/oven-sh/bun/issues/43)
- Unix-like variants such as FreeBSD, OpenBSD, etc.
- Android and iOS

View File

@@ -1,42 +0,0 @@
{
"name": "bun",
"version": "0.5.3",
"description": "Bun is a fast all-in-one JavaScript runtime.",
"keywords": [
"bun",
"bun.js",
"node",
"node.js",
"runtime",
"bundler",
"transpiler",
"typescript"
],
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"bin": {
"bun": "bin/bun",
"bunx": "bin/bun"
},
"repository": "https://github.com/oven-sh/bun",
"scripts": {
"postinstall": "node install.js"
},
"optionalDependencies": {
"@oven/bun-darwin-aarch64": "0.5.3",
"@oven/bun-darwin-x64": "0.5.3",
"@oven/bun-darwin-x64-baseline": "0.5.3",
"@oven/bun-linux-aarch64": "0.5.3",
"@oven/bun-linux-x64": "0.5.3",
"@oven/bun-linux-x64-baseline": "0.5.3"
},
"os": [
"darwin",
"linux"
],
"cpu": [
"arm64",
"x64"
]
}

View File

@@ -1,19 +0,0 @@
{
"private": true,
"dependencies": {
"esbuild": "^0.17.3",
"jszip": "^3.10.1",
"octokit": "^2.0.14"
},
"devDependencies": {
"@octokit/types": "^8.1.1",
"bun-types": "^0.4.0",
"prettier": "^2.8.2"
},
"scripts": {
"format": "prettier --write src scripts",
"get-version": "bun scripts/get-version.ts",
"upload-npm": "bun scripts/upload-npm.ts",
"upload-assets": "bun scripts/upload-assets.ts"
}
}

View File

@@ -1,5 +0,0 @@
import { log } from "../src/console";
import { getSemver } from "../src/github";
log(await getSemver(process.argv[2]));
process.exit(0); // HACK

View File

@@ -1,13 +0,0 @@
import { importBun } from "../src/npm/install";
import { execFileSync } from "child_process";
importBun()
.then(bun => {
return execFileSync(bun, process.argv.slice(2), {
stdio: "inherit",
});
})
.catch(error => {
console.error(error);
process.exit(1);
});

View File

@@ -1,10 +0,0 @@
import { importBun, optimizeBun } from "../src/npm/install";
importBun()
.then(path => {
optimizeBun(path);
})
.catch(error => {
console.error(error);
process.exit(1);
});

View File

@@ -1,92 +0,0 @@
import { getRelease, uploadAsset } from "../src/github";
import { fetch } from "../src/fetch";
import { spawn } from "../src/spawn";
import { confirm, exit, log, stdin, warn } from "../src/console";
import { hash, join, rm, tmp, write, basename, blob } from "../src/fs";
const [tag, ...paths] = process.argv.slice(2);
if (!tag) {
exit("Invalid arguments: [tag] [...assets]");
}
const { tag_name, assets } = await getRelease(tag);
log("Release:", tag_name, "\n");
log("Existing assets:\n", ...assets.map(({ name }) => `- ${name}\n`));
log("Updating assets:\n", ...paths.map(path => `+ ${basename(path)}\n`));
await confirm();
log("Hashing assets...\n");
const existing: Map<string, string> = new Map();
for (const { name, browser_download_url } of assets) {
if (name.startsWith("SHASUMS256.txt")) {
continue;
}
const response = await fetch(browser_download_url);
const buffer = Buffer.from(await response.arrayBuffer());
existing.set(name, await hash(buffer));
}
const updated: Map<string, string> = new Map();
for (const path of paths) {
const name = basename(path);
updated.set(name, await hash(path));
}
log(
"Unchanged hashes:\n",
...Array.from(existing.entries())
.filter(([name]) => !updated.has(name))
.map(([name, sha256]) => ` - ${sha256} => ${name}\n`),
);
log("Changed hashes:\n", ...Array.from(updated.entries()).map(([name, sha256]) => ` + ${sha256} => ${name}\n`));
await confirm();
log("Signing assets...\n");
const cwd = tmp();
const path = join(cwd, "SHASUMS256.txt");
const signedPath = `${path}.asc`;
write(
path,
[...Array.from(updated.entries()), ...Array.from(existing.entries()).filter(([name]) => !updated.has(name))]
.sort(([a], [b]) => a.localeCompare(b))
.map(([name, sha256]) => `${sha256} ${name}`)
.join("\n"),
);
const { stdout: keys } = spawn("gpg", ["--list-secret-keys", "--keyid-format", "long"]);
const verifiedKeys = [
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59", // robobun@oven.sh
];
if (!verifiedKeys.find(key => keys.includes(key))) {
warn("Signature is probably wrong, key not found: robobun@oven.sh");
}
const passphrase = await stdin("Passphrase:");
log();
const { exitCode, stdout, stderr } = spawn(
"gpg",
["--pinentry-mode", "loopback", "--passphrase-fd", "0", "--clearsign", "--output", signedPath, path],
{
// @ts-ignore
input: passphrase,
stdout: "inherit",
stderr: "inherit",
},
);
if (exitCode !== 0) {
exit(stdout || stderr);
}
const uploads = [...paths, path, signedPath];
log("Uploading assets:\n", ...uploads.map(path => ` + ${basename(path)}\n`));
await confirm();
for (const path of uploads) {
const name = basename(path);
await uploadAsset(tag_name, name, blob(path));
}
try {
rm(cwd);
} catch {
warn("Failed to cleanup:", cwd, "\n");
}
log("Done");
process.exit(0); // FIXME

View File

@@ -1,164 +0,0 @@
import { join, copy, exists, chmod, write, writeJson } from "../src/fs";
import { fetch } from "../src/fetch";
import { spawn } from "../src/spawn";
import type { Platform } from "../src/platform";
import { platforms } from "../src/platform";
import { getSemver } from "../src/github";
import { getRelease } from "../src/github";
import type { BuildOptions } from "esbuild";
import { buildSync, formatMessagesSync } from "esbuild";
import type { JSZipObject } from "jszip";
import { loadAsync } from "jszip";
import { debug, log, error } from "../src/console";
const module = "bun";
const owner = "@oven";
let version: string;
const [tag, action] = process.argv.slice(2);
await build(tag);
if (action === "publish") {
await publish();
} else if (action === "dry-run") {
await publish(true);
} else if (action) {
throw new Error(`Unknown action: ${action}`);
}
process.exit(0); // HACK
async function build(tag?: string): Promise<void> {
const release = await getRelease(tag);
version = await getSemver(release.tag_name);
await buildRootModule();
for (const platform of platforms) {
await buildModule(release, platform);
}
}
async function publish(dryRun?: boolean): Promise<void> {
const modules = platforms.map(({ bin }) => `${owner}/${bin}`);
modules.push(module);
for (const module of modules) {
publishModule(module, dryRun);
}
}
async function buildRootModule() {
log("Building:", `${module}@${version}`);
const cwd = join("npm", module);
const define = {
version: `"${version}"`,
module: `"${module}"`,
owner: `"${owner}"`,
};
bundle(join("scripts", "npm-postinstall.ts"), join(cwd, "install.js"), {
define,
});
bundle(join("scripts", "npm-exec.ts"), join(cwd, "bin", "bun"), {
define,
banner: {
js: "#!/usr/bin/env node",
},
});
const os = [...new Set(platforms.map(({ os }) => os))];
const cpu = [...new Set(platforms.map(({ arch }) => arch))];
writeJson(join(cwd, "package.json"), {
name: module,
version: version,
scripts: {
postinstall: "node install.js",
},
optionalDependencies: Object.fromEntries(platforms.map(({ bin }) => [`${owner}/${bin}`, version])),
bin: {
bun: "bin/bun",
bunx: "bin/bun",
},
os,
cpu,
});
if (exists(".npmrc")) {
copy(".npmrc", join(cwd, ".npmrc"));
}
}
async function buildModule(
release: Awaited<ReturnType<typeof getRelease>>,
{ bin, exe, os, arch }: Platform,
): Promise<void> {
const module = `${owner}/${bin}`;
log("Building:", `${module}@${version}`);
const asset = release.assets.find(({ name }) => name === `${bin}.zip`);
if (!asset) {
error(`No asset found: ${bin}`);
return;
}
const bun = await extractFromZip(asset.browser_download_url, `${bin}/bun`);
const cwd = join("npm", module);
write(join(cwd, exe), await bun.async("arraybuffer"));
chmod(join(cwd, exe), 0o755);
writeJson(join(cwd, "package.json"), {
name: module,
version: version,
preferUnplugged: true,
os: [os],
cpu: [arch],
});
if (exists(".npmrc")) {
copy(".npmrc", join(cwd, ".npmrc"));
}
}
function publishModule(name: string, dryRun?: boolean): void {
log(dryRun ? "Dry-run Publishing:" : "Publishing:", `${name}@${version}`);
const { exitCode, stdout, stderr } = spawn(
"npm",
[
"publish",
"--access",
"public",
"--tag",
version.includes("canary") ? "canary" : "latest",
...(dryRun ? ["--dry-run"] : []),
],
{
cwd: join("npm", name),
},
);
if (exitCode !== 0) {
error(stderr || stdout);
}
}
async function extractFromZip(url: string, filename: string): Promise<JSZipObject> {
const response = await fetch(url);
const buffer = await response.arrayBuffer();
const zip = await loadAsync(buffer);
for (const [name, file] of Object.entries(zip.files)) {
if (!file.dir && name.startsWith(filename)) {
return file;
}
}
debug("Found files:", Object.keys(zip.files));
throw new Error(`File not found: ${filename}`);
}
function bundle(src: string, dst: string, options: BuildOptions = {}): void {
const { errors } = buildSync({
bundle: true,
treeShaking: true,
keepNames: true,
minifySyntax: true,
pure: ["console.debug"],
platform: "node",
target: "es6",
format: "cjs",
entryPoints: [src],
outfile: dst,
...options,
});
if (errors?.length) {
const messages = formatMessagesSync(errors, { kind: "error" });
throw new Error(messages.join("\n"));
}
}

View File

@@ -1,75 +0,0 @@
import { isatty } from "tty";
import { createInterface } from "readline";
export const isAction = !!process.env["GITHUB_ACTION"];
export const isDebug =
process.env["DEBUG"] === "1" || process.env["LOG_LEVEL"] === "debug" || process.env["RUNNER_DEBUG"] === "1";
export function debug(...message: any[]): void {
if (isAction) {
console.debug("::debug::", ...message);
} else if (isDebug) {
console.debug(...message);
}
}
export function log(...message: any[]): void {
console.log(...message);
}
export function warn(...message: any[]): void {
if (isAction) {
console.warn("::warning::", ...message);
} else {
console.warn(...message);
}
}
export function error(...message: any[]): void {
if (isAction) {
console.error("::error::", ...message);
} else {
console.error(...message);
}
}
export function exit(...message: any[]): never {
error(...message);
process.exit(1);
}
export function isTty(): boolean {
return isatty(process.stdout.fd);
}
export async function stdin(question: string): Promise<string> {
if (isTty()) {
return prompt(question) || "";
}
const reader = createInterface({
input: process.stdin,
terminal: false,
});
let buffer = "";
reader.on("line", line => {
buffer += line;
});
return new Promise(resolve => {
reader.once("close", () => resolve(buffer));
});
}
export async function confirm(message?: string): Promise<void> {
if (!isTty()) {
return;
}
const question = message ?? "Confirm?";
switch (prompt(`${question} [y/n]`)) {
case "y":
case "Y":
log();
return;
}
exit();
}

View File

@@ -1,70 +0,0 @@
import { debug, isDebug } from "./console";
export const fetch = "fetch" in globalThis ? webFetch : nodeFetch;
type Options = RequestInit & { assert?: boolean };
async function webFetch(url: string, options: Options = {}): Promise<Response> {
debug("fetch request", url, options);
const response = await globalThis.fetch(url, options, { verbose: isDebug });
debug("fetch response", response);
if (options?.assert !== false && !isOk(response.status)) {
try {
debug(await response.text());
} catch {}
throw new Error(`${response.status}: ${url}`);
}
return response;
}
async function nodeFetch(url: string, options: Options = {}): Promise<Response> {
const { get } = await import("node:http");
return new Promise((resolve, reject) => {
get(url, response => {
debug("http.get", url, response.statusCode);
const status = response.statusCode ?? 501;
if (response.headers.location && isRedirect(status)) {
return nodeFetch(response.headers.location, options).then(resolve, reject);
}
if (options?.assert !== false && !isOk(status)) {
return reject(new Error(`${status}: ${url}`));
}
const body: Buffer[] = [];
response.on("data", chunk => {
body.push(chunk);
});
response.on("end", () => {
resolve({
ok: isOk(status),
status,
async arrayBuffer() {
return Buffer.concat(body).buffer as ArrayBuffer;
},
async text() {
return Buffer.concat(body).toString("utf-8");
},
async json() {
const text = Buffer.concat(body).toString("utf-8");
return JSON.parse(text);
},
} as Response);
});
}).on("error", reject);
});
}
function isOk(status: number): boolean {
return status >= 200 && status <= 204;
}
function isRedirect(status: number): boolean {
switch (status) {
case 301: // Moved Permanently
case 308: // Permanent Redirect
case 302: // Found
case 307: // Temporary Redirect
case 303: // See Other
return true;
}
return false;
}

View File

@@ -1,159 +0,0 @@
import path from "path";
import fs from "fs";
import os from "os";
import crypto from "crypto";
import { debug } from "./console";
export function join(...paths: (string | string[])[]): string {
return path.join(...paths.flat(2));
}
export function basename(...paths: (string | string[])[]): string {
return path.basename(join(...paths));
}
export function tmp(): string {
const tmpdir = process.env["RUNNER_TEMP"] ?? os.tmpdir();
const dir = fs.mkdtempSync(join(tmpdir, "bun-"));
debug("tmp", dir);
return dir;
}
export function rm(path: string): void {
debug("rm", path);
try {
fs.rmSync(path, { recursive: true });
return;
} catch (error) {
debug("fs.rmSync failed", error);
// Did not exist before Node.js v14.
// Attempt again with older, slower implementation.
}
let stats: fs.Stats;
try {
stats = fs.lstatSync(path);
} catch (error) {
debug("fs.lstatSync failed", error);
// The file was likely deleted, so return early.
return;
}
if (!stats.isDirectory()) {
fs.unlinkSync(path);
return;
}
try {
fs.rmdirSync(path, { recursive: true });
return;
} catch (error) {
debug("fs.rmdirSync failed", error);
// Recursive flag did not exist before Node.js X.
// Attempt again with older, slower implementation.
}
for (const filename of fs.readdirSync(path)) {
rm(join(path, filename));
}
fs.rmdirSync(path);
}
export function rename(path: string, newPath: string): void {
debug("rename", path, newPath);
try {
fs.renameSync(path, newPath);
return;
} catch (error) {
debug("fs.renameSync failed", error);
// If there is an error, delete the new path and try again.
}
try {
rm(newPath);
} catch (error) {
debug("rm failed", error);
// The path could have been deleted already.
}
fs.renameSync(path, newPath);
}
export function write(dst: string, content: string | ArrayBuffer | ArrayBufferView): void {
debug("write", dst);
try {
fs.writeFileSync(dst, content);
return;
} catch (error) {
debug("fs.writeFileSync failed", error);
// If there is an error, ensure the parent directory
// exists and try again.
try {
fs.mkdirSync(path.dirname(dst), { recursive: true });
} catch (error) {
debug("fs.mkdirSync failed", error);
// The directory could have been created already.
}
fs.writeFileSync(dst, content);
}
}
export function writeJson(path: string, json: object, force?: boolean): void {
let value = json;
if (!force && exists(path)) {
try {
const existing = JSON.parse(read(path));
value = {
...existing,
...json,
};
} catch {
value = json;
}
}
write(path, `${JSON.stringify(value, undefined, 2)}\n`);
}
export function read(path: string): string {
debug("read", path);
return fs.readFileSync(path, "utf-8");
}
export function blob(path: string): Blob {
debug("blob", path);
if ("Bun" in globalThis) {
return Bun.file(path);
}
const buffer = fs.readFileSync(path);
return new Blob([buffer], {
type: path.endsWith(".zip") ? "application/zip" : path.endsWith(".txt") ? "text/plain" : "application/octet-stream",
});
}
export function hash(content: string | crypto.BinaryLike): string {
debug("hash", content);
return crypto
.createHash("sha256")
.update(typeof content === "string" ? fs.readFileSync(content) : content)
.digest("hex");
}
export function chmod(path: string, mode: fs.Mode): void {
debug("chmod", path, mode);
fs.chmodSync(path, mode);
}
export function copy(path: string, newPath: string): void {
debug("copy", path, newPath);
try {
fs.copyFileSync(path, newPath);
return;
} catch (error) {
debug("fs.copyFileSync failed", error);
}
write(newPath, read(path));
}
export function exists(path: string): boolean {
debug("exists", path);
try {
return fs.existsSync(path);
} catch (error) {
debug("fs.existsSync failed", error);
}
return false;
}

View File

@@ -1,120 +0,0 @@
import type { Endpoints, RequestParameters, Route } from "@octokit/types";
import { Octokit } from "octokit";
import { fetch } from "./fetch";
import { debug, log, warn, error } from "./console";
const [owner, repo] = process.env["GITHUB_REPOSITORY"]?.split("/") ?? ["oven-sh", "bun"];
const octokit = new Octokit({
auth: process.env["GITHUB_TOKEN"],
request: {
fetch,
},
log: {
debug,
info: log,
warn,
error,
},
});
export async function github<R extends Route>(
url: R | keyof Endpoints,
options?: Omit<
R extends keyof Endpoints ? Endpoints[R]["parameters"] & RequestParameters : RequestParameters,
"owner" | "repo"
>,
): Promise<R extends keyof Endpoints ? Endpoints[R]["response"]["data"] : unknown> {
// @ts-ignore
const { data } = await octokit.request(url, {
owner,
repo,
...options,
});
return data;
}
export async function getRelease(tag?: string) {
if (!tag) {
return github("GET /repos/{owner}/{repo}/releases/latest");
}
return github("GET /repos/{owner}/{repo}/releases/tags/{tag}", {
tag: formatTag(tag),
});
}
export async function uploadAsset(tag: string, name: string, blob: Blob) {
const release = await getRelease(tag);
const asset = release.assets.find(asset => asset.name === name);
// Github requires that existing assets are deleted before uploading
// a new asset, but does not provide a rename or re-upload API?!?
if (asset) {
await github("DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}", {
asset_id: asset.id,
});
}
return github("POST {origin}/repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", {
baseUrl: "https://uploads.github.com",
release_id: release.id,
name,
headers: {
"content-type": blob.type,
"content-length": blob.size,
},
data: Buffer.from(await blob.arrayBuffer()),
});
}
export async function downloadAsset(tag: string, name: string): Promise<Blob> {
const release = await getRelease(tag);
const asset = release.assets.find(asset => asset.name === name);
if (!asset) {
throw new Error(`Asset not found: ${name}`);
}
const response = await fetch(asset.browser_download_url);
return response.blob();
}
export async function getSha(tag: string, format?: "short" | "long") {
const ref = formatTag(tag);
const {
object: { sha },
} = await github("GET /repos/{owner}/{repo}/git/ref/{ref}", {
ref: ref === "canary" ? "heads/main" : `tags/${ref}`,
});
return format === "short" ? sha.substring(0, 7) : sha;
}
export async function getBuild(): Promise<number> {
const date = new Date().toISOString().split("T")[0].replace(/-/g, "");
const response = await fetch("https://registry.npmjs.org/-/package/bun/dist-tags");
const { canary }: { canary: string } = await response.json();
if (!canary.includes(date)) {
return 1;
}
const match = /canary.[0-9]{8}\.([0-9]+)+?/.exec(canary);
return match ? 1 + parseInt(match[1]) : 1;
}
export async function getSemver(tag?: string, build?: number): Promise<string> {
const { tag_name } = await getRelease(tag);
if (tag_name !== "canary") {
return tag_name.replace("bun-v", "");
}
if (build === undefined) {
build = await getBuild();
}
const sha = await getSha(tag_name, "short");
const date = new Date().toISOString().split("T")[0].replace(/-/g, "");
return `${Bun.version}-canary.${date}.${build}+${sha}`;
}
export function formatTag(tag: string): string {
if (tag === "canary" || tag.startsWith("bun-v")) {
return tag;
}
if (tag.startsWith("v")) {
return tag.slice(1);
}
return `bun-v${tag}`;
}

View File

@@ -1,144 +0,0 @@
import { fetch } from "../fetch";
import { spawn } from "../spawn";
import { chmod, join, rename, rm, tmp, write } from "../fs";
import { unzipSync } from "zlib";
import type { Platform } from "../platform";
import { os, arch, supportedPlatforms } from "../platform";
import { debug, error } from "../console";
declare const version: string;
declare const module: string;
declare const owner: string;
export async function importBun(): Promise<string> {
if (!supportedPlatforms.length) {
throw new Error(`Unsupported platform: ${os} ${arch}`);
}
for (const platform of supportedPlatforms) {
try {
return await requireBun(platform);
} catch (error) {
debug("requireBun failed", error);
}
}
throw new Error(`Failed to install package "${module}"`);
}
async function requireBun(platform: Platform): Promise<string> {
const module = `${owner}/${platform.bin}`;
function resolveBun() {
const exe = require.resolve(join(module, platform.exe));
const { exitCode, stderr, stdout } = spawn(exe, ["--version"]);
if (exitCode === 0) {
return exe;
}
throw new Error(stderr || stdout);
}
try {
return resolveBun();
} catch (cause) {
debug("resolveBun failed", cause);
error(
`Failed to find package "${module}".`,
`You may have used the "--no-optional" flag when running "npm install".`,
);
}
const cwd = join("node_modules", module);
try {
installBun(platform, cwd);
} catch (cause) {
debug("installBun failed", cause);
error(`Failed to install package "${module}" using "npm install".`, cause);
try {
await downloadBun(platform, cwd);
} catch (cause) {
debug("downloadBun failed", cause);
error(`Failed to download package "${module}" from "registry.npmjs.org".`, cause);
}
}
return resolveBun();
}
function installBun(platform: Platform, dst: string): void {
const module = `${owner}/${platform.bin}`;
const cwd = tmp();
try {
write(join(cwd, "package.json"), "{}");
const { exitCode } = spawn(
"npm",
["install", "--loglevel=error", "--prefer-offline", "--no-audit", "--progress=false", `${module}@${version}`],
{
cwd,
stdio: "pipe",
env: {
...process.env,
npm_config_global: undefined,
},
},
);
if (exitCode === 0) {
rename(join(cwd, "node_modules", module), dst);
}
} finally {
try {
rm(cwd);
} catch (error) {
debug("rm failed", error);
// There is nothing to do if the directory cannot be cleaned up.
}
}
}
async function downloadBun(platform: Platform, dst: string): Promise<void> {
const response = await fetch(`https://registry.npmjs.org/${owner}/${platform.bin}/-/${platform.bin}-${version}.tgz`);
const tgz = await response.arrayBuffer();
let buffer: Buffer;
try {
buffer = unzipSync(tgz);
} catch (cause) {
throw new Error("Invalid gzip data", { cause });
}
function str(i: number, n: number): string {
return String.fromCharCode(...buffer.subarray(i, i + n)).replace(/\0.*$/, "");
}
let offset = 0;
while (offset < buffer.length) {
const name = str(offset, 100).replace("package/", "");
const size = parseInt(str(offset + 124, 12), 8);
offset += 512;
if (!isNaN(size)) {
write(join(dst, name), buffer.subarray(offset, offset + size));
if (name === platform.exe) {
try {
chmod(join(dst, name), 0o755);
} catch (error) {
debug("chmod failed", error);
}
}
offset += (size + 511) & ~511;
}
}
}
export function optimizeBun(path: string): void {
if (os === "win32") {
throw new Error(
"You must use Windows Subsystem for Linux, aka. WSL, to run bun. Learn more: https://learn.microsoft.com/en-us/windows/wsl/install",
);
}
const { npm_config_user_agent } = process.env;
if (npm_config_user_agent && /\byarn\//.test(npm_config_user_agent)) {
throw new Error(
"Yarn does not support bun, because it does not allow linking to binaries. To use bun, install using the following command: curl -fsSL https://bun.sh/install | bash",
);
}
try {
rename(path, join(__dirname, "bin", "bun"));
return;
} catch (error) {
debug("optimizeBun failed", error);
}
throw new Error(
"Your package manager doesn't seem to support bun. To use bun, install using the following command: curl -fsSL https://bun.sh/install | bash",
);
}

View File

@@ -1,91 +0,0 @@
import { spawn } from "./spawn";
import { read } from "./fs";
import { debug } from "./console";
export const os = process.platform;
export const arch = os === "darwin" && process.arch === "x64" && isRosetta2() ? "arm64" : process.arch;
export const avx2 = (arch === "x64" && os === "linux" && isLinuxAVX2()) || (os === "darwin" && isDarwinAVX2());
export type Platform = {
os: string;
arch: string;
avx2?: boolean;
bin: string;
exe: string;
};
export const platforms: Platform[] = [
{
os: "darwin",
arch: "arm64",
bin: "bun-darwin-aarch64",
exe: "bin/bun",
},
{
os: "darwin",
arch: "x64",
avx2: true,
bin: "bun-darwin-x64",
exe: "bin/bun",
},
{
os: "darwin",
arch: "x64",
bin: "bun-darwin-x64-baseline",
exe: "bin/bun",
},
{
os: "linux",
arch: "arm64",
bin: "bun-linux-aarch64",
exe: "bin/bun",
},
{
os: "linux",
arch: "x64",
avx2: true,
bin: "bun-linux-x64",
exe: "bin/bun",
},
{
os: "linux",
arch: "x64",
bin: "bun-linux-x64-baseline",
exe: "bin/bun",
},
];
export const supportedPlatforms: Platform[] = platforms
.filter(platform => platform.os === os && platform.arch === arch && (!platform.avx2 || avx2))
.sort((a, b) => (a.avx2 === b.avx2 ? 0 : a.avx2 ? -1 : 1));
function isLinuxAVX2(): boolean {
try {
return read("/proc/cpuinfo").includes("avx2");
} catch (error) {
debug("isLinuxAVX2 failed", error);
return false;
}
}
function isDarwinAVX2(): boolean {
try {
const { exitCode, stdout } = spawn("sysctl", ["-n", "machdep.cpu"]);
return exitCode === 0 && stdout.includes("AVX2");
} catch (error) {
debug("isDarwinAVX2 failed", error);
return false;
}
}
function isRosetta2(): boolean {
try {
const { exitCode, stdout } = spawn("sysctl", ["-n", "sysctl.proc_translated"]);
return exitCode === 0 && stdout.includes("1");
} catch (error) {
debug("isRosetta2 failed", error);
return false;
}
}

View File

@@ -1,24 +0,0 @@
import child_process from "child_process";
import { debug } from "./console";
export function spawn(
cmd: string,
args: string[],
options: child_process.SpawnOptions = {},
): {
exitCode: number;
stdout: string;
stderr: string;
} {
debug("spawn", [cmd, ...args].join(" "));
const { status, stdout, stderr } = child_process.spawnSync(cmd, args, {
stdio: "pipe",
encoding: "utf-8",
...options,
});
return {
exitCode: status ?? 1,
stdout,
stderr,
};
}

View File

@@ -1,17 +0,0 @@
{
"compilerOptions": {
"lib": ["ESNext"],
"module": "ESNext",
"target": "ESNext",
"moduleResolution": "node",
"types": ["bun-types"],
"esModuleInterop": true,
"allowJs": true,
"strict": true,
"resolveJsonModule": true
},
"include": [
"src",
"scripts"
]
}

View File

@@ -1,3 +0,0 @@
.DS_Store
.env
node_modules

View File

@@ -1,3 +0,0 @@
# bun-test
Scripts to run Bun's tests using `bun wiptest`.
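A minimal invocation, assuming dependencies are installed and using the `test` script from the package.json shown further below:
```sh
bun install
bun run test # runs src/runner.ts, which spawns `bun wiptest` for each test file
```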

Binary file not shown.

View File

@@ -1,13 +0,0 @@
{
"private": true,
"dependencies": {
"@actions/core": "^1.10.0"
},
"devDependencies": {
"bun-types": "canary",
"prettier": "^2.8.2"
},
"scripts": {
"test": "bun run src/runner.ts"
}
}

View File

@@ -1,86 +0,0 @@
import { spawn } from "bun";
import { readdirSync } from "node:fs";
import { resolve } from "node:path";
import * as action from "@actions/core";
const cwd = resolve("../..");
const isAction = !!process.env["GITHUB_ACTION"];
const errorPattern = /error: ([\S\s]*?)(?=\n.*?at (\/.*):(\d+):(\d+))/mgi;
function* findTests(dir: string, query?: string): Generator<string> {
for (const entry of readdirSync(resolve(dir), { encoding: "utf-8", withFileTypes: true })) {
const path = resolve(dir, entry.name);
if (entry.isDirectory()) {
yield* findTests(path, query);
} else if (entry.isFile() && entry.name.includes(".test.")) {
yield path;
}
}
}
async function runTest(path: string): Promise<void> {
const name = path.replace(cwd, "").slice(1);
const runner = await spawn({
cwd,
cmd: ["bun", "wiptest", path],
stdout: "pipe",
stderr: "pipe",
});
const exitCode = await Promise.race([
new Promise((resolve) => {
setTimeout(() => {
runner.kill();
resolve(124); // Timed Out
}, 60_000);
}),
runner.exited,
]);
if (isAction) {
const prefix = exitCode === 0
? "PASS"
: `FAIL (exit code ${exitCode})`;
action.startGroup(`${prefix} - ${name}`);
}
for (const stdout of [runner.stdout, runner.stderr]) {
if (!stdout) {
continue;
}
const reader = stdout.getReader();
while (true) {
const { value, done } = await reader.read();
if (value) {
console.write(value);
if (isAction) {
findErrors(value);
}
}
if (done) {
break;
}
}
}
if (isAction) {
action.endGroup();
}
}
let failed = false;
function findErrors(data: Uint8Array): void {
const text = new TextDecoder().decode(data);
for (const [message, _, path, line, col] of text.matchAll(errorPattern)) {
failed = true;
action.error(message, {
file: path.replace(cwd, "").slice(1),
startLine: parseInt(line),
startColumn: parseInt(col),
});
}
}
const tests = [];
for (const path of findTests(resolve(cwd, "test/bun.js"))) {
tests.push(runTest(path).catch(console.error));
}
await Promise.allSettled(tests);
process.exit(failed ? 1 : 0);

View File

@@ -1,16 +0,0 @@
{
"compilerOptions": {
"lib": ["ESNext"],
"module": "ESNext",
"target": "ESNext",
"moduleResolution": "node",
"types": ["bun-types"],
"esModuleInterop": true,
"allowJs": true,
"strict": true,
"resolveJsonModule": true
},
"include": [
"src"
]
}

View File

@@ -22,10 +22,10 @@ Add this to your `tsconfig.json` or `jsconfig.json`:
```jsonc
{
"compilerOptions": {
"lib": ["ESNext"],
"module": "ESNext",
"target": "ESNext",
"moduleResolution": "Node",
"lib": ["esnext"],
"module": "esnext",
"target": "esnext",
"moduleResolution": "node",
// "bun-types" is the important part
"types": ["bun-types"]
}

View File

@@ -2623,7 +2623,7 @@ declare module "bun" {
*/
builder: PluginBuilder,
): void | Promise<void>;
}): ReturnType<(typeof options)["setup"]>;
}): ReturnType<typeof options["setup"]>;
/**
* Deactivate all plugins
@@ -2761,26 +2761,11 @@ declare module "bun" {
interface TCPSocket extends Socket {}
interface TLSSocket extends Socket {}
type BinaryTypeList = {
arraybuffer: ArrayBuffer;
buffer: Buffer;
uint8array: Uint8Array;
// TODO: DataView
// dataview: DataView;
};
type BinaryType = keyof BinaryTypeList;
interface SocketHandler<
Data = unknown,
DataBinaryType extends BinaryType = "buffer",
> {
interface SocketHandler<Data = unknown> {
open(socket: Socket<Data>): void | Promise<void>;
close?(socket: Socket<Data>): void | Promise<void>;
error?(socket: Socket<Data>, error: Error): void | Promise<void>;
data?(
socket: Socket<Data>,
data: BinaryTypeList[DataBinaryType],
): void | Promise<void>;
data?(socket: Socket<Data>, data: BufferSource): void | Promise<void>;
drain?(socket: Socket<Data>): void | Promise<void>;
/**
@@ -2803,23 +2788,6 @@ declare module "bun" {
* to the promise rejection queue.
*/
connectError?(socket: Socket<Data>, error: Error): void | Promise<void>;
/**
* Choose what `ArrayBufferView` is returned in the {@link SocketHandler.data} callback.
*
* @default "buffer"
*
* @remarks
* This lets you select the desired binary type for the `data` callback.
* It's a small performance optimization to let you avoid creating extra
* ArrayBufferView objects when possible.
*
* Bun originally defaulted to `Uint8Array` but when dealing with network
* data, it's more useful to be able to directly read from the bytes which
* `Buffer` allows.
*
*/
binaryType?: BinaryType;
}
interface SocketOptions<Data = unknown> {
@@ -3115,7 +3083,6 @@ declare module "bun" {
* ```
*/
readonly params: Record<string, string>;
readonly filePath: string;
readonly pathname: string;
readonly query: Record<string, string>;
readonly name: string;

View File

@@ -318,127 +318,127 @@ declare module "dns" {
hostname: string,
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "A",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "AAAA",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "ANY",
callback: (
err: ErrnoException | null,
addresses: AnyRecord[],
) => void,
): void;
export function resolve(
hostname: string,
rrtype: "CNAME",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "MX",
callback: (
err: ErrnoException | null,
addresses: MxRecord[],
) => void,
): void;
export function resolve(
hostname: string,
rrtype: "NAPTR",
callback: (
err: ErrnoException | null,
addresses: NaptrRecord[],
) => void,
): void;
export function resolve(
hostname: string,
rrtype: "NS",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "PTR",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "SOA",
callback: (err: ErrnoException | null, addresses: SoaRecord) => void,
): void;
export function resolve(
hostname: string,
rrtype: "SRV",
callback: (
err: ErrnoException | null,
addresses: SrvRecord[],
) => void,
): void;
export function resolve(
hostname: string,
rrtype: "TXT",
callback: (
err: ErrnoException | null,
addresses: string[][],
) => void,
): void;
export function resolve(
hostname: string,
rrtype: string,
callback: (
err: ErrnoException | null,
addresses:
| string[]
| MxRecord[]
| NaptrRecord[]
| SoaRecord
| SrvRecord[]
| string[][]
| AnyRecord[],
) => void,
): void;
export namespace resolve {
function __promisify__(
hostname: string,
rrtype?: "A" | "AAAA" | "CNAME" | "NS" | "PTR",
): Promise<string[]>;
function __promisify__(
hostname: string,
rrtype: "ANY",
): Promise<AnyRecord[]>;
function __promisify__(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
function __promisify__(
hostname: string,
rrtype: "NAPTR",
): Promise<NaptrRecord[]>;
function __promisify__(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
function __promisify__(
hostname: string,
rrtype: "SRV",
): Promise<SrvRecord[]>;
function __promisify__(
hostname: string,
rrtype: "TXT",
): Promise<string[][]>;
function __promisify__(
hostname: string,
rrtype: string,
): Promise<
| string[]
| MxRecord[]
| NaptrRecord[]
| SoaRecord
| SrvRecord[]
| string[][]
| AnyRecord[]
>;
}
// export function resolve(
// hostname: string,
// rrtype: "A",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "AAAA",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "ANY",
// callback: (
// err: ErrnoException | null,
// addresses: AnyRecord[],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "CNAME",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "MX",
// callback: (
// err: ErrnoException | null,
// addresses: MxRecord[],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "NAPTR",
// callback: (
// err: ErrnoException | null,
// addresses: NaptrRecord[],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "NS",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "PTR",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "SOA",
// callback: (err: ErrnoException | null, addresses: SoaRecord) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "SRV",
// callback: (
// err: ErrnoException | null,
// addresses: SrvRecord[],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "TXT",
// callback: (
// err: ErrnoException | null,
// addresses: string[][],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: string,
// callback: (
// err: ErrnoException | null,
// addresses:
// | string[]
// | MxRecord[]
// | NaptrRecord[]
// | SoaRecord
// | SrvRecord[]
// | string[][]
// | AnyRecord[],
// ) => void,
// ): void;
// export namespace resolve {
// function __promisify__(
// hostname: string,
// rrtype?: "A" | "AAAA" | "CNAME" | "NS" | "PTR",
// ): Promise<string[]>;
// function __promisify__(
// hostname: string,
// rrtype: "ANY",
// ): Promise<AnyRecord[]>;
// function __promisify__(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
// function __promisify__(
// hostname: string,
// rrtype: "NAPTR",
// ): Promise<NaptrRecord[]>;
// function __promisify__(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
// function __promisify__(
// hostname: string,
// rrtype: "SRV",
// ): Promise<SrvRecord[]>;
// function __promisify__(
// hostname: string,
// rrtype: "TXT",
// ): Promise<string[][]>;
// function __promisify__(
// hostname: string,
// rrtype: string,
// ): Promise<
// | string[]
// | MxRecord[]
// | NaptrRecord[]
// | SoaRecord
// | SrvRecord[]
// | string[][]
// | AnyRecord[]
// >;
// }
/**
* Uses the DNS protocol to resolve a IPv4 addresses (`A` records) for the`hostname`. The `addresses` argument passed to the `callback` function
* will contain an array of IPv4 addresses (e.g.`['74.125.79.104', '74.125.79.105', '74.125.79.106']`).
@@ -512,38 +512,38 @@ declare module "dns" {
* will contain an array of canonical name records available for the `hostname`(e.g. `['bar.example.com']`).
* @since v0.3.2
*/
export function resolveCname(
hostname: string,
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export namespace resolveCname {
function __promisify__(hostname: string): Promise<string[]>;
}
// export function resolveCname(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export namespace resolveCname {
// function __promisify__(hostname: string): Promise<string[]>;
// }
/**
* Uses the DNS protocol to resolve `CAA` records for the `hostname`. The`addresses` argument passed to the `callback` function
* will contain an array of certification authority authorization records
* available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'}, {critical: 128, issue: 'pki.example.com'}]`).
* @since v15.0.0, v14.17.0
*/
export function resolveCaa(
hostname: string,
callback: (err: ErrnoException | null, records: CaaRecord[]) => void,
): void;
export namespace resolveCaa {
function __promisify__(hostname: string): Promise<CaaRecord[]>;
}
// export function resolveCaa(
// hostname: string,
// callback: (err: ErrnoException | null, records: CaaRecord[]) => void,
// ): void;
// export namespace resolveCaa {
// function __promisify__(hostname: string): Promise<CaaRecord[]>;
// }
/**
* Uses the DNS protocol to resolve mail exchange records (`MX` records) for the`hostname`. The `addresses` argument passed to the `callback` function will
* contain an array of objects containing both a `priority` and `exchange`property (e.g. `[{priority: 10, exchange: 'mx.example.com'}, ...]`).
* @since v0.1.27
*/
export function resolveMx(
hostname: string,
callback: (err: ErrnoException | null, addresses: MxRecord[]) => void,
): void;
export namespace resolveMx {
function __promisify__(hostname: string): Promise<MxRecord[]>;
}
// export function resolveMx(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: MxRecord[]) => void,
// ): void;
// export namespace resolveMx {
// function __promisify__(hostname: string): Promise<MxRecord[]>;
// }
/**
* Uses the DNS protocol to resolve regular expression based records (`NAPTR`records) for the `hostname`. The `addresses` argument passed to the `callback`function will contain an array of
* objects with the following properties:
@@ -567,37 +567,37 @@ declare module "dns" {
* ```
* @since v0.9.12
*/
export function resolveNaptr(
hostname: string,
callback: (err: ErrnoException | null, addresses: NaptrRecord[]) => void,
): void;
export namespace resolveNaptr {
function __promisify__(hostname: string): Promise<NaptrRecord[]>;
}
// export function resolveNaptr(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: NaptrRecord[]) => void,
// ): void;
// export namespace resolveNaptr {
// function __promisify__(hostname: string): Promise<NaptrRecord[]>;
// }
/**
* Uses the DNS protocol to resolve name server records (`NS` records) for the`hostname`. The `addresses` argument passed to the `callback` function will
* contain an array of name server records available for `hostname`(e.g. `['ns1.example.com', 'ns2.example.com']`).
* @since v0.1.90
*/
export function resolveNs(
hostname: string,
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export namespace resolveNs {
function __promisify__(hostname: string): Promise<string[]>;
}
// export function resolveNs(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export namespace resolveNs {
// function __promisify__(hostname: string): Promise<string[]>;
// }
/**
* Uses the DNS protocol to resolve pointer records (`PTR` records) for the`hostname`. The `addresses` argument passed to the `callback` function will
* be an array of strings containing the reply records.
* @since v6.0.0
*/
export function resolvePtr(
hostname: string,
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export namespace resolvePtr {
function __promisify__(hostname: string): Promise<string[]>;
}
// export function resolvePtr(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export namespace resolvePtr {
// function __promisify__(hostname: string): Promise<string[]>;
// }
/**
* Uses the DNS protocol to resolve a start of authority record (`SOA` record) for
* the `hostname`. The `address` argument passed to the `callback` function will
@@ -624,13 +624,13 @@ declare module "dns" {
* ```
* @since v0.11.10
*/
export function resolveSoa(
hostname: string,
callback: (err: ErrnoException | null, address: SoaRecord) => void,
): void;
export namespace resolveSoa {
function __promisify__(hostname: string): Promise<SoaRecord>;
}
// export function resolveSoa(
// hostname: string,
// callback: (err: ErrnoException | null, address: SoaRecord) => void,
// ): void;
// export namespace resolveSoa {
// function __promisify__(hostname: string): Promise<SoaRecord>;
// }
/**
* Uses the DNS protocol to resolve service records (`SRV` records) for the`hostname`. The `addresses` argument passed to the `callback` function will
* be an array of objects with the following properties:
@@ -650,13 +650,13 @@ declare module "dns" {
* ```
* @since v0.1.27
*/
export function resolveSrv(
hostname: string,
callback: (err: ErrnoException | null, addresses: SrvRecord[]) => void,
): void;
export namespace resolveSrv {
function __promisify__(hostname: string): Promise<SrvRecord[]>;
}
// export function resolveSrv(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: SrvRecord[]) => void,
// ): void;
// export namespace resolveSrv {
// function __promisify__(hostname: string): Promise<SrvRecord[]>;
// }
/**
* Uses the DNS protocol to resolve text queries (`TXT` records) for the`hostname`. The `records` argument passed to the `callback` function is a
* two-dimensional array of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of
@@ -664,13 +664,13 @@ declare module "dns" {
* treated separately.
* @since v0.1.27
*/
export function resolveTxt(
hostname: string,
callback: (err: ErrnoException | null, addresses: string[][]) => void,
): void;
export namespace resolveTxt {
function __promisify__(hostname: string): Promise<string[][]>;
}
// export function resolveTxt(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: string[][]) => void,
// ): void;
// export namespace resolveTxt {
// function __promisify__(hostname: string): Promise<string[][]>;
// }
/**
* Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query).
* The `ret` argument passed to the `callback` function will be an array containing
@@ -861,14 +861,14 @@ declare module "dns" {
resolve4: typeof resolve4;
resolve6: typeof resolve6;
// resolveAny: typeof resolveAny;
resolveCname: typeof resolveCname;
resolveMx: typeof resolveMx;
resolveNaptr: typeof resolveNaptr;
resolveNs: typeof resolveNs;
resolvePtr: typeof resolvePtr;
resolveSoa: typeof resolveSoa;
resolveSrv: typeof resolveSrv;
resolveTxt: typeof resolveTxt;
// resolveCname: typeof resolveCname;
// resolveMx: typeof resolveMx;
// resolveNaptr: typeof resolveNaptr;
// resolveNs: typeof resolveNs;
// resolvePtr: typeof resolvePtr;
// resolveSoa: typeof resolveSoa;
// resolveSrv: typeof resolveSrv;
// resolveTxt: typeof resolveTxt;
// reverse: typeof reverse;
/**
* The resolver instance will send its requests from the specified IP address.

View File

@@ -10,12 +10,12 @@ declare module "dns/promises" {
LookupOneOptions,
LookupAllOptions,
LookupOptions,
AnyRecord,
CaaRecord,
MxRecord,
NaptrRecord,
SoaRecord,
SrvRecord,
// AnyRecord,
// CaaRecord,
// MxRecord,
// NaptrRecord,
// SoaRecord,
// SrvRecord,
ResolveWithTtlOptions,
RecordWithTtl,
ResolveOptions,
@@ -134,30 +134,30 @@ declare module "dns/promises" {
* @param [rrtype='A'] Resource record type.
*/
function resolve(hostname: string): Promise<string[]>;
function resolve(hostname: string, rrtype: "A"): Promise<string[]>;
function resolve(hostname: string, rrtype: "AAAA"): Promise<string[]>;
function resolve(hostname: string, rrtype: "ANY"): Promise<AnyRecord[]>;
function resolve(hostname: string, rrtype: "CAA"): Promise<CaaRecord[]>;
function resolve(hostname: string, rrtype: "CNAME"): Promise<string[]>;
function resolve(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
function resolve(hostname: string, rrtype: "NAPTR"): Promise<NaptrRecord[]>;
function resolve(hostname: string, rrtype: "NS"): Promise<string[]>;
function resolve(hostname: string, rrtype: "PTR"): Promise<string[]>;
function resolve(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
function resolveSrv(hostname: string): Promise<SrvRecord[]>;
function resolve(hostname: string, rrtype: "TXT"): Promise<string[][]>;
function resolve(
hostname: string,
rrtype: string,
): Promise<
| string[]
| MxRecord[]
| NaptrRecord[]
| SoaRecord
| SrvRecord[]
| string[][]
| AnyRecord[]
>;
// function resolve(hostname: string, rrtype: "A"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "AAAA"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "ANY"): Promise<AnyRecord[]>;
// function resolve(hostname: string, rrtype: "CAA"): Promise<CaaRecord[]>;
// function resolve(hostname: string, rrtype: "CNAME"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
// function resolve(hostname: string, rrtype: "NAPTR"): Promise<NaptrRecord[]>;
// function resolve(hostname: string, rrtype: "NS"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "PTR"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
// function resolve(hostname: string, rrtype: "SRV"): Promise<SrvRecord[]>;
// function resolve(hostname: string, rrtype: "TXT"): Promise<string[][]>;
// function resolve(
// hostname: string,
// rrtype: string,
// ): Promise<
// | string[]
// | MxRecord[]
// | NaptrRecord[]
// | SoaRecord
// | SrvRecord[]
// | string[][]
// | AnyRecord[]
// >;
/**
* Uses the DNS protocol to resolve IPv4 addresses (`A` records) for the `hostname`. On success, the `Promise` is resolved with an array of IPv4
* addresses (e.g. `['74.125.79.104', '74.125.79.105', '74.125.79.106']`).
@@ -223,20 +223,20 @@ declare module "dns/promises" {
* certification authority authorization records available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'}, {critical: 128, issue: 'pki.example.com'}]`).
* @since v15.0.0, v14.17.0
*/
function resolveCaa(hostname: string): Promise<CaaRecord[]>;
// function resolveCaa(hostname: string): Promise<CaaRecord[]>;
/**
* Uses the DNS protocol to resolve `CNAME` records for the `hostname`. On success,
* the `Promise` is resolved with an array of canonical name records available for
* the `hostname` (e.g. `['bar.example.com']`).
* @since v10.6.0
*/
function resolveCname(hostname: string): Promise<string[]>;
// function resolveCname(hostname: string): Promise<string[]>;
/**
* Uses the DNS protocol to resolve mail exchange records (`MX` records) for the `hostname`. On success, the `Promise` is resolved with an array of objects
* containing both a `priority` and `exchange` property (e.g. `[{priority: 10, exchange: 'mx.example.com'}, ...]`).
* @since v10.6.0
*/
function resolveMx(hostname: string): Promise<MxRecord[]>;
// function resolveMx(hostname: string): Promise<MxRecord[]>;
/**
* Uses the DNS protocol to resolve regular expression based records (`NAPTR` records) for the `hostname`. On success, the `Promise` is resolved with an array
* of objects with the following properties:
@@ -260,19 +260,19 @@ declare module "dns/promises" {
* ```
* @since v10.6.0
*/
function resolveNaptr(hostname: string): Promise<NaptrRecord[]>;
// function resolveNaptr(hostname: string): Promise<NaptrRecord[]>;
/**
* Uses the DNS protocol to resolve name server records (`NS` records) for the `hostname`. On success, the `Promise` is resolved with an array of name server
* records available for `hostname` (e.g. `['ns1.example.com', 'ns2.example.com']`).
* @since v10.6.0
*/
function resolveNs(hostname: string): Promise<string[]>;
// function resolveNs(hostname: string): Promise<string[]>;
/**
* Uses the DNS protocol to resolve pointer records (`PTR` records) for the `hostname`. On success, the `Promise` is resolved with an array of strings
* containing the reply records.
* @since v10.6.0
*/
function resolvePtr(hostname: string): Promise<string[]>;
// function resolvePtr(hostname: string): Promise<string[]>;
/**
* Uses the DNS protocol to resolve a start of authority record (`SOA` record) for
* the `hostname`. On success, the `Promise` is resolved with an object with the
@@ -299,7 +299,7 @@ declare module "dns/promises" {
* ```
* @since v10.6.0
*/
function resolveSoa(hostname: string): Promise<SoaRecord>;
// function resolveSoa(hostname: string): Promise<SoaRecord>;
/**
* Uses the DNS protocol to resolve service records (`SRV` records) for the `hostname`. On success, the `Promise` is resolved with an array of objects with
* the following properties:
@@ -319,7 +319,7 @@ declare module "dns/promises" {
* ```
* @since v10.6.0
*/
function resolveSrv(hostname: string): Promise<SrvRecord[]>;
// function resolveSrv(hostname: string): Promise<SrvRecord[]>;
/**
* Uses the DNS protocol to resolve text queries (`TXT` records) for the `hostname`. On success, the `Promise` is resolved with a two-dimensional array
* of the text records available for `hostname` (e.g. `[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of
@@ -327,7 +327,7 @@ declare module "dns/promises" {
* treated separately.
* @since v10.6.0
*/
function resolveTxt(hostname: string): Promise<string[][]>;
// function resolveTxt(hostname: string): Promise<string[][]>;
/**
* Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an
* array of host names.
@@ -384,14 +384,14 @@ declare module "dns/promises" {
resolve4: typeof resolve4;
resolve6: typeof resolve6;
// resolveAny: typeof resolveAny;
resolveCname: typeof resolveCname;
resolveMx: typeof resolveMx;
resolveNaptr: typeof resolveNaptr;
resolveNs: typeof resolveNs;
resolvePtr: typeof resolvePtr;
resolveSoa: typeof resolveSoa;
resolveSrv: typeof resolveSrv;
resolveTxt: typeof resolveTxt;
// resolveCname: typeof resolveCname;
// resolveMx: typeof resolveMx;
// resolveNaptr: typeof resolveNaptr;
// resolveNs: typeof resolveNs;
// resolvePtr: typeof resolvePtr;
// resolveSoa: typeof resolveSoa;
// resolveSrv: typeof resolveSrv;
// resolveTxt: typeof resolveTxt;
// reverse: typeof reverse;
// setLocalAddress(ipv4?: string, ipv6?: string): void;
// setServers: typeof setServers;
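The typed resolve() overloads and the per-record helpers above are commented out, leaving the untyped resolve(hostname) form plus resolve4/resolve6. A short sketch of the surviving promise API, assuming Node-compatible behavior at runtime (example.com is a placeholder):

import { resolve, resolve4 } from "node:dns/promises";

// resolve() without an rrtype defaults to A records and yields string[].
const addresses = await resolve("example.com");
// With { ttl: true }, resolve4 yields { address, ttl } objects (RecordWithTtl above).
const withTtl = await resolve4("example.com", { ttl: true });
console.log(addresses, withTtl);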

View File

@@ -344,7 +344,7 @@ interface Process {
arch: Architecture;
platform: Platform;
argv: string[];
execArgv: string[];
// execArgv: string[];
env: Bun.Env;
/** Whether you are using Bun */
@@ -834,21 +834,6 @@ interface RequestInit {
timeout?: boolean;
}
interface FetchRequestInit extends RequestInit {
/**
* Log the raw HTTP request & response to stdout. This API may be
* removed in a future version of Bun without notice.
* This is a custom property that is not part of the Fetch API specification.
* It exists mostly as a debugging tool
*/
verbose?: boolean;
/**
* Override http_proxy or HTTPS_PROXY
* This is a custom property that is not part of the Fetch API specification.
*/
proxy?: string;
}
/**
* [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) represents an HTTP request.
*
@@ -1253,8 +1238,19 @@ declare function clearTimeout(id?: number): void;
*
*/
declare function fetch(
url: string | URL,
init?: FetchRequestInit,
url: string,
init?: RequestInit,
/**
* This is a custom property that is not part of the Fetch API specification.
* It exists mostly as a debugging tool
*/
bunOnlyOptions?: {
/**
* Log the raw HTTP request & response to stdout. This API may be
* removed in a future version of Bun without notice.
*/
verbose: boolean;
},
): Promise<Response>;
/**
@@ -1268,7 +1264,21 @@ declare function fetch(
*
*/
// tslint:disable-next-line:unified-signatures
declare function fetch(request: Request, init?: RequestInit): Promise<Response>;
declare function fetch(
request: Request,
init?: RequestInit,
/**
* This is a custom property that is not part of the Fetch API specification.
* It exists mostly as a debugging tool
*/
bunOnlyOptions?: {
/**
* Log the raw HTTP request & response to stdout. This API may be
* removed in a future version of Bun without notice.
*/
verbose: boolean;
},
): Promise<Response>;
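Under this overload, the Bun-only options move into a third argument instead of living on RequestInit. A minimal call matching the declaration above (the URL is a placeholder; verbose is documented above as a debugging flag that may be removed):

const res = await fetch(
  "https://example.com/api", // placeholder URL
  { method: "GET" },
  { verbose: true },         // Bun-only: log the raw HTTP request & response
);
console.log(res.status);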
declare function queueMicrotask(callback: (...args: any[]) => void): void;
/**

View File

@@ -33,7 +33,6 @@
* @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/readline.js)
*/
declare module "readline" {
import { Readable, Writable } from "node:stream";
import { Abortable, EventEmitter } from "node:events";
import * as promises from "node:readline/promises";
@@ -104,8 +103,8 @@ declare module "readline" {
* @see https://nodejs.org/dist/latest-v10.x/docs/api/readline.html#readline_class_interface
*/
protected constructor(
input: Readable,
output?: Writable,
input: ReadableStream,
output?: WritableStream,
completer?: Completer | AsyncCompleter,
terminal?: boolean,
);
@@ -345,8 +344,8 @@ declare module "readline" {
) => void;
export type CompleterResult = [string[], string];
export interface ReadLineOptions {
input: Readable;
output?: Writable | undefined;
input: ReadableStream;
output?: WritableStream | undefined;
completer?: Completer | AsyncCompleter | undefined;
terminal?: boolean | undefined;
/**
@@ -406,8 +405,8 @@ declare module "readline" {
* @since v0.1.98
*/
export function createInterface(
input: Readable,
output?: Writable,
input: ReadableStream,
output?: WritableStream,
completer?: Completer | AsyncCompleter,
terminal?: boolean,
): Interface;
@@ -534,7 +533,7 @@ declare module "readline" {
* @since v0.7.7
*/
export function emitKeypressEvents(
stream: Readable,
stream: ReadableStream,
readlineInterface?: Interface,
): void;
export type Direction = -1 | 0 | 1;
@@ -550,7 +549,7 @@ declare module "readline" {
* @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
*/
export function clearLine(
stream: Writable,
stream: WritableStream,
dir: Direction,
callback?: () => void,
): boolean;
@@ -562,7 +561,7 @@ declare module "readline" {
* @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
*/
export function clearScreenDown(
stream: Writable,
stream: WritableStream,
callback?: () => void,
): boolean;
/**
@@ -573,7 +572,7 @@ declare module "readline" {
* @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
*/
export function cursorTo(
stream: Writable,
stream: WritableStream,
x: number,
y?: number,
callback?: () => void,
@@ -689,7 +688,7 @@ declare module "readline" {
* @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
*/
export function moveCursor(
stream: Writable,
stream: WritableStream,
dx: number,
dy: number,
callback?: () => void,
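These hunks retype readline's streams as web ReadableStream/WritableStream rather than node:stream Readable/Writable. The usual createInterface pattern still reads like this; whether process.stdin and process.stdout satisfy the web-stream typing depends on Bun's own process declarations, so treat the types here as illustrative:

import { createInterface } from "node:readline";

const rl = createInterface({ input: process.stdin, output: process.stdout });
rl.question("name? ", (answer) => {
  console.log(`hello, ${answer}`);
  rl.close();
});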

View File

@@ -5,7 +5,6 @@
* @since v17.0.0
*/
declare module "readline/promises" {
import { Readable, Writable } from "node:stream";
import {
Interface as _Interface,
ReadLineOptions,
@@ -57,7 +56,7 @@ declare module "readline/promises" {
/**
* @param stream A TTY stream.
*/
constructor(stream: Writable, options?: { autoCommit?: boolean });
constructor(stream: WritableStream, options?: { autoCommit?: boolean });
/**
* The `rl.clearLine()` method adds to the internal list of pending actions an action that clears the current line of the associated `stream` in a specified direction identified by `dir`.
* Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor.
@@ -138,8 +137,8 @@ declare module "readline/promises" {
* ```
*/
function createInterface(
input: Readable,
output?: Writable,
input: ReadableStream,
output?: WritableStream,
completer?: Completer | AsyncCompleter,
terminal?: boolean,
): Interface;

View File

@@ -11,10 +11,3 @@ declare global {
expectType<"WHATEVER">(process.env.WHATEVER);
export {};
new Bun.Transpiler({
macros: {
"react-relay": {
graphql: "bun-macro-relay/bun-macro-relay.tsx",
},
},
});

View File

@@ -10,18 +10,11 @@
*/
declare module "timers" {
class Timer {
ref(): void;
unref(): void;
hasRef(): boolean;
}
const _exported: {
clearTimeout: (timer: Timer | number) => void;
clearInterval: (timer: Timer | number) => void;
setInterval: (cb: CallableFunction, msDelay: number, ...args: any[]) => Timer;
setTimeout: (cb: CallableFunction, msDelay: number, ...args: any[]) => Timer;
setImmediate: (cb: CallableFunction, ...args: any[]) => Timer;
clearTimeout: typeof clearTimeout;
clearInterval: typeof clearInterval;
setTimeout: typeof setTimeout;
setInterval: typeof setInterval;
};
export = _exported;
}
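With this export shape, node:timers simply re-exports the global timer functions instead of declaring its own Timer class. A short sketch assuming Node-compatible behavior (the renames only avoid shadowing the globals):

import { setTimeout as delay, clearTimeout as cancel } from "node:timers";

const id = delay(() => console.log("tick"), 100);
cancel(id); // cancelled before it fires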

View File

@@ -298,7 +298,6 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
if (!loaded) {
instance = Self{
.allocator = allocator,
.backing_buf_used = 0,
};
loaded = true;
}

View File

@@ -293,10 +293,10 @@ pub const GenerateHeader = struct {
}
_ = forOS();
const release = std.mem.span(&linux_os_name.release);
const sliced_string = Semver.SlicedString.init(release, release);
const result = Semver.Version.parse(sliced_string, bun.default_allocator);
var sliced_string = Semver.SlicedString.init(release, release);
var result = Semver.Version.parse(sliced_string, bun.default_allocator);
// we only care about major, minor, patch so we don't care about the string
return result.version.fill();
return result.version;
}
pub fn forLinux() Analytics.Platform {

1
src/api/.gitignore vendored
View File

@@ -1 +0,0 @@
*.wasm

319
src/api/schema.d.ts generated vendored
View File

@@ -702,90 +702,228 @@ export interface BunInstall {
global_bin_dir?: string;
}
export declare function encodeStackFrame(message: StackFrame, bb: ByteBuffer): void;
export declare function encodeStackFrame(
message: StackFrame,
bb: ByteBuffer,
): void;
export declare function decodeStackFrame(buffer: ByteBuffer): StackFrame;
export declare function encodeStackFramePosition(message: StackFramePosition, bb: ByteBuffer): void;
export declare function decodeStackFramePosition(buffer: ByteBuffer): StackFramePosition;
export declare function encodeSourceLine(message: SourceLine, bb: ByteBuffer): void;
export declare function encodeStackFramePosition(
message: StackFramePosition,
bb: ByteBuffer,
): void;
export declare function decodeStackFramePosition(
buffer: ByteBuffer,
): StackFramePosition;
export declare function encodeSourceLine(
message: SourceLine,
bb: ByteBuffer,
): void;
export declare function decodeSourceLine(buffer: ByteBuffer): SourceLine;
export declare function encodeStackTrace(message: StackTrace, bb: ByteBuffer): void;
export declare function encodeStackTrace(
message: StackTrace,
bb: ByteBuffer,
): void;
export declare function decodeStackTrace(buffer: ByteBuffer): StackTrace;
export declare function encodeJSException(message: JSException, bb: ByteBuffer): void;
export declare function encodeJSException(
message: JSException,
bb: ByteBuffer,
): void;
export declare function decodeJSException(buffer: ByteBuffer): JSException;
export declare function encodeProblems(message: Problems, bb: ByteBuffer): void;
export declare function decodeProblems(buffer: ByteBuffer): Problems;
export declare function encodeRouter(message: Router, bb: ByteBuffer): void;
export declare function decodeRouter(buffer: ByteBuffer): Router;
export declare function encodeFallbackMessageContainer(message: FallbackMessageContainer, bb: ByteBuffer): void;
export declare function decodeFallbackMessageContainer(buffer: ByteBuffer): FallbackMessageContainer;
export declare function encodeFallbackMessageContainer(
message: FallbackMessageContainer,
bb: ByteBuffer,
): void;
export declare function decodeFallbackMessageContainer(
buffer: ByteBuffer,
): FallbackMessageContainer;
export declare function encodeJSX(message: JSX, bb: ByteBuffer): void;
export declare function decodeJSX(buffer: ByteBuffer): JSX;
export declare function encodeStringPointer(message: StringPointer, bb: ByteBuffer): void;
export declare function encodeStringPointer(
message: StringPointer,
bb: ByteBuffer,
): void;
export declare function decodeStringPointer(buffer: ByteBuffer): StringPointer;
export declare function encodeJavascriptBundledModule(message: JavascriptBundledModule, bb: ByteBuffer): void;
export declare function decodeJavascriptBundledModule(buffer: ByteBuffer): JavascriptBundledModule;
export declare function encodeJavascriptBundledPackage(message: JavascriptBundledPackage, bb: ByteBuffer): void;
export declare function decodeJavascriptBundledPackage(buffer: ByteBuffer): JavascriptBundledPackage;
export declare function encodeJavascriptBundle(message: JavascriptBundle, bb: ByteBuffer): void;
export declare function decodeJavascriptBundle(buffer: ByteBuffer): JavascriptBundle;
export declare function encodeJavascriptBundleContainer(message: JavascriptBundleContainer, bb: ByteBuffer): void;
export declare function decodeJavascriptBundleContainer(buffer: ByteBuffer): JavascriptBundleContainer;
export declare function encodeModuleImportRecord(message: ModuleImportRecord, bb: ByteBuffer): void;
export declare function decodeModuleImportRecord(buffer: ByteBuffer): ModuleImportRecord;
export declare function encodeJavascriptBundledModule(
message: JavascriptBundledModule,
bb: ByteBuffer,
): void;
export declare function decodeJavascriptBundledModule(
buffer: ByteBuffer,
): JavascriptBundledModule;
export declare function encodeJavascriptBundledPackage(
message: JavascriptBundledPackage,
bb: ByteBuffer,
): void;
export declare function decodeJavascriptBundledPackage(
buffer: ByteBuffer,
): JavascriptBundledPackage;
export declare function encodeJavascriptBundle(
message: JavascriptBundle,
bb: ByteBuffer,
): void;
export declare function decodeJavascriptBundle(
buffer: ByteBuffer,
): JavascriptBundle;
export declare function encodeJavascriptBundleContainer(
message: JavascriptBundleContainer,
bb: ByteBuffer,
): void;
export declare function decodeJavascriptBundleContainer(
buffer: ByteBuffer,
): JavascriptBundleContainer;
export declare function encodeModuleImportRecord(
message: ModuleImportRecord,
bb: ByteBuffer,
): void;
export declare function decodeModuleImportRecord(
buffer: ByteBuffer,
): ModuleImportRecord;
export declare function encodeModule(message: Module, bb: ByteBuffer): void;
export declare function decodeModule(buffer: ByteBuffer): Module;
export declare function encodeStringMap(message: StringMap, bb: ByteBuffer): void;
export declare function encodeStringMap(
message: StringMap,
bb: ByteBuffer,
): void;
export declare function decodeStringMap(buffer: ByteBuffer): StringMap;
export declare function encodeLoaderMap(message: LoaderMap, bb: ByteBuffer): void;
export declare function encodeLoaderMap(
message: LoaderMap,
bb: ByteBuffer,
): void;
export declare function decodeLoaderMap(buffer: ByteBuffer): LoaderMap;
export declare function encodeEnvConfig(message: EnvConfig, bb: ByteBuffer): void;
export declare function encodeEnvConfig(
message: EnvConfig,
bb: ByteBuffer,
): void;
export declare function decodeEnvConfig(buffer: ByteBuffer): EnvConfig;
export declare function encodeLoadedEnvConfig(message: LoadedEnvConfig, bb: ByteBuffer): void;
export declare function decodeLoadedEnvConfig(buffer: ByteBuffer): LoadedEnvConfig;
export declare function encodeFrameworkConfig(message: FrameworkConfig, bb: ByteBuffer): void;
export declare function decodeFrameworkConfig(buffer: ByteBuffer): FrameworkConfig;
export declare function encodeFrameworkEntryPoint(message: FrameworkEntryPoint, bb: ByteBuffer): void;
export declare function decodeFrameworkEntryPoint(buffer: ByteBuffer): FrameworkEntryPoint;
export declare function encodeFrameworkEntryPointMap(message: FrameworkEntryPointMap, bb: ByteBuffer): void;
export declare function decodeFrameworkEntryPointMap(buffer: ByteBuffer): FrameworkEntryPointMap;
export declare function encodeFrameworkEntryPointMessage(message: FrameworkEntryPointMessage, bb: ByteBuffer): void;
export declare function decodeFrameworkEntryPointMessage(buffer: ByteBuffer): FrameworkEntryPointMessage;
export declare function encodeLoadedFramework(message: LoadedFramework, bb: ByteBuffer): void;
export declare function decodeLoadedFramework(buffer: ByteBuffer): LoadedFramework;
export declare function encodeLoadedRouteConfig(message: LoadedRouteConfig, bb: ByteBuffer): void;
export declare function decodeLoadedRouteConfig(buffer: ByteBuffer): LoadedRouteConfig;
export declare function encodeRouteConfig(message: RouteConfig, bb: ByteBuffer): void;
export declare function encodeLoadedEnvConfig(
message: LoadedEnvConfig,
bb: ByteBuffer,
): void;
export declare function decodeLoadedEnvConfig(
buffer: ByteBuffer,
): LoadedEnvConfig;
export declare function encodeFrameworkConfig(
message: FrameworkConfig,
bb: ByteBuffer,
): void;
export declare function decodeFrameworkConfig(
buffer: ByteBuffer,
): FrameworkConfig;
export declare function encodeFrameworkEntryPoint(
message: FrameworkEntryPoint,
bb: ByteBuffer,
): void;
export declare function decodeFrameworkEntryPoint(
buffer: ByteBuffer,
): FrameworkEntryPoint;
export declare function encodeFrameworkEntryPointMap(
message: FrameworkEntryPointMap,
bb: ByteBuffer,
): void;
export declare function decodeFrameworkEntryPointMap(
buffer: ByteBuffer,
): FrameworkEntryPointMap;
export declare function encodeFrameworkEntryPointMessage(
message: FrameworkEntryPointMessage,
bb: ByteBuffer,
): void;
export declare function decodeFrameworkEntryPointMessage(
buffer: ByteBuffer,
): FrameworkEntryPointMessage;
export declare function encodeLoadedFramework(
message: LoadedFramework,
bb: ByteBuffer,
): void;
export declare function decodeLoadedFramework(
buffer: ByteBuffer,
): LoadedFramework;
export declare function encodeLoadedRouteConfig(
message: LoadedRouteConfig,
bb: ByteBuffer,
): void;
export declare function decodeLoadedRouteConfig(
buffer: ByteBuffer,
): LoadedRouteConfig;
export declare function encodeRouteConfig(
message: RouteConfig,
bb: ByteBuffer,
): void;
export declare function decodeRouteConfig(buffer: ByteBuffer): RouteConfig;
export declare function encodeTransformOptions(message: TransformOptions, bb: ByteBuffer): void;
export declare function decodeTransformOptions(buffer: ByteBuffer): TransformOptions;
export declare function encodeFileHandle(message: FileHandle, bb: ByteBuffer): void;
export declare function encodeTransformOptions(
message: TransformOptions,
bb: ByteBuffer,
): void;
export declare function decodeTransformOptions(
buffer: ByteBuffer,
): TransformOptions;
export declare function encodeFileHandle(
message: FileHandle,
bb: ByteBuffer,
): void;
export declare function decodeFileHandle(buffer: ByteBuffer): FileHandle;
export declare function encodeTransform(message: Transform, bb: ByteBuffer): void;
export declare function encodeTransform(
message: Transform,
bb: ByteBuffer,
): void;
export declare function decodeTransform(buffer: ByteBuffer): Transform;
export declare function encodeScan(message: Scan, bb: ByteBuffer): void;
export declare function decodeScan(buffer: ByteBuffer): Scan;
export declare function encodeScanResult(message: ScanResult, bb: ByteBuffer): void;
export declare function encodeScanResult(
message: ScanResult,
bb: ByteBuffer,
): void;
export declare function decodeScanResult(buffer: ByteBuffer): ScanResult;
export declare function encodeScannedImport(message: ScannedImport, bb: ByteBuffer): void;
export declare function encodeScannedImport(
message: ScannedImport,
bb: ByteBuffer,
): void;
export declare function decodeScannedImport(buffer: ByteBuffer): ScannedImport;
export declare function encodeOutputFile(message: OutputFile, bb: ByteBuffer): void;
export declare function encodeOutputFile(
message: OutputFile,
bb: ByteBuffer,
): void;
export declare function decodeOutputFile(buffer: ByteBuffer): OutputFile;
export declare function encodeTransformResponse(message: TransformResponse, bb: ByteBuffer): void;
export declare function decodeTransformResponse(buffer: ByteBuffer): TransformResponse;
export declare function encodeTransformResponse(
message: TransformResponse,
bb: ByteBuffer,
): void;
export declare function decodeTransformResponse(
buffer: ByteBuffer,
): TransformResponse;
export declare function encodeLocation(message: Location, bb: ByteBuffer): void;
export declare function decodeLocation(buffer: ByteBuffer): Location;
export declare function encodeMessageData(message: MessageData, bb: ByteBuffer): void;
export declare function encodeMessageData(
message: MessageData,
bb: ByteBuffer,
): void;
export declare function decodeMessageData(buffer: ByteBuffer): MessageData;
export declare function encodeMessageMeta(message: MessageMeta, bb: ByteBuffer): void;
export declare function encodeMessageMeta(
message: MessageMeta,
bb: ByteBuffer,
): void;
export declare function decodeMessageMeta(buffer: ByteBuffer): MessageMeta;
export declare function encodeMessage(message: Message, bb: ByteBuffer): void;
export declare function decodeMessage(buffer: ByteBuffer): Message;
export declare function encodeLog(message: Log, bb: ByteBuffer): void;
export declare function decodeLog(buffer: ByteBuffer): Log;
export declare function encodeWebsocketMessage(message: WebsocketMessage, bb: ByteBuffer): void;
export declare function decodeWebsocketMessage(buffer: ByteBuffer): WebsocketMessage;
export declare function encodeWebsocketMessageWelcome(message: WebsocketMessageWelcome, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageWelcome(buffer: ByteBuffer): WebsocketMessageWelcome;
export declare function encodeWebsocketMessage(
message: WebsocketMessage,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketMessage(
buffer: ByteBuffer,
): WebsocketMessage;
export declare function encodeWebsocketMessageWelcome(
message: WebsocketMessageWelcome,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketMessageWelcome(
buffer: ByteBuffer,
): WebsocketMessageWelcome;
export declare function encodeWebsocketMessageFileChangeNotification(
message: WebsocketMessageFileChangeNotification,
bb: ByteBuffer,
@@ -793,26 +931,69 @@ export declare function encodeWebsocketMessageFileChangeNotification(
export declare function decodeWebsocketMessageFileChangeNotification(
buffer: ByteBuffer,
): WebsocketMessageFileChangeNotification;
export declare function encodeWebsocketCommand(message: WebsocketCommand, bb: ByteBuffer): void;
export declare function decodeWebsocketCommand(buffer: ByteBuffer): WebsocketCommand;
export declare function encodeWebsocketCommandBuild(message: WebsocketCommandBuild, bb: ByteBuffer): void;
export declare function decodeWebsocketCommandBuild(buffer: ByteBuffer): WebsocketCommandBuild;
export declare function encodeWebsocketCommandManifest(message: WebsocketCommandManifest, bb: ByteBuffer): void;
export declare function decodeWebsocketCommandManifest(buffer: ByteBuffer): WebsocketCommandManifest;
export declare function encodeWebsocketMessageBuildSuccess(message: WebsocketMessageBuildSuccess, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageBuildSuccess(buffer: ByteBuffer): WebsocketMessageBuildSuccess;
export declare function encodeWebsocketMessageBuildFailure(message: WebsocketMessageBuildFailure, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageBuildFailure(buffer: ByteBuffer): WebsocketMessageBuildFailure;
export declare function encodeWebsocketCommand(
message: WebsocketCommand,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketCommand(
buffer: ByteBuffer,
): WebsocketCommand;
export declare function encodeWebsocketCommandBuild(
message: WebsocketCommandBuild,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketCommandBuild(
buffer: ByteBuffer,
): WebsocketCommandBuild;
export declare function encodeWebsocketCommandManifest(
message: WebsocketCommandManifest,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketCommandManifest(
buffer: ByteBuffer,
): WebsocketCommandManifest;
export declare function encodeWebsocketMessageBuildSuccess(
message: WebsocketMessageBuildSuccess,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketMessageBuildSuccess(
buffer: ByteBuffer,
): WebsocketMessageBuildSuccess;
export declare function encodeWebsocketMessageBuildFailure(
message: WebsocketMessageBuildFailure,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketMessageBuildFailure(
buffer: ByteBuffer,
): WebsocketMessageBuildFailure;
export declare function encodeWebsocketCommandBuildWithFilePath(
message: WebsocketCommandBuildWithFilePath,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketCommandBuildWithFilePath(buffer: ByteBuffer): WebsocketCommandBuildWithFilePath;
export declare function encodeWebsocketMessageResolveID(message: WebsocketMessageResolveID, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageResolveID(buffer: ByteBuffer): WebsocketMessageResolveID;
export declare function encodeNPMRegistry(message: NPMRegistry, bb: ByteBuffer): void;
export declare function decodeWebsocketCommandBuildWithFilePath(
buffer: ByteBuffer,
): WebsocketCommandBuildWithFilePath;
export declare function encodeWebsocketMessageResolveID(
message: WebsocketMessageResolveID,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketMessageResolveID(
buffer: ByteBuffer,
): WebsocketMessageResolveID;
export declare function encodeNPMRegistry(
message: NPMRegistry,
bb: ByteBuffer,
): void;
export declare function decodeNPMRegistry(buffer: ByteBuffer): NPMRegistry;
export declare function encodeNPMRegistryMap(message: NPMRegistryMap, bb: ByteBuffer): void;
export declare function decodeNPMRegistryMap(buffer: ByteBuffer): NPMRegistryMap;
export declare function encodeBunInstall(message: BunInstall, bb: ByteBuffer): void;
export declare function encodeNPMRegistryMap(
message: NPMRegistryMap,
bb: ByteBuffer,
): void;
export declare function decodeNPMRegistryMap(
buffer: ByteBuffer,
): NPMRegistryMap;
export declare function encodeBunInstall(
message: BunInstall,
bb: ByteBuffer,
): void;
export declare function decodeBunInstall(buffer: ByteBuffer): BunInstall;

142
src/api/schema.js generated
View File

@@ -122,7 +122,12 @@ function encodeStackFrame(message, bb) {
var value = message["scope"];
if (value != null) {
var encoded = StackFrameScope[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "StackFrameScope"');
if (encoded === void 0)
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "StackFrameScope"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "scope"');
@@ -499,7 +504,10 @@ function encodeFallbackMessageContainer(message, bb) {
if (value != null) {
bb.writeByte(3);
var encoded = FallbackStep[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "FallbackStep"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "FallbackStep"',
);
bb.writeByte(encoded);
}
@@ -612,7 +620,10 @@ function encodeJSX(message, bb) {
var value = message["runtime"];
if (value != null) {
var encoded = JSXRuntime[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "JSXRuntime"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "JSXRuntime"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "runtime"');
@@ -772,10 +783,12 @@ function decodeJavascriptBundle(bb) {
var length = bb.readVarUint();
var values = (result["modules"] = Array(length));
for (var i = 0; i < length; i++) values[i] = decodeJavascriptBundledModule(bb);
for (var i = 0; i < length; i++)
values[i] = decodeJavascriptBundledModule(bb);
var length = bb.readVarUint();
var values = (result["packages"] = Array(length));
for (var i = 0; i < length; i++) values[i] = decodeJavascriptBundledPackage(bb);
for (var i = 0; i < length; i++)
values[i] = decodeJavascriptBundledPackage(bb);
result["etag"] = bb.readByteArray();
result["generated_at"] = bb.readUint32();
result["app_package_json_dependencies_hash"] = bb.readByteArray();
@@ -829,7 +842,9 @@ function encodeJavascriptBundle(message, bb) {
if (value != null) {
bb.writeByteArray(value);
} else {
throw new Error('Missing required field "app_package_json_dependencies_hash"');
throw new Error(
'Missing required field "app_package_json_dependencies_hash"',
);
}
var value = message["import_from_name"];
@@ -951,7 +966,12 @@ function encodeModuleImportRecord(message, bb) {
var value = message["kind"];
if (value != null) {
var encoded = ModuleImportType[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "ModuleImportType"');
if (encoded === void 0)
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "ModuleImportType"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "kind"');
@@ -1078,7 +1098,10 @@ function encodeLoaderMap(message, bb) {
for (var i = 0; i < n; i++) {
value = values[i];
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Loader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Loader"',
);
bb.writeByte(encoded);
}
} else {
@@ -1152,7 +1175,10 @@ function encodeLoadedEnvConfig(message, bb) {
var value = message["dotenv"];
if (value != null) {
var encoded = DotEnvBehavior[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "DotEnvBehavior"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "DotEnvBehavior"',
);
bb.writeVarUint(encoded);
} else {
throw new Error('Missing required field "dotenv"');
@@ -1254,7 +1280,12 @@ function encodeFrameworkConfig(message, bb) {
if (value != null) {
bb.writeByte(6);
var encoded = CSSInJSBehavior[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "CSSInJSBehavior"');
if (encoded === void 0)
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "CSSInJSBehavior"',
);
bb.writeByte(encoded);
}
@@ -1286,7 +1317,11 @@ function encodeFrameworkEntryPoint(message, bb) {
if (value != null) {
var encoded = FrameworkEntryPointType[value];
if (encoded === void 0)
throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "FrameworkEntryPointType"');
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "FrameworkEntryPointType"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "kind"');
@@ -1435,7 +1470,12 @@ function encodeLoadedFramework(message, bb) {
var value = message["client_css_in_js"];
if (value != null) {
var encoded = CSSInJSBehavior[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "CSSInJSBehavior"');
if (encoded === void 0)
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "CSSInJSBehavior"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "client_css_in_js"');
@@ -1719,7 +1759,10 @@ function encodeTransformOptions(message, bb) {
if (value != null) {
bb.writeByte(3);
var encoded = ResolveMode[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "ResolveMode"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "ResolveMode"',
);
bb.writeByte(encoded);
}
@@ -1817,7 +1860,10 @@ function encodeTransformOptions(message, bb) {
if (value != null) {
bb.writeByte(15);
var encoded = Platform[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Platform"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Platform"',
);
bb.writeByte(encoded);
}
@@ -1891,7 +1937,10 @@ function encodeTransformOptions(message, bb) {
if (value != null) {
bb.writeByte(26);
var encoded = MessageLevel[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "MessageLevel"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "MessageLevel"',
);
bb.writeVarUint(encoded);
}
@@ -1899,7 +1948,10 @@ function encodeTransformOptions(message, bb) {
if (value != null) {
bb.writeByte(27);
var encoded = SourceMapMode[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "SourceMapMode"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "SourceMapMode"',
);
bb.writeByte(encoded);
}
bb.writeByte(0);
@@ -2006,7 +2058,10 @@ function encodeTransform(message, bb) {
if (value != null) {
bb.writeByte(4);
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Loader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Loader"',
);
bb.writeByte(encoded);
}
@@ -2061,7 +2116,10 @@ function encodeScan(message, bb) {
if (value != null) {
bb.writeByte(3);
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Loader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Loader"',
);
bb.writeByte(encoded);
}
bb.writeByte(0);
@@ -2126,7 +2184,10 @@ function encodeScannedImport(message, bb) {
var value = message["kind"];
if (value != null) {
var encoded = ImportKind[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "ImportKind"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "ImportKind"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "kind"');
@@ -2223,7 +2284,11 @@ function encodeTransformResponse(message, bb) {
if (value != null) {
var encoded = TransformResponseStatus[value];
if (encoded === void 0)
throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "TransformResponseStatus"');
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "TransformResponseStatus"',
);
bb.writeVarUint(encoded);
} else {
throw new Error('Missing required field "status"');
@@ -2434,7 +2499,10 @@ function encodeMessage(message, bb) {
var value = message["level"];
if (value != null) {
var encoded = MessageLevel[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "MessageLevel"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "MessageLevel"',
);
bb.writeVarUint(encoded);
} else {
throw new Error('Missing required field "level"');
@@ -2596,7 +2664,11 @@ function encodeWebsocketMessage(message, bb) {
if (value != null) {
var encoded = WebsocketMessageKind[value];
if (encoded === void 0)
throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "WebsocketMessageKind"');
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "WebsocketMessageKind"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "kind"');
@@ -2624,7 +2696,10 @@ function encodeWebsocketMessageWelcome(message, bb) {
var value = message["javascriptReloader"];
if (value != null) {
var encoded = Reloader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Reloader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Reloader"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "javascriptReloader"');
@@ -2664,7 +2739,10 @@ function encodeWebsocketMessageFileChangeNotification(message, bb) {
var value = message["loader"];
if (value != null) {
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Loader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Loader"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "loader"');
@@ -2684,7 +2762,11 @@ function encodeWebsocketCommand(message, bb) {
if (value != null) {
var encoded = WebsocketCommandKind[value];
if (encoded === void 0)
throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "WebsocketCommandKind"');
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "WebsocketCommandKind"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "kind"');
@@ -2759,7 +2841,10 @@ function encodeWebsocketMessageBuildSuccess(message, bb) {
var value = message["loader"];
if (value != null) {
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Loader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Loader"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "loader"');
@@ -2809,7 +2894,10 @@ function encodeWebsocketMessageBuildFailure(message, bb) {
var value = message["loader"];
if (value != null) {
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Loader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Loader"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "loader"');

View File

@@ -37,28 +37,24 @@ pub fn decodeURLSafe(destination: []u8, source: []const u8) DecodeResult {
pub fn encode(destination: []u8, source: []const u8) usize {
return zig_base64.standard.Encoder.encode(destination, source).len;
}
pub fn decodeLenUpperBound(len: usize) usize {
return zig_base64.standard.Decoder.calcSizeUpperBound(len) catch {
//fallback
return len / 4 * 3;
};
}
/// Given a source string of length len, this returns the amount of
/// memory the destination string should have.
///
/// remember, this is integer math
/// 3 bytes turn into 4 chars
/// ceiling[len / 3] * 4
///
///
pub fn decodeLen(source: anytype) usize {
return zig_base64.standard.Decoder.calcSizeForSlice(source) catch {
//fallback
return source.len / 4 * 3;
};
return (source.len / 4 * 3 + 2);
}
pub fn encodeLen(source: anytype) usize {
return zig_base64.standard.Encoder.calcSize(source.len);
return (source.len + 2) / 3 * 4;
}
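The new bodies replace the zig std helpers with direct integer math: encodeLen is the usual ceil(len / 3) * 4, while decodeLen returns the slight over-estimate len / 4 * 3 + 2 (the doc comment above it describes the encode direction, 3 bytes into 4 chars). The same arithmetic as a TypeScript sketch with a worked example:

function encodeLen(byteLen: number): number {
  return Math.floor((byteLen + 2) / 3) * 4; // ceil(byteLen / 3) * 4
}
function decodeLen(charLen: number): number {
  return Math.floor(charLen / 4) * 3 + 2;   // upper bound; never under-allocates
}
console.log(encodeLen(5)); // 8: five bytes encode to eight base64 characters
console.log(decodeLen(8)); // 8: eight characters decode to at most six bytes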
pub const urlsafe = zig_base64.Base64DecoderWithIgnore.init(
zig_base64.url_safe_alphabet_chars,
null,

View File

@@ -1 +1 @@
5
0

View File

@@ -964,7 +964,6 @@ fn doResolveWithArgs(
comptime is_file_path: bool,
) ?JSC.JSValue {
var errorable: ErrorableZigString = undefined;
var query_string = ZigString.Empty;
if (comptime is_file_path) {
VirtualMachine.resolveFilePathForAPI(
@@ -972,7 +971,6 @@ fn doResolveWithArgs(
ctx.ptr(),
specifier,
from,
&query_string,
is_esm,
);
} else {
@@ -981,7 +979,6 @@ fn doResolveWithArgs(
ctx.ptr(),
specifier,
from,
&query_string,
is_esm,
);
}
@@ -991,23 +988,7 @@ fn doResolveWithArgs(
return null;
}
if (query_string.len > 0) {
var stack = std.heap.stackFallback(1024, ctx.allocator());
const allocator = stack.get();
var arraylist = std.ArrayList(u8).initCapacity(allocator, 1024) catch unreachable;
defer arraylist.deinit();
arraylist.writer().print("{any}{any}", .{
errorable.result.value,
query_string,
}) catch {
JSC.JSError(allocator, "Failed to allocate memory", .{}, ctx, exception);
return null;
};
return ZigString.initUTF8(arraylist.items).toValueGC(ctx);
}
return errorable.result.value.toValue(ctx);
return errorable.result.value.toValue(ctx.ptr());
}
pub fn resolveSync(
@@ -1548,19 +1529,8 @@ pub const Crypto = struct {
return EVP.init(algorithm, BoringSSL.EVP_blake2b256(), engine);
}
switch (algorithm) {
.md4 => return EVP.init(algorithm, BoringSSL.EVP_md4(), engine),
.md5 => return EVP.init(algorithm, BoringSSL.EVP_md5(), engine),
.sha1 => return EVP.init(algorithm, BoringSSL.EVP_sha1(), engine),
.sha224 => return EVP.init(algorithm, BoringSSL.EVP_sha224(), engine),
.sha256 => return EVP.init(algorithm, BoringSSL.EVP_sha256(), engine),
.sha384 => return EVP.init(algorithm, BoringSSL.EVP_sha384(), engine),
.sha512 => return EVP.init(algorithm, BoringSSL.EVP_sha512(), engine),
.@"sha512-256" => return EVP.init(algorithm, BoringSSL.EVP_sha512_256(), engine),
else => {
if (BoringSSL.EVP_get_digestbyname(@tagName(algorithm))) |md|
return EVP.init(algorithm, md, engine);
},
if (BoringSSL.EVP_get_digestbyname(@tagName(algorithm))) |md| {
return EVP.init(algorithm, md, engine);
}
}
@@ -1582,7 +1552,7 @@ pub const Crypto = struct {
_ = BoringSSL.ERR_error_string_n(err_code, message_buf, message_buf.len);
const error_message: []const u8 = bun.sliceTo(outbuf[0..], 0);
const error_message: []const u8 = bun.span(std.meta.assumeSentinel(&outbuf, 0));
if (error_message.len == "BoringSSL error: ".len) {
return ZigString.static("Unknown BoringSSL error").toErrorInstance(globalThis);
}
@@ -2328,7 +2298,15 @@ pub fn getTranspilerConstructor(
_: js.JSStringRef,
_: js.ExceptionRef,
) js.JSValueRef {
return JSC.API.Bun.Transpiler.getConstructor(ctx).asObjectRef();
var existing = ctx.ptr().getCachedObject(ZigString.static("BunTranspiler"));
if (existing.isEmpty()) {
return ctx.ptr().putCachedObject(
&ZigString.init("BunTranspiler"),
JSC.JSValue.fromRef(Transpiler.Constructor.constructor(ctx)),
).asObjectRef();
}
return existing.asObjectRef();
}
pub fn getFileSystemRouter(
@@ -2832,7 +2810,7 @@ pub const Timer = struct {
}
var this = args.ptr[1].asPtr(CallbackJob);
globalThis.bunVM().onUnhandledError(globalThis, args.ptr[0]);
globalThis.bunVM().runErrorHandlerWithDedupe(args.ptr[0], null);
this.deinit();
return JSValue.jsUndefined();
}
@@ -2901,7 +2879,7 @@ pub const Timer = struct {
}
if (result.isAnyError()) {
vm.onUnhandledError(globalThis, result);
vm.runErrorHandlerWithDedupe(result, null);
this.deinit();
return;
}
@@ -2910,7 +2888,7 @@ pub const Timer = struct {
switch (promise.status(globalThis.vm())) {
.Rejected => {
this.deinit();
vm.onUnhandledError(globalThis, promise.result(globalThis.vm()));
vm.runErrorHandlerWithDedupe(promise.result(globalThis.vm()), null);
},
.Fulfilled => {
this.deinit();
@@ -3021,7 +2999,7 @@ pub const Timer = struct {
var vm = this.globalThis.bunVM();
this.poll_ref.unrefOnNextTick(vm);
this.poll_ref.unref(vm);
this.timer.deinit();
this.callback.deinit();
this.arguments.deinit();
@@ -3139,16 +3117,12 @@ pub const Timer = struct {
return JSValue.jsNumberWithType(i32, id);
}
pub fn clearTimer(timer_id: JSValue, globalThis: *JSGlobalObject, repeats: bool) void {
pub fn clearTimer(timer_id: JSValue, _: *JSGlobalObject, repeats: bool) void {
JSC.markBinding(@src());
var map = if (repeats) &VirtualMachine.get().timer.interval_map else &VirtualMachine.get().timer.timeout_map;
if (!timer_id.isAnyInt()) {
return;
}
const id: Timeout.ID = .{
.id = timer_id.coerce(i32, globalThis),
.id = timer_id.toInt32(),
.repeat = @as(u32, @boolToInt(repeats)),
};
var timer = map.fetchSwapRemove(id.id) orelse return;
@@ -3902,8 +3876,8 @@ pub const EnvironmentVariables = struct {
}
};
export fn Bun__reportError(globalObject: *JSGlobalObject, err: JSC.JSValue) void {
JSC.VirtualMachine.runErrorHandlerWithDedupe(globalObject.bunVM(), err, null);
export fn Bun__reportError(_: *JSGlobalObject, err: JSC.JSValue) void {
JSC.VirtualMachine.get().runErrorHandler(err, null);
}
comptime {
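The Crypto.createHash hunk above collapses the per-algorithm switch into a single lookup of the digest by name. The node:crypto analogue of that idea, for illustration only (not Bun's internal EVP wiring):

import { createHash, getHashes } from "node:crypto";

function digestHex(algorithm: string, data: string): string {
  // Same idea as EVP_get_digestbyname: ask the backend whether it knows the
  // algorithm instead of enumerating the supported names by hand.
  if (!getHashes().includes(algorithm)) {
    throw new Error(`Unsupported digest: ${algorithm}`);
  }
  return createHash(algorithm).update(data).digest("hex");
}

console.log(digestHex("sha256", "hello"));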

View File

@@ -614,104 +614,6 @@ pub const GetAddrInfo = struct {
};
};
pub fn ResolveInfoRequest(comptime cares_type: type, comptime type_name: []const u8) type {
return struct {
const request_type = @This();
const log = Output.scoped(@This(), false);
resolver_for_caching: ?*DNSResolver = null,
hash: u64 = 0,
cache: @This().CacheConfig = @This().CacheConfig{},
head: CAresLookup(cares_type, type_name),
tail: *CAresLookup(cares_type, type_name) = undefined,
pub fn init(
cache: DNSResolver.LookupCacheHit(@This()),
resolver: ?*DNSResolver,
name: []const u8,
globalThis: *JSC.JSGlobalObject,
comptime cache_field: []const u8,
) !*@This() {
var request = try globalThis.allocator().create(@This());
var hasher = std.hash.Wyhash.init(0);
hasher.update(name);
const hash = hasher.final();
var poll_ref = JSC.PollRef.init();
poll_ref.ref(globalThis.bunVM());
request.* = .{
.resolver_for_caching = resolver,
.hash = hash,
.head = .{ .poll_ref = poll_ref, .globalThis = globalThis, .promise = JSC.JSPromise.Strong.init(globalThis), .allocated = false, .name = name },
};
request.tail = &request.head;
if (cache == .new) {
request.resolver_for_caching = resolver;
request.cache = @This().CacheConfig{
.pending_cache = true,
.entry_cache = false,
.pos_in_pending = @truncate(u5, @field(resolver.?, cache_field).indexOf(cache.new).?),
.name_len = @truncate(u9, name.len),
};
cache.new.lookup = request;
}
return request;
}
pub const CacheConfig = packed struct(u16) {
pending_cache: bool = false,
entry_cache: bool = false,
pos_in_pending: u5 = 0,
name_len: u9 = 0,
};
pub const PendingCacheKey = struct {
hash: u64,
len: u16,
lookup: *request_type = undefined,
pub fn append(this: *PendingCacheKey, cares_lookup: *CAresLookup(cares_type, type_name)) void {
var tail = this.lookup.tail;
tail.next = cares_lookup;
this.lookup.tail = cares_lookup;
}
pub fn init(name: []const u8) PendingCacheKey {
var hasher = std.hash.Wyhash.init(0);
hasher.update(name);
const hash = hasher.final();
return PendingCacheKey{
.hash = hash,
.len = @truncate(u16, name.len),
.lookup = undefined,
};
}
};
pub fn onCaresComplete(this: *@This(), err_: ?c_ares.Error, timeout: i32, result: ?*cares_type) void {
if (this.resolver_for_caching) |resolver| {
if (this.cache.pending_cache) {
resolver.drainPendingCares(
this.cache.pos_in_pending,
err_,
timeout,
@This(),
cares_type,
type_name,
result,
);
return;
}
}
var head = this.head;
bun.default_allocator.destroy(this);
head.processResolve(err_, timeout, result);
}
};
}
pub const GetAddrInfoRequest = struct {
const log = Output.scoped(.GetAddrInfoRequest, false);
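The ResolveInfoRequest machinery in the hunk above exists to coalesce concurrent lookups: the name is hashed into a pending-cache slot, later callers append themselves to the in-flight request, and drainPendingCares fans the single c-ares result back out to every waiter. A minimal TypeScript sketch of that coalescing idea (not Bun's implementation; doLookup stands in for the actual resolver call):

const inflight = new Map<string, Promise<string[]>>();

async function resolveOnce(
  name: string,
  doLookup: (n: string) => Promise<string[]>,
): Promise<string[]> {
  const pending = inflight.get(name);
  if (pending) return pending; // join the request that is already in flight
  const request = doLookup(name).finally(() => inflight.delete(name));
  inflight.set(name, request);
  return request;              // one upstream query, shared result
}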
@@ -732,15 +634,12 @@ pub const GetAddrInfoRequest = struct {
comptime cache_field: []const u8,
) !*GetAddrInfoRequest {
var request = try globalThis.allocator().create(GetAddrInfoRequest);
var poll_ref = JSC.PollRef.init();
poll_ref.ref(globalThis.bunVM());
request.* = .{
.backend = backend,
.resolver_for_caching = resolver,
.hash = query.hash(),
.head = .{
.globalThis = globalThis,
.poll_ref = poll_ref,
.promise = JSC.JSPromise.Strong.init(globalThis),
.allocated = false,
},
@@ -831,7 +730,7 @@ pub const GetAddrInfoRequest = struct {
var hostname: [bun.MAX_PATH_BYTES]u8 = undefined;
_ = strings.copy(hostname[0..], query.name);
hostname[query.name.len] = 0;
var addrinfo: ?*std.c.addrinfo = null;
var addrinfo: *std.c.addrinfo = undefined;
var host = hostname[0..query.name.len :0];
const debug_timer = bun.Output.DebugTimer.start();
const err = std.c.getaddrinfo(
@@ -846,16 +745,16 @@ pub const GetAddrInfoRequest = struct {
err,
debug_timer,
});
if (@enumToInt(err) != 0 or addrinfo == null) {
if (@enumToInt(err) != 0) {
this.* = .{ .err = @enumToInt(err) };
return;
}
// do not free addrinfo when err != 0
// https://github.com/ziglang/zig/pull/14242
defer std.c.freeaddrinfo(addrinfo.?);
defer std.c.freeaddrinfo(addrinfo);
this.* = .{ .success = GetAddrInfo.Result.toList(default_allocator, addrinfo.?) catch unreachable };
this.* = .{ .success = GetAddrInfo.Result.toList(default_allocator, addrinfo) catch unreachable };
}
},
@@ -933,78 +832,6 @@ pub const GetAddrInfoRequest = struct {
}
};
pub fn CAresLookup(comptime cares_type: type, comptime type_name: []const u8) type {
return struct {
const log = Output.scoped(@This(), true);
globalThis: *JSC.JSGlobalObject = undefined,
promise: JSC.JSPromise.Strong,
poll_ref: JSC.PollRef,
allocated: bool = false,
next: ?*@This() = null,
name: []const u8,
pub fn init(globalThis: *JSC.JSGlobalObject, allocator: std.mem.Allocator, name: []const u8) !*@This() {
var this = try allocator.create(@This());
var poll_ref = JSC.PollRef.init();
poll_ref.ref(globalThis.bunVM());
this.* = .{ .globalThis = globalThis, .promise = JSC.JSPromise.Strong.init(globalThis), .poll_ref = poll_ref, .allocated = true, .name = name };
return this;
}
pub fn processResolve(this: *@This(), err_: ?c_ares.Error, _: i32, result: ?*cares_type) void {
if (err_) |err| {
var promise = this.promise;
var globalThis = this.globalThis;
const error_value = globalThis.createErrorInstance("{s} lookup failed: {s}", .{ type_name, err.label() });
error_value.put(
globalThis,
JSC.ZigString.static("code"),
JSC.ZigString.init(err.code()).toValueGC(globalThis),
);
promise.reject(globalThis, error_value);
this.deinit();
return;
}
if (result == null) {
var promise = this.promise;
var globalThis = this.globalThis;
const error_value = globalThis.createErrorInstance("{s} lookup failed: {s}", .{ type_name, "No results" });
error_value.put(
globalThis,
JSC.ZigString.static("code"),
JSC.ZigString.init("EUNREACHABLE").toValueGC(globalThis),
);
promise.reject(globalThis, error_value);
this.deinit();
return;
}
var node = result.?;
const array = node.toJSReponse(this.globalThis.allocator(), this.globalThis, type_name);
this.onComplete(array);
return;
}
pub fn onComplete(this: *@This(), result: JSC.JSValue) void {
var promise = this.promise;
var globalThis = this.globalThis;
this.promise = .{};
promise.resolve(globalThis, result);
this.deinit();
}
pub fn deinit(this: *@This()) void {
this.poll_ref.unrefOnNextTick(this.globalThis.bunVM());
bun.default_allocator.free(this.name);
if (this.allocated)
this.globalThis.allocator().destroy(this);
}
};
}
pub const DNSLookup = struct {
const log = Output.scoped(.DNSLookup, true);
@@ -1012,16 +839,11 @@ pub const DNSLookup = struct {
promise: JSC.JSPromise.Strong,
allocated: bool = false,
next: ?*DNSLookup = null,
poll_ref: JSC.PollRef,
pub fn init(globalThis: *JSC.JSGlobalObject, allocator: std.mem.Allocator) !*DNSLookup {
var this = try allocator.create(DNSLookup);
var poll_ref = JSC.PollRef.init();
poll_ref.ref(globalThis.bunVM());
this.* = .{
.globalThis = globalThis,
.poll_ref = poll_ref,
.promise = JSC.JSPromise.Strong.init(globalThis),
.allocated = true,
};
@@ -1071,8 +893,9 @@ pub const DNSLookup = struct {
JSC.ZigString.init(err.code()).toValueGC(globalThis),
);
promise.reject(globalThis, error_value);
this.deinit();
promise.reject(globalThis, error_value);
return;
}
@@ -1086,8 +909,8 @@ pub const DNSLookup = struct {
JSC.ZigString.init("EUNREACHABLE").toValueGC(globalThis),
);
promise.reject(globalThis, error_value);
this.deinit();
promise.reject(globalThis, error_value);
return;
}
@@ -1104,12 +927,11 @@ pub const DNSLookup = struct {
var globalThis = this.globalThis;
this.promise = .{};
promise.resolve(globalThis, result);
this.deinit();
promise.resolveOnNextTick(globalThis, result);
}
pub fn deinit(this: *DNSLookup) void {
this.poll_ref.unrefOnNextTick(this.globalThis.bunVM());
if (this.allocated)
this.globalThis.allocator().destroy(this);
}
@@ -1140,29 +962,12 @@ pub const DNSResolver = struct {
pending_host_cache_cares: PendingCache = PendingCache.init(),
pending_host_cache_native: PendingCache = PendingCache.init(),
pending_srv_cache_cares: SrvPendingCache = SrvPendingCache.init(),
pending_soa_cache_cares: SoaPendingCache = SoaPendingCache.init(),
pending_txt_cache_cares: TxtPendingCache = TxtPendingCache.init(),
pending_naptr_cache_cares: NaptrPendingCache = NaptrPendingCache.init(),
pending_mx_cache_cares: MxPendingCache = MxPendingCache.init(),
pending_caa_cache_cares: CaaPendingCache = CaaPendingCache.init(),
pending_ns_cache_cares: NSPendingCache = NSPendingCache.init(),
pending_ptr_cache_cares: PtrPendingCache = PtrPendingCache.init(),
pending_cname_cache_cares: CnamePendingCache = CnamePendingCache.init(),
// entry_host_cache: std.BoundedArray(128)
const PendingCache = bun.HiveArray(GetAddrInfoRequest.PendingCacheKey, 32);
const SrvPendingCache = bun.HiveArray(ResolveInfoRequest(c_ares.struct_ares_srv_reply, "srv").PendingCacheKey, 32);
const SoaPendingCache = bun.HiveArray(ResolveInfoRequest(c_ares.struct_ares_soa_reply, "soa").PendingCacheKey, 32);
const TxtPendingCache = bun.HiveArray(ResolveInfoRequest(c_ares.struct_ares_txt_reply, "txt").PendingCacheKey, 32);
const NaptrPendingCache = bun.HiveArray(ResolveInfoRequest(c_ares.struct_ares_naptr_reply, "naptr").PendingCacheKey, 32);
const MxPendingCache = bun.HiveArray(ResolveInfoRequest(c_ares.struct_ares_mx_reply, "mx").PendingCacheKey, 32);
const CaaPendingCache = bun.HiveArray(ResolveInfoRequest(c_ares.struct_ares_caa_reply, "caa").PendingCacheKey, 32);
const NSPendingCache = bun.HiveArray(ResolveInfoRequest(c_ares.struct_hostent, "ns").PendingCacheKey, 32);
const PtrPendingCache = bun.HiveArray(ResolveInfoRequest(c_ares.struct_hostent, "ptr").PendingCacheKey, 32);
const CnamePendingCache = bun.HiveArray(ResolveInfoRequest(c_ares.struct_hostent, "cname").PendingCacheKey, 32);
fn getKey(this: *DNSResolver, index: u8, comptime cache_name: []const u8, comptime request_type: type) request_type.PendingCacheKey {
var cache = &@field(this, cache_name);
fn getKey(this: *DNSResolver, index: u8, comptime cache_name: []const u8) GetAddrInfoRequest.PendingCacheKey {
var cache: *PendingCache = &@field(this, cache_name);
std.debug.assert(!cache.available.isSet(index));
const entry = cache.buffer[index];
cache.buffer[index] = undefined;
@@ -1174,51 +979,8 @@ pub const DNSResolver = struct {
return entry;
}
pub fn drainPendingCares(this: *DNSResolver, index: u8, err: ?c_ares.Error, timeout: i32, comptime request_type: type, comptime cares_type: type, comptime lookup_name: []const u8, result: ?*cares_type) void {
const cache_name = comptime std.fmt.comptimePrint("pending_{s}_cache_cares", .{lookup_name});
const key = this.getKey(index, cache_name, request_type);
var addr = result orelse {
var pending: ?*CAresLookup(cares_type, lookup_name) = key.lookup.head.next;
key.lookup.head.processResolve(err, timeout, null);
bun.default_allocator.destroy(key.lookup);
while (pending) |value| {
pending = value.next;
value.processResolve(err, timeout, null);
}
return;
};
var pending: ?*CAresLookup(cares_type, lookup_name) = key.lookup.head.next;
var prev_global = key.lookup.head.globalThis;
var array = addr.toJSReponse(this.vm.allocator, prev_global, lookup_name);
defer addr.deinit();
array.ensureStillAlive();
key.lookup.head.onComplete(array);
bun.default_allocator.destroy(key.lookup);
array.ensureStillAlive();
while (pending) |value| {
var new_global = value.globalThis;
if (prev_global != new_global) {
array = addr.toJSReponse(this.vm.allocator, new_global, lookup_name);
prev_global = new_global;
}
pending = value.next;
{
array.ensureStillAlive();
value.onComplete(array);
array.ensureStillAlive();
}
}
}
pub fn drainPendingHostCares(this: *DNSResolver, index: u8, err: ?c_ares.Error, timeout: i32, result: ?*c_ares.AddrInfo) void {
const key = this.getKey(index, "pending_host_cache_cares", GetAddrInfoRequest);
const key = this.getKey(index, "pending_host_cache_cares");
var addr = result orelse {
var pending: ?*DNSLookup = key.lookup.head.next;
@@ -1260,7 +1022,7 @@ pub const DNSResolver = struct {
}
pub fn drainPendingHostNative(this: *DNSResolver, index: u8, globalObject: *JSC.JSGlobalObject, err: i32, result: GetAddrInfo.Result.Any) void {
const key = this.getKey(index, "pending_host_cache_native", GetAddrInfoRequest);
const key = this.getKey(index, "pending_host_cache_native");
var array = result.toJS(globalObject) orelse {
var pending: ?*DNSLookup = key.lookup.head.next;
@@ -1309,43 +1071,6 @@ pub const DNSResolver = struct {
disabled: void,
};
pub fn LookupCacheHit(comptime request_type: type) type {
return union(enum) {
inflight: *request_type.PendingCacheKey,
new: *request_type.PendingCacheKey,
disabled: void,
};
}
pub fn getOrPutIntoResolvePendingCache(
this: *DNSResolver,
comptime request_type: type,
key: request_type.PendingCacheKey,
comptime field: []const u8,
) LookupCacheHit(request_type) {
var cache = &@field(this, field);
var inflight_iter = cache.available.iterator(
.{
.kind = .unset,
},
);
while (inflight_iter.next()) |index| {
var entry: *request_type.PendingCacheKey = &cache.buffer[index];
if (entry.hash == key.hash and entry.len == key.len) {
return .{ .inflight = entry };
}
}
if (cache.get()) |new| {
new.hash = key.hash;
new.len = key.len;
return .{ .new = new };
}
return .{ .disabled = {} };
}
pub fn getOrPutIntoPendingCache(
this: *DNSResolver,
key: GetAddrInfoRequest.PendingCacheKey,
@@ -1511,6 +1236,7 @@ pub const DNSResolver = struct {
return .zero;
};
};
_ = record_type;
const name_value = arguments.ptr[0];
@@ -1528,44 +1254,9 @@ pub const DNSResolver = struct {
return .zero;
}
const name = name_str.toSliceClone(globalThis, bun.default_allocator);
var vm = globalThis.bunVM();
var resolver = vm.rareData().globalDNSResolver(vm);
//TODO: ANY CASE
switch (record_type) {
RecordType.A => {
defer name.deinit();
const options = GetAddrInfo.Options{ .family = GetAddrInfo.Family.inet };
return resolver.doLookup(name.slice(), 0, options, globalThis);
},
RecordType.AAAA => {
defer name.deinit();
const options = GetAddrInfo.Options{ .family = GetAddrInfo.Family.inet6 };
return resolver.doLookup(name.slice(), 0, options, globalThis);
},
RecordType.CNAME => {
return resolver.doResolveCAres(c_ares.struct_hostent, "cname", name.slice(), globalThis);
},
RecordType.MX => {
return resolver.doResolveCAres(c_ares.struct_ares_mx_reply, "mx", name.slice(), globalThis);
},
RecordType.NS => {
return resolver.doResolveCAres(c_ares.struct_hostent, "ns", name.slice(), globalThis);
},
RecordType.PTR => {
return resolver.doResolveCAres(c_ares.struct_hostent, "ptr", name.slice(), globalThis);
},
RecordType.SOA => {
return resolver.doResolveCAres(c_ares.struct_ares_soa_reply, "soa", name.slice(), globalThis);
},
RecordType.SRV => {
return resolver.doResolveCAres(c_ares.struct_ares_srv_reply, "srv", name.slice(), globalThis);
},
RecordType.TXT => {
return resolver.doResolveCAres(c_ares.struct_ares_txt_reply, "txt", name.slice(), globalThis);
},
}
// const name = name_str.toSliceZ(globalThis).cloneZ(bun.default_allocator) catch unreachable;
// TODO:
return JSC.JSValue.jsUndefined();
}
// pub fn reverse(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
// const arguments = callframe.arguments(3);
@@ -1639,332 +1330,6 @@ pub const DNSResolver = struct {
};
}
pub fn resolveSrv(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
const arguments = callframe.arguments(2);
if (arguments.len < 1) {
globalThis.throwNotEnoughArguments("resolveSrv", 2, arguments.len);
return .zero;
}
const name_value = arguments.ptr[0];
if (name_value.isEmptyOrUndefinedOrNull() or !name_value.isString()) {
globalThis.throwInvalidArgumentType("resolveSrv", "hostname", "string");
return .zero;
}
const name_str = name_value.toStringOrNull(globalThis) orelse {
return .zero;
};
if (name_str.length() == 0) {
globalThis.throwInvalidArgumentType("resolveSrv", "hostname", "non-empty string");
return .zero;
}
const name = name_str.toSliceClone(globalThis, bun.default_allocator);
var vm = globalThis.bunVM();
var resolver = vm.rareData().globalDNSResolver(vm);
return resolver.doResolveCAres(c_ares.struct_ares_srv_reply, "srv", name.slice(), globalThis);
}
pub fn resolveSoa(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
const arguments = callframe.arguments(2);
if (arguments.len < 1) {
globalThis.throwNotEnoughArguments("resolveSoa", 2, arguments.len);
return .zero;
}
const name_value = arguments.ptr[0];
if (name_value.isEmptyOrUndefinedOrNull() or !name_value.isString()) {
globalThis.throwInvalidArgumentType("resolveSoa", "hostname", "string");
return .zero;
}
const name_str = name_value.toStringOrNull(globalThis) orelse {
return .zero;
};
if (name_str.length() == 0) {
globalThis.throwInvalidArgumentType("resolveSoa", "hostname", "non-empty string");
return .zero;
}
const name = name_str.toSliceClone(globalThis, bun.default_allocator);
var vm = globalThis.bunVM();
var resolver = vm.rareData().globalDNSResolver(vm);
return resolver.doResolveCAres(c_ares.struct_ares_soa_reply, "soa", name.slice(), globalThis);
}
pub fn resolveCaa(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
const arguments = callframe.arguments(2);
if (arguments.len < 1) {
globalThis.throwNotEnoughArguments("resolveCaa", 2, arguments.len);
return .zero;
}
const name_value = arguments.ptr[0];
if (name_value.isEmptyOrUndefinedOrNull() or !name_value.isString()) {
globalThis.throwInvalidArgumentType("resolveCaa", "hostname", "string");
return .zero;
}
const name_str = name_value.toStringOrNull(globalThis) orelse {
return .zero;
};
if (name_str.length() == 0) {
globalThis.throwInvalidArgumentType("resolveCaa", "hostname", "non-empty string");
return .zero;
}
const name = name_str.toSliceClone(globalThis, bun.default_allocator);
var vm = globalThis.bunVM();
var resolver = vm.rareData().globalDNSResolver(vm);
return resolver.doResolveCAres(c_ares.struct_ares_caa_reply, "caa", name.slice(), globalThis);
}
pub fn resolveNs(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
const arguments = callframe.arguments(2);
if (arguments.len < 1) {
globalThis.throwNotEnoughArguments("resolveNs", 2, arguments.len);
return .zero;
}
const name_value = arguments.ptr[0];
if (name_value.isEmptyOrUndefinedOrNull() or !name_value.isString()) {
globalThis.throwInvalidArgumentType("resolveNs", "hostname", "string");
return .zero;
}
const name_str = name_value.toStringOrNull(globalThis) orelse {
return .zero;
};
if (name_str.length() == 0) {
globalThis.throwInvalidArgumentType("resolveNs", "hostname", "non-empty string");
return .zero;
}
const name = name_str.toSliceClone(globalThis, bun.default_allocator);
var vm = globalThis.bunVM();
var resolver = vm.rareData().globalDNSResolver(vm);
return resolver.doResolveCAres(c_ares.struct_hostent, "ns", name.slice(), globalThis);
}
pub fn resolvePtr(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
const arguments = callframe.arguments(2);
if (arguments.len < 1) {
globalThis.throwNotEnoughArguments("resolvePtr", 2, arguments.len);
return .zero;
}
const name_value = arguments.ptr[0];
if (name_value.isEmptyOrUndefinedOrNull() or !name_value.isString()) {
globalThis.throwInvalidArgumentType("resolvePtr", "hostname", "string");
return .zero;
}
const name_str = name_value.toStringOrNull(globalThis) orelse {
return .zero;
};
if (name_str.length() == 0) {
globalThis.throwInvalidArgumentType("resolvePtr", "hostname", "non-empty string");
return .zero;
}
const name = name_str.toSliceClone(globalThis, bun.default_allocator);
var vm = globalThis.bunVM();
var resolver = vm.rareData().globalDNSResolver(vm);
return resolver.doResolveCAres(c_ares.struct_hostent, "ptr", name.slice(), globalThis);
}
pub fn resolveCname(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
const arguments = callframe.arguments(2);
if (arguments.len < 1) {
globalThis.throwNotEnoughArguments("resolveCname", 2, arguments.len);
return .zero;
}
const name_value = arguments.ptr[0];
if (name_value.isEmptyOrUndefinedOrNull() or !name_value.isString()) {
globalThis.throwInvalidArgumentType("resolveCname", "hostname", "string");
return .zero;
}
const name_str = name_value.toStringOrNull(globalThis) orelse {
return .zero;
};
if (name_str.length() == 0) {
globalThis.throwInvalidArgumentType("resolveCname", "hostname", "non-empty string");
return .zero;
}
const name = name_str.toSliceClone(globalThis, bun.default_allocator);
var vm = globalThis.bunVM();
var resolver = vm.rareData().globalDNSResolver(vm);
return resolver.doResolveCAres(c_ares.struct_hostent, "cname", name.slice(), globalThis);
}
pub fn resolveMx(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
const arguments = callframe.arguments(2);
if (arguments.len < 1) {
globalThis.throwNotEnoughArguments("resolveMx", 2, arguments.len);
return .zero;
}
const name_value = arguments.ptr[0];
if (name_value.isEmptyOrUndefinedOrNull() or !name_value.isString()) {
globalThis.throwInvalidArgumentType("resolveMx", "hostname", "string");
return .zero;
}
const name_str = name_value.toStringOrNull(globalThis) orelse {
return .zero;
};
if (name_str.length() == 0) {
globalThis.throwInvalidArgumentType("resolveMx", "hostname", "non-empty string");
return .zero;
}
const name = name_str.toSliceClone(globalThis, bun.default_allocator);
var vm = globalThis.bunVM();
var resolver = vm.rareData().globalDNSResolver(vm);
return resolver.doResolveCAres(c_ares.struct_ares_mx_reply, "mx", name.slice(), globalThis);
}
pub fn resolveNaptr(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
const arguments = callframe.arguments(2);
if (arguments.len < 1) {
globalThis.throwNotEnoughArguments("resolveNaptr", 2, arguments.len);
return .zero;
}
const name_value = arguments.ptr[0];
if (name_value.isEmptyOrUndefinedOrNull() or !name_value.isString()) {
globalThis.throwInvalidArgumentType("resolveNaptr", "hostname", "string");
return .zero;
}
const name_str = name_value.toStringOrNull(globalThis) orelse {
return .zero;
};
if (name_str.length() == 0) {
globalThis.throwInvalidArgumentType("resolveNaptr", "hostname", "non-empty string");
return .zero;
}
const name = name_str.toSliceClone(globalThis, bun.default_allocator);
var vm = globalThis.bunVM();
var resolver = vm.rareData().globalDNSResolver(vm);
return resolver.doResolveCAres(c_ares.struct_ares_naptr_reply, "naptr", name.slice(), globalThis);
}
pub fn resolveTxt(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
const arguments = callframe.arguments(2);
if (arguments.len < 1) {
globalThis.throwNotEnoughArguments("resolveTxt", 2, arguments.len);
return .zero;
}
const name_value = arguments.ptr[0];
if (name_value.isEmptyOrUndefinedOrNull() or !name_value.isString()) {
globalThis.throwInvalidArgumentType("resolveTxt", "hostname", "string");
return .zero;
}
const name_str = name_value.toStringOrNull(globalThis) orelse {
return .zero;
};
if (name_str.length() == 0) {
globalThis.throwInvalidArgumentType("resolveTxt", "hostname", "non-empty string");
return .zero;
}
const name = name_str.toSliceClone(globalThis, bun.default_allocator);
var vm = globalThis.bunVM();
var resolver = vm.rareData().globalDNSResolver(vm);
return resolver.doResolveCAres(c_ares.struct_ares_txt_reply, "txt", name.slice(), globalThis);
}
pub fn doResolveCAres(this: *DNSResolver, comptime cares_type: type, comptime type_name: []const u8, name: []const u8, globalThis: *JSC.JSGlobalObject) JSC.JSValue {
var channel: *c_ares.Channel = switch (this.getChannel()) {
.result => |res| res,
.err => |err| {
const system_error = JSC.SystemError{
.errno = -1,
.code = JSC.ZigString.init(err.code()),
.message = JSC.ZigString.init(err.label()),
};
globalThis.throwValue(system_error.toErrorInstance(globalThis));
return .zero;
},
};
const cache_name = comptime std.fmt.comptimePrint("pending_{s}_cache_cares", .{type_name});
const key = ResolveInfoRequest(cares_type, type_name).PendingCacheKey.init(name);
var cache = this.getOrPutIntoResolvePendingCache(ResolveInfoRequest(cares_type, type_name), key, cache_name);
if (cache == .inflight) {
// CAresLookup will have the name ownership
var cares_lookup = CAresLookup(cares_type, type_name).init(globalThis, globalThis.allocator(), name) catch unreachable;
cache.inflight.append(cares_lookup);
return cares_lookup.promise.value();
}
var request = ResolveInfoRequest(cares_type, type_name).init(
cache,
this,
name, // CAresLookup will have the ownership
globalThis,
cache_name,
) catch unreachable;
const promise = request.tail.promise.value();
channel.resolve(
name,
type_name,
ResolveInfoRequest(cares_type, type_name),
request,
cares_type,
ResolveInfoRequest(cares_type, type_name).onCaresComplete,
);
return promise;
}
pub fn c_aresLookupWithNormalizedName(this: *DNSResolver, query: GetAddrInfo, globalThis: *JSC.JSGlobalObject) JSC.JSValue {
var channel: *c_ares.Channel = switch (this.getChannel()) {
.result => |res| res,
@@ -2015,72 +1380,12 @@ pub const DNSResolver = struct {
}
comptime {
@export(
resolve,
.{
.name = "Bun__DNSResolver__resolve",
},
);
@export(
lookup,
.{
.name = "Bun__DNSResolver__lookup",
},
);
@export(
resolveTxt,
.{
.name = "Bun__DNSResolver__resolveTxt",
},
);
@export(
resolveSoa,
.{
.name = "Bun__DNSResolver__resolveSoa",
},
);
@export(
resolveMx,
.{
.name = "Bun__DNSResolver__resolveMx",
},
);
@export(
resolveNaptr,
.{
.name = "Bun__DNSResolver__resolveNaptr",
},
);
@export(
resolveSrv,
.{
.name = "Bun__DNSResolver__resolveSrv",
},
);
@export(
resolveCaa,
.{
.name = "Bun__DNSResolver__resolveCaa",
},
);
@export(
resolveNs,
.{
.name = "Bun__DNSResolver__resolveNs",
},
);
@export(
resolvePtr,
.{
.name = "Bun__DNSResolver__resolvePtr",
},
);
@export(
resolveCname,
.{
.name = "Bun__DNSResolver__resolveCname",
},
);
}
// pub fn lookupService(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue {
// const arguments = callframe.arguments(3);
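For context, the @export block above binds these resolver entry points to the C ABI names consumed by the node:dns bindings. A minimal usage sketch from the JavaScript side, assuming Bun's node:dns promises compatibility layer routes through these exports (the hostnames are placeholders):

import { promises as dns } from "node:dns";

// SRV records come back as { name, port, priority, weight } objects (node:dns shape)
const srv = await dns.resolveSrv("_xmpp-server._tcp.example.com");

// TXT records come back as string[][] (node:dns shape)
const txt = await dns.resolveTxt("example.com");

// lookup() goes through the getaddrinfo path rather than the c-ares record resolvers
const { address, family } = await dns.lookup("example.com");

console.log(srv, txt, address, family);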

View File

@@ -65,9 +65,6 @@ fn normalizeHost(input: anytype) @TypeOf(input) {
return input;
}
const BinaryType = JSC.BinaryType;
const Handlers = struct {
onOpen: JSC.JSValue = .zero,
onClose: JSC.JSValue = .zero,
@@ -78,7 +75,7 @@ const Handlers = struct {
onEnd: JSC.JSValue = .zero,
onError: JSC.JSValue = .zero,
binary_type: BinaryType = .Buffer,
encoding: JSC.Node.Encoding = .utf8,
vm: *JSC.VirtualMachine,
globalObject: *JSC.JSGlobalObject,
@@ -153,6 +150,7 @@ const Handlers = struct {
.{ "onWritable", "drain" },
.{ "onOpen", "open" },
.{ "onClose", "close" },
.{ "onData", "data" },
.{ "onTimeout", "timeout" },
.{ "onConnectError", "connectError" },
.{ "onEnd", "end" },
@@ -174,18 +172,6 @@ const Handlers = struct {
return null;
}
if (opts.getTruthy(globalObject, "binaryType")) |binary_type_value| {
if (!binary_type_value.isString()) {
exception.* = JSC.toInvalidArguments("Expected \"binaryType\" to be a string", .{}, globalObject).asObjectRef();
return null;
}
handlers.binary_type = BinaryType.fromJSValue(globalObject, binary_type_value) orelse {
exception.* = JSC.toInvalidArguments("Expected 'binaryType' to be 'arraybuffer', 'uint8array', 'buffer'", .{}, globalObject).asObjectRef();
return null;
};
}
return handlers;
}
@@ -1158,7 +1144,7 @@ fn NewSocket(comptime ssl: bool) type {
const globalObject = handlers.globalObject;
const this_value = this.getThisValue(globalObject);
const output_value = handlers.binary_type.toJS(data, globalObject);
const output_value = JSC.ArrayBuffer.create(globalObject, data, .Uint8Array);
// const encoding = handlers.encoding;
const result = callback.callWithThis(globalObject, this_value, &[_]JSValue{
this_value,
@@ -1302,7 +1288,7 @@ fn NewSocket(comptime ssl: bool) type {
}
// we don't cork yet but we might later
const res = this.socket.write(buffer, is_end);
log("write({d}, {any}) = {d}", .{ buffer.len, is_end, res });
log("write({d}, {any})", .{ buffer.len, is_end });
return res;
}
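The handler table above maps onData/onOpen/onClose/onWritable/... to the JavaScript-facing "data"/"open"/"close"/"drain" callbacks, and binaryType is validated against 'arraybuffer', 'uint8array', or 'buffer'. A minimal sketch of the corresponding Bun.connect call; the option names mirror that table, and the hostname/port are placeholders:

const socket = await Bun.connect({
  hostname: "example.com", // placeholder
  port: 4000,              // placeholder
  socket: {
    binaryType: "uint8array", // one of "arraybuffer" | "uint8array" | "buffer" (per the validation above)
    open(socket) {
      // write() returns the number of bytes written (see the write log line above)
      socket.write("hello\n");
    },
    data(socket, data) {
      // with binaryType "uint8array", `data` arrives as a Uint8Array
      console.log("received", data.byteLength, "bytes");
    },
    drain(socket) {},
    close(socket) {},
    error(socket, error) {
      console.error(error);
    },
  },
});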

View File

@@ -1480,6 +1480,9 @@ pub const Subprocess = struct {
if (std.os.W.IFSIGNALED(result.status)) {
this.signal_code = @intToEnum(SignalCode, @truncate(u8, std.os.W.TERMSIG(result.status)));
if (this.exit_code == null) {
this.exit_code = 1;
}
} else if (std.os.W.IFSTOPPED(result.status)) {
this.signal_code = @intToEnum(SignalCode, @truncate(u8, std.os.W.STOPSIG(result.status)));
}
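The hunk above makes a signal-terminated child report a numeric exit code (1) instead of leaving it null. A small sketch of how that surfaces through Bun.spawn, assuming the documented Subprocess fields (exitCode, signalCode, exited):

const proc = Bun.spawn(["sleep", "10"]);

// terminate the child with the default signal
proc.kill();

await proc.exited;

// signalCode names the terminating signal; exitCode is now non-null (1) rather than null
console.log({ exitCode: proc.exitCode, signalCode: proc.signalCode });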

View File

@@ -1,7 +1,16 @@
import { define } from "../scripts/class-definitions";
const names = ["SHA1", "MD5", "MD4", "SHA224", "SHA512", "SHA384", "SHA256", "SHA512_256"];
const named = names.map(name => {
const names = [
"SHA1",
"MD5",
"MD4",
"SHA224",
"SHA512",
"SHA384",
"SHA256",
"SHA512_256",
];
const named = names.map((name) => {
return define({
name: name,
construct: true,
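This generates one class definition per hash algorithm (Bun.SHA1, Bun.MD5, ..., Bun.SHA512_256). A usage sketch, assuming each generated class exposes the usual update()/digest() instance methods and a one-shot static hash(); those method shapes are an assumption, not something shown in this diff:

// incremental form (assumed API shape)
const hasher = new Bun.SHA256();
hasher.update("hello ");
hasher.update("world");
console.log(hasher.digest("hex")); // hex-encoded digest

// one-shot static form (assumed API shape)
console.log(Bun.SHA512_256.hash("hello world", "hex"));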

View File

@@ -290,7 +290,7 @@ pub const FFI = struct {
return JSC.toInvalidArguments("Expected an options object with symbol names", .{}, global);
}
const name = name_slice.slice();
const name = name_slice.sliceZ();
var symbols = bun.StringArrayHashMapUnmanaged(Function){};
if (generateSymbols(global, &symbols, object) catch JSC.JSValue.zero) |val| {
// an error while validating symbols
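The change above takes the library name as a null-terminated slice (sliceZ) before handing it to the dlopen path. A minimal bun:ffi sketch; the library name and symbol are placeholders chosen for illustration, not taken from this diff:

import { dlopen, FFIType, suffix } from "bun:ffi";

// `suffix` is "so", "dylib", or "dll" depending on the platform
const { symbols, close } = dlopen(`libm.${suffix}`, {
  cbrt: { args: [FFIType.double], returns: FFIType.double },
});

console.log(symbols.cbrt(27)); // 3
close();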

View File

@@ -448,7 +448,6 @@ pub const ServerConfig = struct {
args.base_url = URL.parse(args.base_uri);
}
} else {
const hostname: string =
if (has_hostname and std.mem.span(args.hostname).len > 0) std.mem.span(args.hostname) else "0.0.0.0";
const protocol: string = if (args.ssl_config != null) "https" else "http";
@@ -705,16 +704,14 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
const arguments = callframe.arguments(2);
var ctx = arguments.ptr[1].asPromisePtr(@This());
const result = arguments.ptr[0];
result.ensureStillAlive();
ctx.pending_promises_for_abort -|= 1;
if (ctx.aborted) {
ctx.finalizeForAbort();
return JSValue.jsUndefined();
}
if (ctx.didUpgradeWebSocket()) {
ctx.finalize();
if (result.isEmptyOrUndefinedOrNull()) {
ctx.renderMissing();
return JSValue.jsUndefined();
}
@@ -723,7 +720,12 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp
}
fn handleResolve(ctx: *RequestContext, value: JSC.JSValue) void {
if (value.isEmptyOrUndefinedOrNull() or !value.isCell()) {
if (ctx.didUpgradeWebSocket()) {
ctx.finalize();
return;
}
if (value.isEmptyOrUndefinedOrNull()) {
ctx.renderMissing();
return;
}
@@ -4371,7 +4373,6 @@ pub fn NewServer(comptime ssl_enabled_: bool, comptime debug_mode_: bool) type {
}
pub fn stopListening(this: *ThisServer, abrupt: bool) void {
httplog("stopListening", .{});
var listener = this.listener orelse return;
this.listener = null;
this.unref();
@@ -4436,7 +4437,6 @@ pub fn NewServer(comptime ssl_enabled_: bool, comptime debug_mode_: bool) type {
}
noinline fn onListenFailed(this: *ThisServer) void {
httplog("onListenFailed", .{});
this.unref();
var zig_str: ZigString = ZigString.init("");
@@ -4528,7 +4528,7 @@ pub fn NewServer(comptime ssl_enabled_: bool, comptime debug_mode_: bool) type {
pub fn unref(this: *ThisServer) void {
if (!this.poll_ref.isActive()) return;
this.poll_ref.unrefOnNextTick(this.vm);
this.poll_ref.unref(this.vm);
this.vm.eventLoop().start_server_on_next_tick = false;
}
@@ -4712,7 +4712,6 @@ pub fn NewServer(comptime ssl_enabled_: bool, comptime debug_mode_: bool) type {
}
pub fn listen(this: *ThisServer) void {
httplog("listen", .{});
if (ssl_enabled) {
BoringSSL.load();
const ssl_config = this.config.ssl_config orelse @panic("Assertion failure: ssl_config");
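Two behaviors are visible above: when no hostname is configured the server binds to "0.0.0.0", and a fetch handler that resolves to undefined/null falls through to renderMissing rather than throwing. A minimal Bun.serve sketch; the port and response body are placeholders:

const server = Bun.serve({
  port: 3000, // placeholder
  // hostname omitted: per the hunk above this falls back to "0.0.0.0"
  fetch(req) {
    const url = new URL(req.url);
    if (url.pathname === "/") return new Response("hello");
    // returning undefined lets the server render its "missing" response
    return undefined;
  },
});

console.log(`listening on http://${server.hostname}:${server.port}`);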

View File

@@ -1,40 +0,0 @@
import { define } from "../scripts/class-definitions";
export default [
define({
name: "Transpiler",
construct: true,
finalize: true,
hasPendingActivity: false,
klass: {},
JSType: "0b11101110",
proto: {
scanImports: {
fn: "scanImports",
length: 2,
},
scan: {
fn: "scan",
length: 2,
},
transform: {
fn: "transform",
length: 2,
},
transformSync: {
fn: "transformSync",
length: 2,
},
},
custom: {
onLoadPlugins: {
extraHeaderIncludes: ["BunPlugin.h"],
impl: "JSTranspiler+BunPlugin-impl.h",
type: `WTF::Vector<std::unique_ptr<BunPlugin::OnLoad>>`,
},
onResolvePlugins: {
type: `WTF::Vector<std::unique_ptr<BunPlugin::OnResolve>>`,
},
},
}),
];
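Whichever binding path is used (the generated class definition above or the NewClass-based one in the next file), the JavaScript surface is the same four methods. A short Bun.Transpiler usage sketch; the "imports"/"exports" and "path"/"kind" labels come from the Zig code below, while the exact return shapes are stated as I understand the API:

const transpiler = new Bun.Transpiler({ loader: "tsx" });

// scan() returns { imports: { path, kind }[], exports: string[] }
const { imports, exports } = transpiler.scan(`
  import React from "react";
  export const App = () => <div />;
`);

// transformSync() returns the transpiled source as a string
const js = transpiler.transformSync(`const x: number = 1;`);

// scanImports() is the fast path that only collects import records
const records = transpiler.scanImports(`import "./a"; require("./b");`);

console.log(imports, exports, records, js);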

View File

@@ -43,14 +43,47 @@ const Runtime = @import("../../runtime.zig").Runtime;
const JSLexer = bun.js_lexer;
const Expr = JSAst.Expr;
pub usingnamespace JSC.Codegen.JSTranspiler;
bundler: Bundler.Bundler,
arena: std.heap.ArenaAllocator,
transpiler_options: TranspilerOptions,
scan_pass_result: ScanPassResult,
buffer_writer: ?JSPrinter.BufferWriter = null,
pub const Class = NewClass(
Transpiler,
.{ .name = "Transpiler" },
.{
.scanImports = .{
.rfn = scanImports,
},
.scan = .{
.rfn = scan,
},
.transform = .{
.rfn = transform,
},
.transformSync = .{
.rfn = transformSync,
},
// .resolve = .{
// .rfn = resolve,
// },
// .buildSync = .{
// .rfn = buildSync,
// },
.finalize = finalize,
},
.{},
);
pub const Constructor = JSC.NewConstructor(
@This(),
.{
.constructor = .{ .rfn = constructor },
},
.{},
);
const default_transform_options: Api.TransformOptions = brk: {
var opts = std.mem.zeroes(Api.TransformOptions);
opts.disable_hmr = true;
@@ -97,11 +130,11 @@ pub const TransformTask = struct {
pub const AsyncTransformTask = JSC.ConcurrentPromiseTask(TransformTask);
pub const AsyncTransformEventLoopTask = AsyncTransformTask.EventLoopTask;
pub fn create(transpiler: *Transpiler, protected_input_value: JSC.JSValue, globalThis: *JSGlobalObject, input_code: ZigString, loader: Loader) !*AsyncTransformTask {
pub fn create(transpiler: *Transpiler, protected_input_value: JSC.C.JSValueRef, globalThis: *JSGlobalObject, input_code: ZigString, loader: Loader) !*AsyncTransformTask {
var transform_task = try bun.default_allocator.create(TransformTask);
transform_task.* = .{
.input_code = input_code,
.protected_input_value = protected_input_value,
.protected_input_value = if (protected_input_value != null) JSC.JSValue.fromRef(protected_input_value) else @intToEnum(JSC.JSValue, 0),
.bundler = undefined,
.global = globalThis,
.macro_map = transpiler.transpiler_options.macro_map,
@@ -302,8 +335,8 @@ fn exportReplacementValue(value: JSValue, globalThis: *JSGlobalObject) ?JSAst.Ex
return null;
}
fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std.mem.Allocator, args: *JSC.Node.ArgumentsSlice, exception: JSC.C.ExceptionRef) !TranspilerOptions {
var globalThis = globalObject;
fn transformOptionsFromJSC(ctx: JSC.C.JSContextRef, temp_allocator: std.mem.Allocator, args: *JSC.Node.ArgumentsSlice, exception: JSC.C.ExceptionRef) !TranspilerOptions {
var globalThis = ctx.ptr();
const object = args.next() orelse return TranspilerOptions{ .log = logger.Log.init(temp_allocator) };
if (object.isUndefinedOrNull()) return TranspilerOptions{ .log = logger.Log.init(temp_allocator) };
@@ -318,18 +351,18 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
transpiler.log.level = .warn;
if (!object.isObject()) {
JSC.throwInvalidArguments("Expected an object", .{}, globalObject, exception);
JSC.throwInvalidArguments("Expected an object", .{}, ctx, exception);
return transpiler;
}
if (object.getIfPropertyExists(globalObject, "define")) |define| {
if (object.getIfPropertyExists(ctx.ptr(), "define")) |define| {
define: {
if (define.isUndefinedOrNull()) {
break :define;
}
if (!define.isObject()) {
JSC.throwInvalidArguments("define must be an object", .{}, globalObject, exception);
JSC.throwInvalidArguments("define must be an object", .{}, ctx, exception);
return transpiler;
}
@@ -351,7 +384,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
const value_type = property_value.jsType();
if (!value_type.isStringLike()) {
JSC.throwInvalidArguments("define \"{s}\" must be a JSON string", .{prop}, globalObject, exception);
JSC.throwInvalidArguments("define \"{s}\" must be a JSON string", .{prop}, ctx, exception);
return transpiler;
}
@@ -392,7 +425,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
var i: usize = 0;
while (iter.next()) |entry| {
if (!entry.jsType().isStringLike()) {
JSC.throwInvalidArguments("external must be a string or string[]", .{}, globalObject, exception);
JSC.throwInvalidArguments("external must be a string or string[]", .{}, ctx, exception);
return transpiler;
}
@@ -405,7 +438,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
transpiler.transform.external = externals[0..i];
} else {
JSC.throwInvalidArguments("external must be a string or string[]", .{}, globalObject, exception);
JSC.throwInvalidArguments("external must be a string or string[]", .{}, ctx, exception);
return transpiler;
}
}
@@ -414,7 +447,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
if (object.get(globalThis, "loader")) |loader| {
if (Loader.fromJS(globalThis, loader, exception)) |resolved| {
if (!resolved.isJavaScriptLike()) {
JSC.throwInvalidArguments("only JavaScript-like loaders supported for now", .{}, globalObject, exception);
JSC.throwInvalidArguments("only JavaScript-like loaders supported for now", .{}, ctx, exception);
return transpiler;
}
@@ -443,7 +476,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
var out = JSC.ZigString.init("");
if (kind.isArray()) {
JSC.throwInvalidArguments("tsconfig must be a string or object", .{}, globalObject, exception);
JSC.throwInvalidArguments("tsconfig must be a string or object", .{}, ctx, exception);
return transpiler;
}
@@ -481,7 +514,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
const kind = macros.jsType();
const is_object = kind.isObject();
if (!(kind.isStringLike() or is_object)) {
JSC.throwInvalidArguments("macro must be an object", .{}, globalObject, exception);
JSC.throwInvalidArguments("macro must be an object", .{}, ctx, exception);
return transpiler;
}
@@ -521,7 +554,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
transpiler.runtime.jsx_optimization_hoist = flag.toBoolean();
if (!transpiler.runtime.jsx_optimization_inline and transpiler.runtime.jsx_optimization_hoist) {
JSC.throwInvalidArguments("jsxOptimizationHoist requires jsxOptimizationInline", .{}, globalObject, exception);
JSC.throwInvalidArguments("jsxOptimizationHoist requires jsxOptimizationInline", .{}, ctx, exception);
return transpiler;
}
}
@@ -546,7 +579,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
if (options.SourceMapOption.map.get(sourcemap.slice())) |source| {
transpiler.transform.source_map = source.toAPI();
} else {
JSC.throwInvalidArguments("sourcemap must be one of \"inline\", \"external\", or \"none\"", .{}, globalObject, exception);
JSC.throwInvalidArguments("sourcemap must be one of \"inline\", \"external\", or \"none\"", .{}, ctx, exception);
return transpiler;
}
}
@@ -564,7 +597,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
if (object.getTruthy(globalThis, "exports")) |exports| {
if (!exports.isObject()) {
JSC.throwInvalidArguments("exports must be an object", .{}, globalObject, exception);
JSC.throwInvalidArguments("exports must be an object", .{}, ctx, exception);
return transpiler;
}
@@ -573,7 +606,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
if (exports.getTruthy(globalThis, "eliminate")) |eliminate| {
if (!eliminate.jsType().isArray()) {
JSC.throwInvalidArguments("exports.eliminate must be an array", .{}, globalObject, exception);
JSC.throwInvalidArguments("exports.eliminate must be an array", .{}, ctx, exception);
return transpiler;
}
@@ -601,7 +634,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
var str = value.getZigString(globalThis);
if (str.len == 0) continue;
const name = std.fmt.bufPrint(buf.items.ptr[buf.items.len..buf.capacity], "{}", .{str}) catch {
JSC.throwInvalidArguments("Error reading exports.eliminate. TODO: utf-16", .{}, globalObject, exception);
JSC.throwInvalidArguments("Error reading exports.eliminate. TODO: utf-16", .{}, ctx, exception);
return transpiler;
};
buf.items.len += name.len;
@@ -615,7 +648,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
if (exports.getTruthy(globalThis, "replace")) |replace| {
if (!replace.isObject()) {
JSC.throwInvalidArguments("replace must be an object", .{}, globalObject, exception);
JSC.throwInvalidArguments("replace must be an object", .{}, ctx, exception);
return transpiler;
}
@@ -645,7 +678,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
var key = try key_.toOwnedSlice(bun.default_allocator);
if (!JSLexer.isIdentifier(key)) {
JSC.throwInvalidArguments("\"{s}\" is not a valid ECMAScript identifier", .{key}, globalObject, exception);
JSC.throwInvalidArguments("\"{s}\" is not a valid ECMAScript identifier", .{key}, ctx, exception);
bun.default_allocator.free(key);
return transpiler;
}
@@ -657,7 +690,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
continue;
}
if (value.isObject() and value.getLengthOfArray(globalObject) == 2) {
if (value.isObject() and value.getLengthOfArray(ctx.ptr()) == 2) {
const replacementValue = JSC.JSObject.getIndex(value, globalThis, 1);
if (exportReplacementValue(replacementValue, globalThis)) |to_replace| {
const replacementKey = JSC.JSObject.getIndex(value, globalThis, 0);
@@ -665,7 +698,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
var replacement_name = slice.slice();
if (!JSLexer.isIdentifier(replacement_name)) {
JSC.throwInvalidArguments("\"{s}\" is not a valid ECMAScript identifier", .{replacement_name}, globalObject, exception);
JSC.throwInvalidArguments("\"{s}\" is not a valid ECMAScript identifier", .{replacement_name}, ctx, exception);
slice.deinit();
return transpiler;
}
@@ -680,7 +713,7 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
}
}
JSC.throwInvalidArguments("exports.replace values can only be string, null, undefined, number or boolean", .{}, globalObject, exception);
JSC.throwInvalidArguments("exports.replace values can only be string, null, undefined, number or boolean", .{}, ctx, exception);
return transpiler;
}
}
@@ -697,60 +730,48 @@ fn transformOptionsFromJSC(globalObject: JSC.C.JSContextRef, temp_allocator: std
}
pub fn constructor(
globalThis: *JSC.JSGlobalObject,
callframe: *JSC.CallFrame,
) callconv(.C) ?*Transpiler {
var temp = std.heap.ArenaAllocator.init(getAllocator(globalThis));
const arguments = callframe.arguments(3);
var args = JSC.Node.ArgumentsSlice.init(
globalThis.bunVM(),
arguments.ptr[0..arguments.len],
);
ctx: js.JSContextRef,
_: js.JSObjectRef,
arguments: []const js.JSValueRef,
exception: js.ExceptionRef,
) js.JSObjectRef {
var temp = std.heap.ArenaAllocator.init(getAllocator(ctx));
var args = JSC.Node.ArgumentsSlice.init(ctx.bunVM(), @ptrCast([*]const JSC.JSValue, arguments.ptr)[0..arguments.len]);
defer temp.deinit();
var exception_ref = [_]JSC.C.JSValueRef{null};
var exception = &exception_ref[0];
const transpiler_options: TranspilerOptions = if (arguments.len > 0)
transformOptionsFromJSC(globalThis, temp.allocator(), &args, exception) catch {
JSC.throwInvalidArguments("Failed to create transpiler", .{}, globalThis, exception);
transformOptionsFromJSC(ctx, temp.allocator(), &args, exception) catch {
JSC.throwInvalidArguments("Failed to create transpiler", .{}, ctx, exception);
return null;
}
else
TranspilerOptions{ .log = logger.Log.init(getAllocator(globalThis)) };
TranspilerOptions{ .log = logger.Log.init(getAllocator(ctx)) };
if (exception.* != null) {
globalThis.throwValue(JSC.JSValue.c(exception.*));
return null;
}
const allocator = getAllocator(globalThis);
if ((transpiler_options.log.warnings + transpiler_options.log.errors) > 0) {
globalThis.throwValue(
transpiler_options.log.toJS(globalThis.ptr(), allocator, "Failed to create transpiler"),
);
var out_exception = transpiler_options.log.toJS(ctx.ptr(), getAllocator(ctx), "Failed to create transpiler");
exception.* = out_exception.asObjectRef();
return null;
}
var log = allocator.create(logger.Log) catch unreachable;
var log = getAllocator(ctx).create(logger.Log) catch unreachable;
log.* = transpiler_options.log;
var bundler = Bundler.Bundler.init(
allocator,
getAllocator(ctx),
log,
transpiler_options.transform,
null,
JavaScript.VirtualMachine.get().bundler.env,
) catch |err| {
if ((log.warnings + log.errors) > 0) {
globalThis.throwValue(
log.toJS(globalThis.ptr(), allocator, "Failed to create transpiler"),
);
var out_exception = log.toJS(ctx.ptr(), getAllocator(ctx), "Failed to create transpiler");
exception.* = out_exception.asObjectRef();
return null;
}
globalThis.throwError(err, "Error creating transpiler");
JSC.throwInvalidArguments("Error creating transpiler: {s}", .{@errorName(err)}, ctx, exception);
return null;
};
@@ -758,14 +779,12 @@ pub fn constructor(
bundler.options.env.behavior = .disable;
bundler.configureDefines() catch |err| {
if ((log.warnings + log.errors) > 0) {
globalThis.throwValue(
log.toJS(globalThis.ptr(), allocator, "Failed to load define"),
);
var out_exception = log.toJS(ctx.ptr(), getAllocator(ctx), "Failed to load define");
exception.* = out_exception.asObjectRef();
return null;
}
globalThis.throwError(err, "Failed to load define");
JSC.throwInvalidArguments("Failed to load define: {s}", .{@errorName(err)}, ctx, exception);
return null;
};
@@ -783,20 +802,20 @@ pub fn constructor(
bundler.options.jsx.supports_fast_refresh = bundler.options.hot_module_reloading and
bundler.options.allow_runtime and transpiler_options.runtime.react_fast_refresh;
var transpiler = allocator.create(Transpiler) catch unreachable;
var transpiler = getAllocator(ctx).create(Transpiler) catch unreachable;
transpiler.* = Transpiler{
.transpiler_options = transpiler_options,
.bundler = bundler,
.arena = args.arena,
.scan_pass_result = ScanPassResult.init(allocator),
.scan_pass_result = ScanPassResult.init(getAllocator(ctx)),
};
return transpiler;
return Class.make(ctx, transpiler);
}
pub fn finalize(
this: *Transpiler,
) callconv(.C) void {
) void {
this.bundler.log.deinit();
this.scan_pass_result.named_imports.deinit();
this.scan_pass_result.import_records.deinit();
@@ -808,7 +827,6 @@ pub fn finalize(
// bun.default_allocator.free(this.transpiler_options.tsconfig_buf);
// bun.default_allocator.free(this.transpiler_options.macros_buf);
this.arena.deinit();
JSC.VirtualMachine.get().allocator.destroy(this);
}
fn getParseResult(this: *Transpiler, allocator: std.mem.Allocator, code: []const u8, loader: ?Loader, macro_js_ctx: Bundler.MacroJSValueType) ?Bundler.ParseResult {
@@ -851,41 +869,37 @@ fn getParseResult(this: *Transpiler, allocator: std.mem.Allocator, code: []const
pub fn scan(
this: *Transpiler,
globalThis: *JSC.JSGlobalObject,
callframe: *JSC.CallFrame,
) callconv(.C) JSC.JSValue {
ctx: js.JSContextRef,
_: js.JSObjectRef,
_: js.JSObjectRef,
arguments: []const js.JSValueRef,
exception: js.ExceptionRef,
) JSC.C.JSObjectRef {
JSC.markBinding(@src());
const arguments = callframe.arguments(3);
var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments.ptr[0..arguments.len]);
var args = JSC.Node.ArgumentsSlice.init(ctx.bunVM(), @ptrCast([*]const JSC.JSValue, arguments.ptr)[0..arguments.len]);
defer args.arena.deinit();
const code_arg = args.next() orelse {
globalThis.throwInvalidArgumentType("scan", "code", "string or Uint8Array");
return .zero;
JSC.throwInvalidArguments("Expected a string or Uint8Array", .{}, ctx, exception);
return null;
};
const code_holder = JSC.Node.SliceOrBuffer.fromJS(globalThis, args.arena.allocator(), code_arg) orelse {
globalThis.throwInvalidArgumentType("scan", "code", "string or Uint8Array");
return .zero;
const code_holder = JSC.Node.StringOrBuffer.fromJS(ctx.ptr(), args.arena.allocator(), code_arg, exception) orelse {
if (exception.* == null) JSC.throwInvalidArguments("Expected a string or Uint8Array", .{}, ctx, exception);
return null;
};
const code = code_holder.slice();
args.eat();
var exception_ref = [_]JSC.C.JSValueRef{null};
var exception: JSC.C.ExceptionRef = &exception_ref;
const loader: ?Loader = brk: {
if (args.next()) |arg| {
args.eat();
break :brk Loader.fromJS(globalThis, arg, exception);
break :brk Loader.fromJS(ctx.ptr(), arg, exception);
}
break :brk null;
};
if (exception.* != null) {
globalThis.throwValue(JSC.JSValue.c(exception.*));
return .zero;
}
if (exception.* != null) return null;
var arena = Mimalloc.Arena.init() catch unreachable;
var prev_allocator = this.bundler.allocator;
@@ -906,36 +920,34 @@ pub fn scan(
const parse_result = getParseResult(this, arena.allocator(), code, loader, Bundler.MacroJSValueType.zero) orelse {
if ((this.bundler.log.warnings + this.bundler.log.errors) > 0) {
globalThis.throwValue(this.bundler.log.toJS(globalThis, globalThis.allocator(), "Parse error"));
return .zero;
var out_exception = this.bundler.log.toJS(ctx.ptr(), getAllocator(ctx), "Parse error");
exception.* = out_exception.asObjectRef();
return null;
}
globalThis.throw("Failed to parse", .{});
return .zero;
JSC.throwInvalidArguments("Failed to parse", .{}, ctx, exception);
return null;
};
if ((this.bundler.log.warnings + this.bundler.log.errors) > 0) {
globalThis.throwValue(this.bundler.log.toJS(globalThis, globalThis.allocator(), "Parse error"));
return .zero;
var out_exception = this.bundler.log.toJS(ctx.ptr(), getAllocator(ctx), "Parse error");
exception.* = out_exception.asObjectRef();
return null;
}
const exports_label = JSC.ZigString.static("exports");
const imports_label = JSC.ZigString.static("imports");
const exports_label = JSC.ZigString.init("exports");
const imports_label = JSC.ZigString.init("imports");
const named_imports_value = namedImportsToJS(
globalThis,
ctx.ptr(),
parse_result.ast.import_records,
exception,
);
if (exception.* != null) {
globalThis.throwValue(JSC.JSValue.c(exception.*));
return .zero;
}
const named_exports_value = namedExportsToJS(
globalThis,
if (exception.* != null) return null;
var named_exports_value = namedExportsToJS(
ctx.ptr(),
parse_result.ast.named_exports,
);
return JSC.JSValue.createObject2(globalThis, imports_label, exports_label, named_imports_value, named_exports_value);
return JSC.JSValue.createObject2(ctx.ptr(), &imports_label, &exports_label, named_imports_value, named_exports_value).asObjectRef();
}
// pub fn build(
@@ -949,23 +961,24 @@ pub fn scan(
pub fn transform(
this: *Transpiler,
globalThis: *JSC.JSGlobalObject,
callframe: *JSC.CallFrame,
) callconv(.C) JSC.JSValue {
ctx: js.JSContextRef,
_: js.JSObjectRef,
_: js.JSObjectRef,
arguments: []const js.JSValueRef,
exception: js.ExceptionRef,
) JSC.C.JSObjectRef {
JSC.markBinding(@src());
var exception_ref = [_]JSC.C.JSValueRef{null};
var exception: JSC.C.ExceptionRef = &exception_ref;
const arguments = callframe.arguments(3);
var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments.ptr[0..arguments.len]);
var args = JSC.Node.ArgumentsSlice.init(ctx.bunVM(), @ptrCast([*]const JSC.JSValue, arguments.ptr)[0..arguments.len]);
defer args.arena.deinit();
const code_arg = args.next() orelse {
globalThis.throwInvalidArgumentType("transform", "code", "string or Uint8Array");
return .zero;
JSC.throwInvalidArguments("Expected a string or Uint8Array", .{}, ctx, exception);
return null;
};
const code_holder = JSC.Node.StringOrBuffer.fromJS(globalThis, this.arena.allocator(), code_arg, exception) orelse {
globalThis.throwInvalidArgumentType("transform", "code", "string or Uint8Array");
return .zero;
const code_holder = JSC.Node.StringOrBuffer.fromJS(ctx.ptr(), this.arena.allocator(), code_arg, exception) orelse {
if (exception.* == null) JSC.throwInvalidArguments("Expected a string or Uint8Array", .{}, ctx, exception);
return null;
};
const code = code_holder.slice();
@@ -973,62 +986,47 @@ pub fn transform(
const loader: ?Loader = brk: {
if (args.next()) |arg| {
args.eat();
break :brk Loader.fromJS(globalThis, arg, exception);
break :brk Loader.fromJS(ctx.ptr(), arg, exception);
}
break :brk null;
};
if (exception.* != null) {
globalThis.throwValue(JSC.JSValue.c(exception.*));
return .zero;
}
if (exception.* != null) return null;
if (code_holder == .string) {
arguments.ptr[0].ensureStillAlive();
JSC.C.JSValueProtect(ctx, arguments[0]);
}
var task = TransformTask.create(
this,
if (code_holder == .string) arguments.ptr[0] else .zero,
globalThis,
ZigString.init(code),
loader orelse this.transpiler_options.default_loader,
) catch {
globalThis.throw("Out of memory", .{});
return .zero;
};
var task = TransformTask.create(this, if (code_holder == .string) arguments[0] else null, ctx.ptr(), ZigString.init(code), loader orelse this.transpiler_options.default_loader) catch return null;
task.schedule();
return task.promise.value();
return task.promise.value().asObjectRef();
}
pub fn transformSync(
this: *Transpiler,
globalThis: *JSC.JSGlobalObject,
callframe: *JSC.CallFrame,
) callconv(.C) JSC.JSValue {
JSC.markBinding(@src());
var exception_value = [_]JSC.C.JSValueRef{null};
var exception: JSC.C.ExceptionRef = &exception_value;
const arguments = callframe.arguments(3);
var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments.ptr[0..arguments.len]);
ctx: js.JSContextRef,
_: js.JSObjectRef,
_: js.JSObjectRef,
arguments: []const js.JSValueRef,
exception: js.ExceptionRef,
) JSC.C.JSObjectRef {
var args = JSC.Node.ArgumentsSlice.init(ctx.bunVM(), @ptrCast([*]const JSC.JSValue, arguments.ptr)[0..arguments.len]);
defer args.arena.deinit();
const code_arg = args.next() orelse {
globalThis.throwInvalidArgumentType("transformSync", "code", "string or Uint8Array");
return .zero;
JSC.throwInvalidArguments("Expected a string or Uint8Array", .{}, ctx, exception);
return null;
};
var arena = Mimalloc.Arena.init() catch unreachable;
defer arena.deinit();
const code_holder = JSC.Node.StringOrBuffer.fromJS(globalThis, arena.allocator(), code_arg, exception) orelse {
globalThis.throwInvalidArgumentType("transformSync", "code", "string or Uint8Array");
return .zero;
const code_holder = JSC.Node.StringOrBuffer.fromJS(ctx.ptr(), arena.allocator(), code_arg, exception) orelse {
if (exception.* == null) JSC.throwInvalidArguments("Expected a string or Uint8Array", .{}, ctx, exception);
return null;
};
const code = code_holder.slice();
arguments.ptr[0].ensureStillAlive();
defer arguments.ptr[0].ensureStillAlive();
JSC.JSValue.c(arguments[0]).ensureStillAlive();
defer JSC.JSValue.c(arguments[0]).ensureStillAlive();
args.eat();
var js_ctx_value: JSC.JSValue = JSC.JSValue.zero;
@@ -1036,7 +1034,7 @@ pub fn transformSync(
if (args.next()) |arg| {
args.eat();
if (arg.isNumber() or arg.isString()) {
break :brk Loader.fromJS(globalThis, arg, exception);
break :brk Loader.fromJS(ctx.ptr(), arg, exception);
}
if (arg.isObject()) {
@@ -1052,8 +1050,8 @@ pub fn transformSync(
if (arg.isObject()) {
js_ctx_value = arg;
} else {
globalThis.throwInvalidArgumentType("transformSync", "context", "object or loader");
return .zero;
JSC.throwInvalidArguments("Expected a Loader or object", .{}, ctx, exception);
return null;
}
}
if (!js_ctx_value.isEmpty()) {
@@ -1066,10 +1064,7 @@ pub fn transformSync(
}
}
if (exception.* != null) {
globalThis.throwValue(JSC.JSValue.c(exception.*));
return .zero;
}
if (exception.* != null) return null;
JSAst.Stmt.Data.Store.reset();
JSAst.Expr.Data.Store.reset();
@@ -1095,23 +1090,25 @@ pub fn transformSync(
if (comptime JSC.is_bindgen) Bundler.MacroJSValueType.zero else js_ctx_value,
) orelse {
if ((this.bundler.log.warnings + this.bundler.log.errors) > 0) {
globalThis.throwValue(this.bundler.log.toJS(globalThis, globalThis.allocator(), "Parse error"));
return .zero;
var out_exception = this.bundler.log.toJS(ctx.ptr(), getAllocator(ctx), "Parse error");
exception.* = out_exception.asObjectRef();
return null;
}
globalThis.throw("Failed to parse code", .{});
return .zero;
JSC.throwInvalidArguments("Failed to parse", .{}, ctx, exception);
return null;
};
if ((this.bundler.log.warnings + this.bundler.log.errors) > 0) {
globalThis.throwValue(this.bundler.log.toJS(globalThis, globalThis.allocator(), "Parse error"));
return .zero;
var out_exception = this.bundler.log.toJS(ctx.ptr(), getAllocator(ctx), "Parse error");
exception.* = out_exception.asObjectRef();
return null;
}
var buffer_writer = this.buffer_writer orelse brk: {
var writer = JSPrinter.BufferWriter.init(arena.backingAllocator()) catch {
globalThis.throw("Failed to create BufferWriter", .{});
return .zero;
JSC.throwInvalidArguments("Failed to create BufferWriter", .{}, ctx, exception);
return null;
};
writer.buffer.growIfNeeded(code.len) catch unreachable;
@@ -1126,16 +1123,18 @@ pub fn transformSync(
buffer_writer.reset();
var printer = JSPrinter.BufferPrinter.init(buffer_writer);
_ = this.bundler.print(parse_result, @TypeOf(&printer), &printer, .esm_ascii) catch |err| {
globalThis.throwError(err, "Failed to print code");
return .zero;
JSC.JSError(bun.default_allocator, "Failed to print code: {s}", .{@errorName(err)}, ctx, exception);
return null;
};
// TODO: benchmark if pooling this way is faster or moving is faster
buffer_writer = printer.ctx;
var out = JSC.ZigString.init(buffer_writer.written);
out.mark();
out.setOutputEncoding();
return out.toValueGC(globalThis);
return out.toValueGC(ctx.ptr()).asObjectRef();
}
fn namedExportsToJS(global: *JSGlobalObject, named_exports: JSAst.Ast.NamedExports) JSC.JSValue {
@@ -1170,8 +1169,8 @@ fn namedImportsToJS(
var allocator = stack_fallback.get();
var i: usize = 0;
const path_label = JSC.ZigString.static("path");
const kind_label = JSC.ZigString.static("kind");
const path_label = JSC.ZigString.init("path");
const kind_label = JSC.ZigString.init("kind");
var array_items = allocator.alloc(
JSC.C.JSValueRef,
import_records.len,
@@ -1183,7 +1182,7 @@ fn namedImportsToJS(
const path = JSC.ZigString.init(record.path.text).toValueGC(global);
const kind = JSC.ZigString.init(record.kind.label()).toValue(global);
array_items[i] = JSC.JSValue.createObject2(global, path_label, kind_label, path, kind).asObjectRef();
array_items[i] = JSC.JSValue.createObject2(global, &path_label, &kind_label, path, kind).asObjectRef();
i += 1;
}
@@ -1192,47 +1191,39 @@ fn namedImportsToJS(
pub fn scanImports(
this: *Transpiler,
globalThis: *JSC.JSGlobalObject,
callframe: *JSC.CallFrame,
) callconv(.C) JSC.JSValue {
const arguments = callframe.arguments(2);
var exception_val = [_]JSC.C.JSValueRef{null};
var exception: JSC.C.ExceptionRef = &exception_val;
var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments.ptr[0..arguments.len]);
ctx: js.JSContextRef,
_: js.JSObjectRef,
_: js.JSObjectRef,
arguments: []const js.JSValueRef,
exception: js.ExceptionRef,
) JSC.C.JSObjectRef {
var args = JSC.Node.ArgumentsSlice.init(ctx.bunVM(), @ptrCast([*]const JSC.JSValue, arguments.ptr)[0..arguments.len]);
const code_arg = args.next() orelse {
globalThis.throwInvalidArgumentType("scanImports", "code", "string or Uint8Array");
return .zero;
JSC.throwInvalidArguments("Expected a string or Uint8Array", .{}, ctx, exception);
return null;
};
const code_holder = JSC.Node.StringOrBuffer.fromJS(globalThis, args.arena.allocator(), code_arg, exception) orelse {
if (exception.* == null) {
globalThis.throwInvalidArgumentType("scanImports", "code", "string or Uint8Array");
} else {
globalThis.throwValue(JSC.JSValue.c(exception.*));
}
return .zero;
const code_holder = JSC.Node.StringOrBuffer.fromJS(ctx.ptr(), args.arena.allocator(), code_arg, exception) orelse {
if (exception.* == null) JSC.throwInvalidArguments("Expected a string or Uint8Array", .{}, ctx, exception);
return null;
};
args.eat();
const code = code_holder.slice();
var loader: Loader = this.transpiler_options.default_loader;
if (args.next()) |arg| {
if (Loader.fromJS(globalThis, arg, exception)) |_loader| {
if (Loader.fromJS(ctx.ptr(), arg, exception)) |_loader| {
loader = _loader;
}
args.eat();
}
if (!loader.isJavaScriptLike()) {
globalThis.throwInvalidArguments("Only JavaScript-like files support this fast path", .{});
return .zero;
JSC.throwInvalidArguments("Only JavaScript-like files support this fast path", .{}, ctx, exception);
return null;
}
if (exception.* != null) {
globalThis.throwValue(JSC.JSValue.c(exception.*));
return .zero;
}
if (exception.* != null) return null;
var arena = Mimalloc.Arena.init() catch unreachable;
var prev_allocator = this.bundler.allocator;
@@ -1277,29 +1268,28 @@ pub fn scanImports(
) catch |err| {
defer this.scan_pass_result.reset();
if ((log.warnings + log.errors) > 0) {
globalThis.throwValue(log.toJS(globalThis, globalThis.allocator(), "Failed to scan imports"));
return .zero;
var out_exception = log.toJS(ctx.ptr(), getAllocator(ctx), "Failed to scan imports");
exception.* = out_exception.asObjectRef();
return null;
}
globalThis.throwError(err, "Failed to scan imports");
return .zero;
JSC.throwInvalidArguments("Failed to scan imports: {s}", .{@errorName(err)}, ctx, exception);
return null;
};
defer this.scan_pass_result.reset();
if ((log.warnings + log.errors) > 0) {
globalThis.throwValue(log.toJS(globalThis, globalThis.allocator(), "Failed to scan imports"));
return .zero;
var out_exception = log.toJS(ctx.ptr(), getAllocator(ctx), "Failed to scan imports");
exception.* = out_exception.asObjectRef();
return null;
}
const named_imports_value = namedImportsToJS(
globalThis,
ctx.ptr(),
this.scan_pass_result.import_records.items,
exception,
);
if (exception.* != null) {
globalThis.throwValue(JSC.JSValue.c(exception.*));
return .zero;
}
return named_imports_value;
if (exception.* != null) return null;
return named_imports_value.asObjectRef();
}

View File

@@ -6,10 +6,13 @@ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf,
__hasOwnProp = Object.prototype.hasOwnProperty;
var __markAsModule = target => __defProp(target, "__esModule", { value: !0 });
var __markAsModule = (target) => __defProp(target, "__esModule", { value: !0 });
var __commonJS = (cb, mod) =>
function () {
return mod || (0, cb[Object.keys(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
return (
mod || (0, cb[Object.keys(cb)[0]])((mod = { exports: {} }).exports, mod),
mod.exports
);
};
var __reExport = (target, module2, desc) => {
if ((module2 && typeof module2 == "object") || typeof module2 == "function")
@@ -18,11 +21,12 @@ var __reExport = (target, module2, desc) => {
key !== "default" &&
__defProp(target, key, {
get: () => module2[key],
enumerable: !(desc = __getOwnPropDesc(module2, key)) || desc.enumerable,
enumerable:
!(desc = __getOwnPropDesc(module2, key)) || desc.enumerable,
});
return target;
},
__toModule = module2 =>
__toModule = (module2) =>
__reExport(
__markAsModule(
__defProp(
@@ -36,7 +40,7 @@ var __reExport = (target, module2, desc) => {
module2,
);
var require = path => import.meta.require(path);
var require = (path) => import.meta.require(path);
// assert/build/internal/errors.js
var require_errors = __commonJS({
@@ -49,7 +53,10 @@ var require_errors = __commonJS({
return typeof obj2;
})
: (_typeof = function (obj2) {
return obj2 && typeof Symbol == "function" && obj2.constructor === Symbol && obj2 !== Symbol.prototype
return obj2 &&
typeof Symbol == "function" &&
obj2.constructor === Symbol &&
obj2 !== Symbol.prototype
? "symbol"
: typeof obj2;
}),
@@ -57,13 +64,19 @@ var require_errors = __commonJS({
);
}
function _classCallCheck(instance, Constructor) {
if (!(instance instanceof Constructor)) throw new TypeError("Cannot call a class as a function");
if (!(instance instanceof Constructor))
throw new TypeError("Cannot call a class as a function");
}
function _possibleConstructorReturn(self, call) {
return call && (_typeof(call) === "object" || typeof call == "function") ? call : _assertThisInitialized(self);
return call && (_typeof(call) === "object" || typeof call == "function")
? call
: _assertThisInitialized(self);
}
function _assertThisInitialized(self) {
if (self === void 0) throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
if (self === void 0)
throw new ReferenceError(
"this hasn't been initialised - super() hasn't been called",
);
return self;
}
function _getPrototypeOf(o) {
@@ -78,7 +91,9 @@ var require_errors = __commonJS({
}
function _inherits(subClass, superClass) {
if (typeof superClass != "function" && superClass !== null)
throw new TypeError("Super expression must either be null or a function");
throw new TypeError(
"Super expression must either be null or a function",
);
(subClass.prototype = Object.create(superClass && superClass.prototype, {
constructor: { value: subClass, writable: !0, configurable: !0 },
})),
@@ -110,7 +125,10 @@ var require_errors = __commonJS({
_classCallCheck(this, NodeError2),
(_this = _possibleConstructorReturn(
this,
_getPrototypeOf(NodeError2).call(this, getMessage(arg1, arg2, arg3)),
_getPrototypeOf(NodeError2).call(
this,
getMessage(arg1, arg2, arg3),
),
)),
(_this.code = code),
_this
@@ -128,9 +146,15 @@ var require_errors = __commonJS({
return String(i);
})),
len > 2
? "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(", "), ", or ") + expected[len - 1]
? "one of "
.concat(thing, " ")
.concat(expected.slice(0, len - 1).join(", "), ", or ") +
expected[len - 1]
: len === 2
? "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1])
? "one of "
.concat(thing, " ")
.concat(expected[0], " or ")
.concat(expected[1])
: "of ".concat(thing, " ").concat(expected[0])
);
} else return "of ".concat(thing, " ").concat(String(expected));
@@ -140,31 +164,47 @@ var require_errors = __commonJS({
}
function endsWith(str, search, this_len) {
return (
(this_len === void 0 || this_len > str.length) && (this_len = str.length),
(this_len === void 0 || this_len > str.length) &&
(this_len = str.length),
str.substring(this_len - search.length, this_len) === search
);
}
function includes(str, search, start) {
return (
typeof start != "number" && (start = 0),
start + search.length > str.length ? !1 : str.indexOf(search, start) !== -1
start + search.length > str.length
? !1
: str.indexOf(search, start) !== -1
);
}
createErrorType("ERR_AMBIGUOUS_ARGUMENT", 'The "%s" argument is ambiguous. %s', TypeError);
createErrorType(
"ERR_AMBIGUOUS_ARGUMENT",
'The "%s" argument is ambiguous. %s',
TypeError,
);
createErrorType(
"ERR_INVALID_ARG_TYPE",
function (name, expected, actual) {
assert === void 0 && (assert = require_assert()), assert(typeof name == "string", "'name' must be a string");
assert === void 0 && (assert = require_assert()),
assert(typeof name == "string", "'name' must be a string");
var determiner;
typeof expected == "string" && startsWith(expected, "not ")
? ((determiner = "must not be"), (expected = expected.replace(/^not /, "")))
? ((determiner = "must not be"),
(expected = expected.replace(/^not /, "")))
: (determiner = "must be");
var msg;
if (endsWith(name, " argument"))
msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, "type"));
msg = "The "
.concat(name, " ")
.concat(determiner, " ")
.concat(oneOf(expected, "type"));
else {
var type = includes(name, ".") ? "property" : "argument";
msg = 'The "'.concat(name, '" ').concat(type, " ").concat(determiner, " ").concat(oneOf(expected, "type"));
msg = 'The "'
.concat(name, '" ')
.concat(type, " ")
.concat(determiner, " ")
.concat(oneOf(expected, "type"));
}
return (msg += ". Received type ".concat(_typeof(actual))), msg;
},
@@ -173,12 +213,19 @@ var require_errors = __commonJS({
createErrorType(
"ERR_INVALID_ARG_VALUE",
function (name, value) {
var reason = arguments.length > 2 && arguments[2] !== void 0 ? arguments[2] : "is invalid";
var reason =
arguments.length > 2 && arguments[2] !== void 0
? arguments[2]
: "is invalid";
util === void 0 && (util = require("util"));
var inspected = util.inspect(value);
return (
inspected.length > 128 && (inspected = "".concat(inspected.slice(0, 128), "...")),
"The argument '".concat(name, "' ").concat(reason, ". Received ").concat(inspected)
inspected.length > 128 &&
(inspected = "".concat(inspected.slice(0, 128), "...")),
"The argument '"
.concat(name, "' ")
.concat(reason, ". Received ")
.concat(inspected)
);
},
TypeError,
@@ -192,8 +239,9 @@ var require_errors = __commonJS({
value && value.constructor && value.constructor.name
? (type = "instance of ".concat(value.constructor.name))
: (type = "type ".concat(_typeof(value))),
"Expected ".concat(input, ' to be returned from the "').concat(name, '"') +
" function but got ".concat(type, ".")
"Expected "
.concat(input, ' to be returned from the "')
.concat(name, '"') + " function but got ".concat(type, ".")
);
},
TypeError,
@@ -201,7 +249,11 @@ var require_errors = __commonJS({
createErrorType(
"ERR_MISSING_ARGS",
function () {
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++)
for (
var _len = arguments.length, args = new Array(_len), _key = 0;
_key < _len;
_key++
)
args[_key] = arguments[_key];
assert === void 0 && (assert = require_assert()),
assert(args.length > 0, "At least one arg needs to be specified");
@@ -220,7 +272,8 @@ var require_errors = __commonJS({
msg += "".concat(args[0], " and ").concat(args[1], " arguments");
break;
default:
(msg += args.slice(0, len - 1).join(", ")), (msg += ", and ".concat(args[len - 1], " arguments"));
(msg += args.slice(0, len - 1).join(", ")),
(msg += ", and ".concat(args[len - 1], " arguments"));
break;
}
return "".concat(msg, " must be specified");
@@ -265,7 +318,8 @@ var require_assertion_error = __commonJS({
);
}
function _classCallCheck(instance, Constructor) {
if (!(instance instanceof Constructor)) throw new TypeError("Cannot call a class as a function");
if (!(instance instanceof Constructor))
throw new TypeError("Cannot call a class as a function");
}
function _defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
@@ -284,15 +338,22 @@ var require_assertion_error = __commonJS({
);
}
function _possibleConstructorReturn(self, call) {
return call && (_typeof(call) === "object" || typeof call == "function") ? call : _assertThisInitialized(self);
return call && (_typeof(call) === "object" || typeof call == "function")
? call
: _assertThisInitialized(self);
}
function _assertThisInitialized(self) {
if (self === void 0) throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
if (self === void 0)
throw new ReferenceError(
"this hasn't been initialised - super() hasn't been called",
);
return self;
}
function _inherits(subClass, superClass) {
if (typeof superClass != "function" && superClass !== null)
throw new TypeError("Super expression must either be null or a function");
throw new TypeError(
"Super expression must either be null or a function",
);
(subClass.prototype = Object.create(superClass && superClass.prototype, {
constructor: { value: subClass, writable: !0, configurable: !0 },
})),
@@ -303,13 +364,20 @@ var require_assertion_error = __commonJS({
return (
(_wrapNativeSuper = function (Class2) {
if (Class2 === null || !_isNativeFunction(Class2)) return Class2;
if (typeof Class2 != "function") throw new TypeError("Super expression must either be null or a function");
if (typeof Class2 != "function")
throw new TypeError(
"Super expression must either be null or a function",
);
if (typeof _cache != "undefined") {
if (_cache.has(Class2)) return _cache.get(Class2);
_cache.set(Class2, Wrapper);
}
function Wrapper() {
return _construct(Class2, arguments, _getPrototypeOf(this).constructor);
return _construct(
Class2,
arguments,
_getPrototypeOf(this).constructor,
);
}
return (
(Wrapper.prototype = Object.create(Class2.prototype, {
@@ -327,10 +395,20 @@ var require_assertion_error = __commonJS({
);
}
function isNativeReflectConstruct() {
if (typeof Reflect == "undefined" || !Reflect.construct || Reflect.construct.sham) return !1;
if (
typeof Reflect == "undefined" ||
!Reflect.construct ||
Reflect.construct.sham
)
return !1;
if (typeof Proxy == "function") return !0;
try {
return Date.prototype.toString.call(Reflect.construct(Date, [], function () {})), !0;
return (
Date.prototype.toString.call(
Reflect.construct(Date, [], function () {}),
),
!0
);
} catch {
return !1;
}
@@ -344,7 +422,9 @@ var require_assertion_error = __commonJS({
a.push.apply(a, args2);
var Constructor = Function.bind.apply(Parent2, a),
instance = new Constructor();
return Class2 && _setPrototypeOf(instance, Class2.prototype), instance;
return (
Class2 && _setPrototypeOf(instance, Class2.prototype), instance
);
}),
_construct.apply(null, arguments)
);
@@ -379,7 +459,10 @@ var require_assertion_error = __commonJS({
return typeof obj2;
})
: (_typeof = function (obj2) {
return obj2 && typeof Symbol == "function" && obj2.constructor === Symbol && obj2 !== Symbol.prototype
return obj2 &&
typeof Symbol == "function" &&
obj2.constructor === Symbol &&
obj2 !== Symbol.prototype
? "symbol"
: typeof obj2;
}),
@@ -392,14 +475,17 @@ var require_assertion_error = __commonJS({
ERR_INVALID_ARG_TYPE = _require2.codes.ERR_INVALID_ARG_TYPE;
function endsWith(str, search, this_len) {
return (
(this_len === void 0 || this_len > str.length) && (this_len = str.length),
(this_len === void 0 || this_len > str.length) &&
(this_len = str.length),
str.substring(this_len - search.length, this_len) === search
);
}
function repeat(str, count) {
if (((count = Math.floor(count)), str.length == 0 || count == 0)) return "";
if (((count = Math.floor(count)), str.length == 0 || count == 0))
return "";
var maxCount = str.length * count;
for (count = Math.floor(Math.log(count) / Math.log(2)); count; ) (str += str), count--;
for (count = Math.floor(Math.log(count) / Math.log(2)); count; )
(str += str), count--;
return (str += str.substring(0, maxCount - str.length)), str;
}
var blue = "",
@@ -409,12 +495,15 @@ var require_assertion_error = __commonJS({
kReadableOperator = {
deepStrictEqual: "Expected values to be strictly deep-equal:",
strictEqual: "Expected values to be strictly equal:",
strictEqualObject: 'Expected "actual" to be reference-equal to "expected":',
strictEqualObject:
'Expected "actual" to be reference-equal to "expected":',
deepEqual: "Expected values to be loosely deep-equal:",
equal: "Expected values to be loosely equal:",
notDeepStrictEqual: 'Expected "actual" not to be strictly deep-equal to:',
notDeepStrictEqual:
'Expected "actual" not to be strictly deep-equal to:',
notStrictEqual: 'Expected "actual" to be strictly unequal to:',
notStrictEqualObject: 'Expected "actual" not to be reference-equal to "expected":',
notStrictEqualObject:
'Expected "actual" not to be reference-equal to "expected":',
notDeepEqual: 'Expected "actual" not to be loosely deep-equal to:',
notEqual: 'Expected "actual" to be loosely unequal to:',
notIdentical: "Values identical but not reference-equal:",
@@ -466,7 +555,9 @@ var require_assertion_error = __commonJS({
actual !== null &&
expected !== null &&
(operator = "strictEqualObject"),
actualLines.length === 1 && expectedLines.length === 1 && actualLines[0] !== expectedLines[0])
actualLines.length === 1 &&
expectedLines.length === 1 &&
actualLines[0] !== expectedLines[0])
) {
var inputLength = actualLines[0].length + expectedLines[0].length;
if (inputLength <= kMaxShortLength) {
@@ -489,7 +580,10 @@ var require_assertion_error = __commonJS({
)
);
} else if (operator !== "strictEqualObject") {
var maxLength = process.stderr && process.stderr.isTTY ? process.stderr.columns : 80;
var maxLength =
process.stderr && process.stderr.isTTY
? process.stderr.columns
: 80;
if (inputLength < maxLength) {
for (; actualLines[0][i] === expectedLines[0][i]; ) i++;
i > 2 &&
@@ -500,7 +594,8 @@ var require_assertion_error = __commonJS({
}
}
for (
var a = actualLines[actualLines.length - 1], b = expectedLines[expectedLines.length - 1];
var a = actualLines[actualLines.length - 1],
b = expectedLines[expectedLines.length - 1];
a === b &&
(i++ < 2
? (end = `
@@ -513,13 +608,19 @@ var require_assertion_error = __commonJS({
!(actualLines.length === 0 || expectedLines.length === 0));
)
(a = actualLines[actualLines.length - 1]), (b = expectedLines[expectedLines.length - 1]);
(a = actualLines[actualLines.length - 1]),
(b = expectedLines[expectedLines.length - 1]);
var maxLines = Math.max(actualLines.length, expectedLines.length);
if (maxLines === 0) {
var _actualLines = actualInspected.split(`
`);
if (_actualLines.length > 30)
for (_actualLines[26] = "".concat(blue, "...").concat(white); _actualLines.length > 27; ) _actualLines.pop();
for (
_actualLines[26] = "".concat(blue, "...").concat(white);
_actualLines.length > 27;
)
_actualLines.pop();
return ""
.concat(
kReadableOperator.notIdentical,
@@ -609,7 +710,9 @@ var require_assertion_error = __commonJS({
var expectedLine = expectedLines[i],
actualLine = actualLines[i],
divergingLines =
actualLine !== expectedLine && (!endsWith(actualLine, ",") || actualLine.slice(0, -1) !== expectedLine);
actualLine !== expectedLine &&
(!endsWith(actualLine, ",") ||
actualLine.slice(0, -1) !== expectedLine);
divergingLines &&
endsWith(expectedLine, ",") &&
expectedLine.slice(0, -1) === actualLine &&
@@ -688,7 +791,10 @@ var require_assertion_error = __commonJS({
_inherits(AssertionError2, _Error);
function AssertionError2(options) {
var _this;
if ((_classCallCheck(this, AssertionError2), _typeof(options) !== "object" || options === null))
if (
(_classCallCheck(this, AssertionError2),
_typeof(options) !== "object" || options === null)
)
throw new ERR_INVALID_ARG_TYPE("options", "Object", options);
var message = options.message,
operator = options.operator,
@@ -697,11 +803,16 @@ var require_assertion_error = __commonJS({
expected = options.expected,
limit = Error.stackTraceLimit;
if (((Error.stackTraceLimit = 0), message != null))
_this = _possibleConstructorReturn(this, _getPrototypeOf(AssertionError2).call(this, String(message)));
_this = _possibleConstructorReturn(
this,
_getPrototypeOf(AssertionError2).call(this, String(message)),
);
else if (
(process.stderr &&
process.stderr.isTTY &&
(process.stderr && process.stderr.getColorDepth && process.stderr.getColorDepth() !== 1
(process.stderr &&
process.stderr.getColorDepth &&
process.stderr.getColorDepth() !== 1
? ((blue = ""), (green = ""), (white = ""), (red = ""))
: ((blue = ""), (green = ""), (white = ""), (red = ""))),
_typeof(actual) === "object" &&
@@ -717,9 +828,15 @@ var require_assertion_error = __commonJS({
)
_this = _possibleConstructorReturn(
this,
_getPrototypeOf(AssertionError2).call(this, createErrDiff(actual, expected, operator)),
_getPrototypeOf(AssertionError2).call(
this,
createErrDiff(actual, expected, operator),
),
);
else if (operator === "notDeepStrictEqual" || operator === "notStrictEqual") {
else if (
operator === "notDeepStrictEqual" ||
operator === "notStrictEqual"
) {
var base = kReadableOperator[operator],
res = inspectValue(actual).split(`
`);
@@ -730,11 +847,19 @@ var require_assertion_error = __commonJS({
(base = kReadableOperator.notStrictEqualObject),
res.length > 30)
)
for (res[26] = "".concat(blue, "...").concat(white); res.length > 27; ) res.pop();
for (
res[26] = "".concat(blue, "...").concat(white);
res.length > 27;
)
res.pop();
res.length === 1
? (_this = _possibleConstructorReturn(
this,
_getPrototypeOf(AssertionError2).call(this, "".concat(base, " ").concat(res[0])),
_getPrototypeOf(AssertionError2).call(
this,
"".concat(base, " ").concat(res[0]),
),
))
: (_this = _possibleConstructorReturn(
this,
@@ -768,10 +893,13 @@ var require_assertion_error = __commonJS({
`,
)
.concat(_res)),
_res.length > 1024 && (_res = "".concat(_res.slice(0, 1021), "...")))
_res.length > 1024 &&
(_res = "".concat(_res.slice(0, 1021), "...")))
: ((other = "".concat(inspectValue(expected))),
_res.length > 512 && (_res = "".concat(_res.slice(0, 509), "...")),
other.length > 512 && (other = "".concat(other.slice(0, 509), "...")),
_res.length > 512 &&
(_res = "".concat(_res.slice(0, 509), "...")),
other.length > 512 &&
(other = "".concat(other.slice(0, 509), "...")),
operator === "deepEqual" || operator === "equal"
? (_res = ""
.concat(
@@ -791,7 +919,10 @@ should equal
: (other = " ".concat(operator, " ").concat(other))),
(_this = _possibleConstructorReturn(
this,
_getPrototypeOf(AssertionError2).call(this, "".concat(_res).concat(other)),
_getPrototypeOf(AssertionError2).call(
this,
"".concat(_res).concat(other),
),
));
}
return (
@@ -807,7 +938,11 @@ should equal
(_this.actual = actual),
(_this.expected = expected),
(_this.operator = operator),
Error.captureStackTrace && Error.captureStackTrace(_assertThisInitialized(_this), stackStartFn),
Error.captureStackTrace &&
Error.captureStackTrace(
_assertThisInitialized(_this),
stackStartFn,
),
_this.stack,
(_this.name = "AssertionError"),
_possibleConstructorReturn(_this)
@@ -818,7 +953,10 @@ should equal
{
key: "toString",
value: function () {
return "".concat(this.name, " [").concat(this.code, "]: ").concat(this.message);
return ""
.concat(this.name, " [")
.concat(this.code, "]: ")
.concat(this.message);
},
},
{
@@ -852,7 +990,10 @@ var require_assert = __commonJS({
return typeof obj2;
})
: (_typeof = function (obj2) {
return obj2 && typeof Symbol == "function" && obj2.constructor === Symbol && obj2 !== Symbol.prototype
return obj2 &&
typeof Symbol == "function" &&
obj2.constructor === Symbol &&
obj2 !== Symbol.prototype
? "symbol"
: typeof obj2;
}),
@@ -860,7 +1001,8 @@ var require_assert = __commonJS({
);
}
function _classCallCheck(instance, Constructor) {
if (!(instance instanceof Constructor)) throw new TypeError("Cannot call a class as a function");
if (!(instance instanceof Constructor))
throw new TypeError("Cannot call a class as a function");
}
var _require = require_errors(),
_require$codes = _require.codes,
@@ -883,7 +1025,9 @@ var require_assert = __commonJS({
assert = (module2.exports = ok),
NO_EXCEPTION_SENTINEL = {};
function innerFail(obj) {
throw obj.message instanceof Error ? obj.message : new AssertionError(obj);
throw obj.message instanceof Error
? obj.message
: new AssertionError(obj);
}
function fail(actual, expected, message, operator, stackStartFn) {
var argsLen = arguments.length,
@@ -893,7 +1037,9 @@ var require_assert = __commonJS({
else {
if (warned === !1) {
warned = !0;
var warn = process.emitWarning ? process.emitWarning : console.warn.bind(console);
var warn = process.emitWarning
? process.emitWarning
: console.warn.bind(console);
warn(
"assert.fail() with more than one argument is deprecated. Please use assert.strictEqual() instead or only pass a message.",
"DeprecationWarning",
@@ -911,14 +1057,20 @@ var require_assert = __commonJS({
};
message !== void 0 && (errArgs.message = message);
var err = new AssertionError(errArgs);
throw (internalMessage && ((err.message = internalMessage), (err.generatedMessage = !0)), err);
throw (
(internalMessage &&
((err.message = internalMessage), (err.generatedMessage = !0)),
err)
);
}
assert.fail = fail;
assert.AssertionError = AssertionError;
function innerOk(fn, argLen, value, message) {
if (!value) {
var generatedMessage = !1;
if (argLen === 0) (generatedMessage = !0), (message = "No value argument passed to `assert.ok()`");
if (argLen === 0)
(generatedMessage = !0),
(message = "No value argument passed to `assert.ok()`");
else if (message instanceof Error) throw message;
var err = new AssertionError({
actual: value,
@@ -931,13 +1083,18 @@ var require_assert = __commonJS({
}
}
function ok() {
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++)
for (
var _len = arguments.length, args = new Array(_len), _key = 0;
_key < _len;
_key++
)
args[_key] = arguments[_key];
innerOk.apply(void 0, [ok, args.length].concat(args));
}
assert.ok = ok;
assert.equal = function equal(actual, expected, message) {
if (arguments.length < 2) throw new ERR_MISSING_ARGS("actual", "expected");
if (arguments.length < 2)
throw new ERR_MISSING_ARGS("actual", "expected");
actual != expected &&
innerFail({
actual,
@@ -948,7 +1105,8 @@ var require_assert = __commonJS({
});
};
assert.notEqual = function notEqual(actual, expected, message) {
if (arguments.length < 2) throw new ERR_MISSING_ARGS("actual", "expected");
if (arguments.length < 2)
throw new ERR_MISSING_ARGS("actual", "expected");
actual == expected &&
innerFail({
actual,
@@ -959,7 +1117,8 @@ var require_assert = __commonJS({
});
};
assert.deepEqual = function deepEqual(actual, expected, message) {
if (arguments.length < 2) throw new ERR_MISSING_ARGS("actual", "expected");
if (arguments.length < 2)
throw new ERR_MISSING_ARGS("actual", "expected");
isDeepEqual(actual, expected, false) ||
innerFail({
actual,
@@ -970,7 +1129,8 @@ var require_assert = __commonJS({
});
};
assert.notDeepEqual = function notDeepEqual(actual, expected, message) {
if (arguments.length < 2) throw new ERR_MISSING_ARGS("actual", "expected");
if (arguments.length < 2)
throw new ERR_MISSING_ARGS("actual", "expected");
isDeepEqual(actual, expected, false) &&
innerFail({
actual,
@@ -980,8 +1140,13 @@ var require_assert = __commonJS({
stackStartFn: notDeepEqual,
});
};
assert.deepStrictEqual = function deepStrictEqual(actual, expected, message) {
if (arguments.length < 2) throw new ERR_MISSING_ARGS("actual", "expected");
assert.deepStrictEqual = function deepStrictEqual(
actual,
expected,
message,
) {
if (arguments.length < 2)
throw new ERR_MISSING_ARGS("actual", "expected");
isDeepEqual(actual, expected, true) ||
innerFail({
@@ -994,7 +1159,8 @@ var require_assert = __commonJS({
};
assert.notDeepStrictEqual = notDeepStrictEqual;
function notDeepStrictEqual(actual, expected, message) {
if (arguments.length < 2) throw new ERR_MISSING_ARGS("actual", "expected");
if (arguments.length < 2)
throw new ERR_MISSING_ARGS("actual", "expected");
isDeepEqual(actual, expected, true) &&
innerFail({
@@ -1006,7 +1172,8 @@ var require_assert = __commonJS({
});
}
assert.strictEqual = function strictEqual(actual, expected, message) {
if (arguments.length < 2) throw new ERR_MISSING_ARGS("actual", "expected");
if (arguments.length < 2)
throw new ERR_MISSING_ARGS("actual", "expected");
objectIs(actual, expected) ||
innerFail({
actual,
@@ -1017,7 +1184,8 @@ var require_assert = __commonJS({
});
};
assert.notStrictEqual = function notStrictEqual(actual, expected, message) {
if (arguments.length < 2) throw new ERR_MISSING_ARGS("actual", "expected");
if (arguments.length < 2)
throw new ERR_MISSING_ARGS("actual", "expected");
objectIs(actual, expected) &&
innerFail({
actual,
@@ -1028,8 +1196,10 @@ var require_assert = __commonJS({
});
};
assert.match = function match(actual, expected, message) {
if (arguments.length < 2) throw new ERR_MISSING_ARGS("actual", "expected");
if (!isRegExp(expected)) throw new ERR_INVALID_ARG_TYPE("expected", "RegExp", expected);
if (arguments.length < 2)
throw new ERR_MISSING_ARGS("actual", "expected");
if (!isRegExp(expected))
throw new ERR_INVALID_ARG_TYPE("expected", "RegExp", expected);
expected.test(actual) ||
innerFail({
actual,
@@ -1044,7 +1214,10 @@ var require_assert = __commonJS({
_classCallCheck(this, Comparison2),
keys.forEach(function (key) {
key in obj &&
(actual !== void 0 && typeof actual[key] == "string" && isRegExp(obj[key]) && obj[key].test(actual[key])
(actual !== void 0 &&
typeof actual[key] == "string" &&
isRegExp(obj[key]) &&
obj[key].test(actual[key])
? (_this[key] = actual[key])
: (_this[key] = obj[key]));
});
@@ -1060,7 +1233,12 @@ var require_assert = __commonJS({
operator: "deepStrictEqual",
stackStartFn: fn,
});
throw ((err.actual = actual), (err.expected = expected), (err.operator = fn.name), err);
throw (
((err.actual = actual),
(err.expected = expected),
(err.operator = fn.name),
err)
);
}
innerFail({
actual,
@@ -1074,7 +1252,12 @@ var require_assert = __commonJS({
function expectedException(actual, expected, msg, fn) {
if (typeof expected != "function") {
if (isRegExp(expected)) return expected.test(actual);
if (arguments.length === 2) throw new ERR_INVALID_ARG_TYPE("expected", ["Function", "RegExp"], expected);
if (arguments.length === 2)
throw new ERR_INVALID_ARG_TYPE(
"expected",
["Function", "RegExp"],
expected,
);
if (_typeof(actual) !== "object" || actual === null) {
var err = new AssertionError({
actual,
@@ -1087,11 +1270,18 @@ var require_assert = __commonJS({
}
var keys = Object.keys(expected);
if (expected instanceof Error) keys.push("name", "message");
else if (keys.length === 0) throw new ERR_INVALID_ARG_VALUE("error", expected, "may not be an empty object");
else if (keys.length === 0)
throw new ERR_INVALID_ARG_VALUE(
"error",
expected,
"may not be an empty object",
);
return (
keys.forEach(function (key) {
return (
(typeof actual[key] == "string" && isRegExp(expected[key]) && expected[key].test(actual[key])) ||
(typeof actual[key] == "string" &&
isRegExp(expected[key]) &&
expected[key].test(actual[key])) ||
compareExceptionKey(actual, expected, key, msg, keys, fn)
);
}),
@@ -1105,7 +1295,8 @@ var require_assert = __commonJS({
: expected.call({}, actual) === !0;
}
function getActual(fn) {
if (typeof fn != "function") throw new ERR_INVALID_ARG_TYPE("fn", "Function", fn);
if (typeof fn != "function")
throw new ERR_INVALID_ARG_TYPE("fn", "Function", fn);
try {
fn();
} catch (e) {
@@ -1116,7 +1307,10 @@ var require_assert = __commonJS({
function checkIsPromise(obj) {
return (
isPromise(obj) ||
(obj !== null && _typeof(obj) === "object" && typeof obj.then == "function" && typeof obj.catch == "function")
(obj !== null &&
_typeof(obj) === "object" &&
typeof obj.then == "function" &&
typeof obj.catch == "function")
);
}
function waitForActual(promiseFn) {
@@ -1124,9 +1318,18 @@ var require_assert = __commonJS({
var resultPromise;
if (typeof promiseFn == "function") {
if (((resultPromise = promiseFn()), !checkIsPromise(resultPromise)))
throw new ERR_INVALID_RETURN_VALUE("instance of Promise", "promiseFn", resultPromise);
throw new ERR_INVALID_RETURN_VALUE(
"instance of Promise",
"promiseFn",
resultPromise,
);
} else if (checkIsPromise(promiseFn)) resultPromise = promiseFn;
else throw new ERR_INVALID_ARG_TYPE("promiseFn", ["Function", "Promise"], promiseFn);
else
throw new ERR_INVALID_ARG_TYPE(
"promiseFn",
["Function", "Promise"],
promiseFn,
);
return Promise.resolve()
.then(function () {
return resultPromise;
@@ -1142,12 +1345,19 @@ var require_assert = __commonJS({
function expectsError(stackStartFn, actual, error, message) {
if (typeof error == "string") {
if (arguments.length === 4)
throw new ERR_INVALID_ARG_TYPE("error", ["Object", "Error", "Function", "RegExp"], error);
throw new ERR_INVALID_ARG_TYPE(
"error",
["Object", "Error", "Function", "RegExp"],
error,
);
if (_typeof(actual) === "object" && actual !== null) {
if (actual.message === error)
throw new ERR_AMBIGUOUS_ARGUMENT(
"error/message",
'The error message "'.concat(actual.message, '" is identical to the message.'),
'The error message "'.concat(
actual.message,
'" is identical to the message.',
),
);
} else if (actual === error)
throw new ERR_AMBIGUOUS_ARGUMENT(
@@ -1155,13 +1365,22 @@ var require_assert = __commonJS({
'The error "'.concat(actual, '" is identical to the message.'),
);
(message = error), (error = void 0);
} else if (error != null && _typeof(error) !== "object" && typeof error != "function")
throw new ERR_INVALID_ARG_TYPE("error", ["Object", "Error", "Function", "RegExp"], error);
} else if (
error != null &&
_typeof(error) !== "object" &&
typeof error != "function"
)
throw new ERR_INVALID_ARG_TYPE(
"error",
["Object", "Error", "Function", "RegExp"],
error,
);
if (actual === NO_EXCEPTION_SENTINEL) {
var details = "";
error && error.name && (details += " (".concat(error.name, ")")),
(details += message ? ": ".concat(message) : ".");
var fnType = stackStartFn.name === "rejects" ? "rejection" : "exception";
var fnType =
stackStartFn.name === "rejects" ? "rejection" : "exception";
innerFail({
actual: void 0,
expected: error,
@@ -1170,7 +1389,8 @@ var require_assert = __commonJS({
stackStartFn,
});
}
if (error && !expectedException(actual, error, message, stackStartFn)) throw actual;
if (error && !expectedException(actual, error, message, stackStartFn))
throw actual;
}
function expectsNoError(stackStartFn, actual, error, message) {
if (actual !== NO_EXCEPTION_SENTINEL) {
@@ -1179,7 +1399,8 @@ var require_assert = __commonJS({
!error || expectedException(actual, error))
) {
var details = message ? ": ".concat(message) : ".",
fnType = stackStartFn.name === "doesNotReject" ? "rejection" : "exception";
fnType =
stackStartFn.name === "doesNotReject" ? "rejection" : "exception";
innerFail({
actual,
expected: error,
@@ -1197,27 +1418,54 @@ var require_assert = __commonJS({
}
}
assert.throws = function throws(promiseFn) {
for (var _len2 = arguments.length, args = new Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++)
for (
var _len2 = arguments.length,
args = new Array(_len2 > 1 ? _len2 - 1 : 0),
_key2 = 1;
_key2 < _len2;
_key2++
)
args[_key2 - 1] = arguments[_key2];
expectsError.apply(void 0, [throws, getActual(promiseFn)].concat(args));
};
assert.rejects = function rejects(promiseFn) {
for (var _len3 = arguments.length, args = new Array(_len3 > 1 ? _len3 - 1 : 0), _key3 = 1; _key3 < _len3; _key3++)
for (
var _len3 = arguments.length,
args = new Array(_len3 > 1 ? _len3 - 1 : 0),
_key3 = 1;
_key3 < _len3;
_key3++
)
args[_key3 - 1] = arguments[_key3];
return waitForActual(promiseFn).then(function (result) {
return expectsError.apply(void 0, [rejects, result].concat(args));
});
};
assert.doesNotThrow = function doesNotThrow(fn) {
for (var _len4 = arguments.length, args = new Array(_len4 > 1 ? _len4 - 1 : 0), _key4 = 1; _key4 < _len4; _key4++)
for (
var _len4 = arguments.length,
args = new Array(_len4 > 1 ? _len4 - 1 : 0),
_key4 = 1;
_key4 < _len4;
_key4++
)
args[_key4 - 1] = arguments[_key4];
expectsNoError.apply(void 0, [doesNotThrow, getActual(fn)].concat(args));
};
assert.doesNotReject = function doesNotReject(fn) {
for (var _len5 = arguments.length, args = new Array(_len5 > 1 ? _len5 - 1 : 0), _key5 = 1; _key5 < _len5; _key5++)
for (
var _len5 = arguments.length,
args = new Array(_len5 > 1 ? _len5 - 1 : 0),
_key5 = 1;
_key5 < _len5;
_key5++
)
args[_key5 - 1] = arguments[_key5];
return waitForActual(fn).then(function (result) {
return expectsNoError.apply(void 0, [doesNotReject, result].concat(args));
return expectsNoError.apply(
void 0,
[doesNotReject, result].concat(args),
);
});
};
assert.ifError = function ifError(err) {
@@ -1269,7 +1517,11 @@ var require_assert = __commonJS({
}
};
function strict() {
for (var _len6 = arguments.length, args = new Array(_len6), _key6 = 0; _key6 < _len6; _key6++)
for (
var _len6 = arguments.length, args = new Array(_len6), _key6 = 0;
_key6 < _len6;
_key6++
)
args[_key6] = arguments[_key6];
innerOk.apply(void 0, [strict, args.length].concat(args));
}
Some files were not shown because too many files have changed in this diff.