Compare commits


7 Commits

Author SHA1 Message Date
Dylan Conway  9c2a7c6b82  git cache folder resolver  2023-01-13 16:04:06 -08:00
Dylan Conway  766f8ceebc  Merge branch 'main' into dylan/github-dependencies  2023-01-12 19:10:41 -08:00
Dylan Conway  c03f7c998d  git url parsing  2023-01-11 16:18:45 -08:00
Dylan Conway  beb03c3c54  handle github prefix  2023-01-11 14:26:50 -08:00
Dylan Conway  8846ae2454  install github repositories with dependencies  2023-01-11 13:42:36 -08:00
Dylan Conway  a4c379d316  Merge remote-tracking branch 'origin/main' into dylan/github-dependencies  2023-01-11 11:51:25 -08:00
Dylan Conway  25b080a05e  github dependencies progress  2023-01-10 20:21:47 -08:00
421 changed files with 33420 additions and 50125 deletions

.github/workflows/bun-homebrew.yml vendored Normal file (+30)

@@ -0,0 +1,30 @@
name: bun-homebrew
on:
release:
types:
- published
- edited
jobs:
homebrew:
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh' && github.event.release.published_at != null
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ env.HOMEBREW_TOKEN }}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: '2.6'
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ github.event.release.tag_name }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_message: Release ${{ github.event.release.tag_name }}


@@ -1,165 +0,0 @@
name: bun-linux
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
TEST_TAG: bun-test'
on:
push:
branches:
- main
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
CPU_TARGET=${{matrix.cpu}}
WEBKIT_URL=${{matrix.webkit_url}}
GIT_SHA=${{github.sha}}
WEBKIT_BASENAME=${{matrix.webkit_basename}}
platforms: linux/${{matrix.build_arch}}
target: artifact
outputs: type=local,dest=${{runner.temp}}/release
- name: Zip
run: |
# if zip is not found
if [ ! -x "$(command -v zip)" ]; then
sudo apt-get update && sudo apt-get install -y zip --no-install-recommends
fi
if [ ! -x "$(command -v strip)" ]; then
sudo apt-get update && sudo apt-get install -y binutils --no-install-recommends
fi
cd ${{runner.temp}}/release
chmod +x bun-profile bun
mkdir bun-${{matrix.tag}}-profile
mkdir bun-${{matrix.tag}}
strip bun
mv bun-profile bun-${{matrix.tag}}-profile/bun-profile
mv bun bun-${{matrix.tag}}/bun
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
# - name: Sign Release
# id: sign-release
# if: |
# github.repository_owner == 'oven-sh'
# && github.ref == 'refs/heads/main'
# env:
# GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
# run: |
# echo "$GPG_PASSPHRASE" | bun run .scripts/sign-release.ts
# - name: Release Checksum
# id: release-checksum
# uses: ncipollo/release-action@v1
# if: |
# github.repository_owner == 'oven-sh'
# && github.ref == 'refs/heads/main'
# with:
# prerelease: true
# body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
# allowUpdates: true
# replacesArtifacts: true
# generateReleaseNotes: true
# artifactErrorsFailBuild: true
# token: ${{ secrets.GITHUB_TOKEN }}
# name: "Canary (${{github.sha}})"
# tag: "canary"
# artifacts: "SHASUMS256.txt,SHASUMS256.txt.asc"


@@ -32,25 +32,32 @@ jobs:
runs-on: ${{matrix.runner}}
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
include:
- cpu: haswell
tag: linux-x64
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-linux-amd64-lto.tar.gz"
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: nehalem
- cpu: westmere
tag: linux-x64-baseline
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-linux-amd64-lto.tar.gz"
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v3
@@ -121,20 +128,10 @@ jobs:
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
@@ -146,3 +143,11 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies


@@ -27,11 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
runs-on: zig-object
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
@@ -104,13 +104,13 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell
@@ -119,16 +119,16 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell
@@ -137,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native
@@ -145,7 +145,7 @@ jobs:
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true
@@ -242,14 +242,14 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -257,14 +257,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -370,9 +370,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"


@@ -27,11 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
runs-on: zig-object
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
@@ -104,13 +104,13 @@ jobs:
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell
@@ -119,16 +119,16 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell
@@ -137,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native
@@ -145,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -243,14 +243,14 @@ jobs:
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -258,14 +258,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -374,9 +374,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"


@@ -27,11 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
runs-on: zig-object
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
- cpu: haswell
@@ -104,13 +104,13 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell
@@ -119,16 +119,16 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell
@@ -137,7 +137,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native
@@ -145,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -245,14 +245,14 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
@@ -260,14 +260,14 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -376,9 +376,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"


@@ -1,116 +0,0 @@
name: bun-release-canary
concurrency: release-canary
on:
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
jobs:
npm:
name: Release to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- canary publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- id: bun-install
name: Install Dependencies
run: |
bun upgrade --canary
bun install
- id: setup-env
name: Setup Environment
run: |
SHA=$(git rev-parse --short "$GITHUB_SHA")
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date '+%Y%m%d').1+${SHA}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: canary
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}


@@ -1,176 +0,0 @@
name: bun-release
concurrency: release
on:
release:
types:
- published
workflow_dispatch:
inputs:
tag:
type: string
description: The tag to publish
required: true
jobs:
npm:
name: Release to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- "${{ env.TAG }}" publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- id: bun-install
name: Install Dependencies
run: |
bun upgrade --canary
bun install
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: environment
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: |
type=match,pattern=(bun-v)?(\d.\d.\d),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d.\d),group=2,value=${{ env.TAG }}
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
homebrew:
name: Release to Homebrew
runs-on: ubuntu-latest
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: "2.6"
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ env.TAG }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_options: --gpg-sign=${{ steps.setup-gpg.outputs.keyid }}
commit_message: Bun v${{ env.TAG }}
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>


@@ -1,43 +0,0 @@
name: bun-test
on:
push:
branches:
- main
- "test/*"
paths:
- "src/**/*"
- "test/**/*"
pull_request:
branches:
- main
- "test/*"
paths:
- "src/**/*"
- "test/**/*"
workflow_dispatch:
inputs:
release:
type: string
default: canary
jobs:
bun:
name: Bun
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-test
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: ${{ github.event.inputs.release || 'canary' }}
- id: setup-dependencies
name: Setup Dependencies
run: bun install
- id: test
name: Test
run: bun run test

.github/workflows/bun-types-release.yml vendored Normal file (+137)

@@ -0,0 +1,137 @@
name: Release
on:
workflow_dispatch:
jobs:
test-build:
name: Test & Build
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install bun
uses: xhyrom/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install dependencies
run: bun install
- name: ESLint
run: bun run lint
- name: Build package
run: bun run build
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist/*
if-no-files-found: error
publish-npm:
name: Publish on NPM
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: 'https://registry.npmjs.org'
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist
- name: Publish on NPM
run: cd packages/bun-types/dist/ && npm publish --access public
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
publish-gpr:
name: Publish on GPR
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: 'https://npm.pkg.github.com/'
scope: '@oven-sh'
- name: Install bun
uses: xhyrom/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: dist
- name: Add scope to name
run: bun scripts/gpr.ts
- name: Publish on GPR
run: cd dist/ && npm publish --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# no need for separate releases now
# create-release:
# name: Create Release
# runs-on: ubuntu-latest
# needs: [test-build]
# defaults:
# run:
# working-directory: packages/bun-types
# if: github.repository_owner == 'oven-sh'
# steps:
# - name: Download all artifacts
# uses: actions/download-artifact@v3
# with:
# name: bun-types
# path: packages/bun-types/dist
# - name: Set version
# run: echo "version=$(jq --raw-output '.version' dist/package.json)" >> $GITHUB_ENV
# - name: Create Release
# uses: softprops/action-gh-release@v0.1.14
# with:
# tag_name: "v${{ env.version }}"
# body: "This is the release of bun-types that corresponds to the commit [${{ github.sha }}]"
# token: ${{ secrets.GITHUB_TOKEN }}
# files: |
# dist/*


@@ -1,16 +1,16 @@
name: Test bun-types
name: TypeScript Types
on:
push:
paths:
- 'packages/bun-types/**'
- packages/bun-types/**/*
branches: [main]
pull_request:
paths:
- 'packages/bun-types/**'
- packages/bun-types/**/*
jobs:
tests:
name: Build and test
name: Build and Test
runs-on: ubuntu-latest
defaults:
run:
@@ -18,12 +18,12 @@ jobs:
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Install bun
uses: oven-sh/setup-bun@v0.1.8
uses: xhyrom/setup-bun@v0.1.8
with:
bun-version: canary
bun-version: latest
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install node
@@ -37,5 +37,8 @@ jobs:
- name: Generate package
run: bun run build
- name: ESLint
run: bun run lint
- name: Tests
run: bun run test

.gitignore vendored (4 changes)

@@ -1,6 +1,7 @@
.DS_Store
zig-cache
packages/*/*.wasm
*.wasm
*.o
*.a
profile.json
@@ -109,4 +110,3 @@ misctools/machbench
bun-webkit
src/deps/c-ares/build
src/bun.js/debug-bindings-obj


@@ -1,6 +1,7 @@
{
"arrowParens": "avoid",
"printWidth": 120,
"trailingComma": "all",
"useTabs": false
"tabWidth": 2,
"useTabs": false,
"singleQuote": false,
"bracketSpacing": true,
"trailingComma": "all"
}


@@ -12,7 +12,7 @@ ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=jul27-2
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.11.0-dev.1393+38eebf3c4"
ARG ZIG_VERSION="0.11.0-dev.947+cf822c6dd"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
@@ -20,7 +20,7 @@ ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
ARG WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/$WEBKIT_TAG/${WEBKIT_BASENAME}.tar.gz"
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
ARG GIT_SHA=""
ARG BUN_BASE_VERSION=0.5
ARG BUN_BASE_VERSION=0.4
FROM bitnami/minideb:bullseye as bun-base


@@ -8,7 +8,7 @@ ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
ARG BUN_PACKAGES_DIR=${BUN_DIR}/packages
ARG ZIG_VERSION="0.11.0-dev.1393+38eebf3c4"
ARG ZIG_VERSION="0.11.0-dev.947+cf822c6dd"
ARG ZIG_FOLDERNAME=zig-linux-${ARCH}-${ZIG_VERSION}
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
@@ -99,7 +99,7 @@ RUN tar -xf ${ZIG_FILENAME} && \
mv ${ZIG_FOLDERNAME} ${ZIG_PATH};
RUN cd $GITHUB_WORKSPACE && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/jan29/bun-webkit-linux-$BUILDARCH.tar.gz && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-$BUILDARCH.tar.gz && \
tar -xzf bun-webkit-linux-$BUILDARCH.tar.gz && \
rm bun-webkit-linux-$BUILDARCH.tar.gz && \
cat $WEBKIT_OUT_DIR/include/cmakeconfig.h > /dev/null


@@ -35,11 +35,11 @@ DOCKER_BUILDARCH = amd64
BREW_PREFIX_PATH = /usr/local
DEFAULT_MIN_MACOS_VERSION = 10.14
MARCH_NATIVE = -march=$(CPU_TARGET) -mtune=$(CPU_TARGET)
NATIVE_OR_OLD_MARCH = -march=nehalem
NATIVE_OR_OLD_MARCH = -march=westmere
endif
MIN_MACOS_VERSION ?= $(DEFAULT_MIN_MACOS_VERSION)
BUN_BASE_VERSION = 0.5
BUN_BASE_VERSION = 0.4
AR=
@@ -509,9 +509,6 @@ npm-install:
$(NPM_CLIENT) install --ignore-scripts --production
print-% : ; @echo $* = $($*)
get-% : ; @echo $($*)
print-version:
@echo $(PACKAGE_JSON_VERSION)
@@ -1394,19 +1391,10 @@ bindings: $(DEBUG_OBJ_DIR) $(DEBUG_OBJ_FILES) $(DEBUG_WEBCORE_OBJ_FILES) $(DEBUG
.PHONY: jsc-bindings-mac
jsc-bindings-mac: bindings
# lInux only
MIMALLOC_VALGRIND_ENABLED_FLAG =
ifeq ($(OS_NAME),linux)
MIMALLOC_VALGRIND_ENABLED_FLAG = -DMI_VALGRIND=ON
endif
.PHONY: mimalloc-debug
mimalloc-debug:
rm -rf $(BUN_DEPS_DIR)/mimalloc/CMakeCache* $(BUN_DEPS_DIR)/mimalloc/CMakeFiles
cd $(BUN_DEPS_DIR)/mimalloc; make clean || echo ""; \
CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS_WITHOUT_RELEASE) ${MIMALLOC_OVERRIDE_FLAG} ${MIMALLOC_VALGRIND_ENABLED_FLAG} \
CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS_WITHOUT_RELEASE) ${MIMALLOC_OVERRIDE_FLAG} \
-DCMAKE_BUILD_TYPE=Debug \
-DMI_DEBUG_FULL=1 \
-DMI_SKIP_COLLECT_ON_EXIT=1 \


@@ -29,12 +29,6 @@ Native: (macOS x64 & Silicon, Linux x64, Windows Subsystem for Linux)
curl -fsSL https://bun.sh/install | bash
```
npm:
```sh
npm install -g bun
```
Homebrew: (MacOS and Linux)
```sh
@@ -181,7 +175,6 @@ bun upgrade --canary
- [`Bun.Transpiler.scan`](#buntranspilerscan)
- [`Bun.Transpiler.scanImports`](#buntranspilerscanimports)
- [`Bun.peek` - read a promise same-tick](#bunpeek---read-a-promise-without-resolving-it)
- [`Bun.dns` - lookup a domain](#bundns---lookup-a-domain)
- [Module resolution in Bun](#module-resolution-in-bun)
- [Environment variables](#environment-variables)
- [Credits](#credits)
@@ -422,21 +415,6 @@ Assuming a package.json with a `"clean"` command in `"scripts"`:
}
```
## Using bun as a WebAssembly runner
Bun v0.5.2 added experimental support for the [WebAssembly System Interface](https://github.com/WebAssembly/WASI) (WASI). This means you can run WebAssembly binaries in Bun.
To run a WASI binary, use `bun run`:
```bash
bun run ./my-wasm-app.wasm
# you can omit "run" if the filename ends with .wasm
bun ./my-wasm-app.wasm
```
WASI support is based on [wasi-js](https://github.com/sagemathinc/cowasm/tree/main/packages/wasi-js). Currently, it only supports WASI binaries that use the `wasi_snapshot_preview1` or `wasi_unstable` APIs. Bun's implementation is not optimized for performance, but if this feature gets popular, we'll definitely invest time in making it faster.
## Creating a Discord bot with Bun
### Application Commands
@@ -602,6 +580,7 @@ You can see [Bun's Roadmap](https://github.com/oven-sh/bun/issues/159), but here
| ------------------------------------------------------------------------------------- | -------------- |
| Web Streams with Fetch API | bun.js |
| Web Streams with HTMLRewriter | bun.js |
| Package hoisting that matches npm behavior | bun install |
| Source Maps (unbundled is supported) | JS Bundler |
| Source Maps | CSS |
| JavaScript Minifier | JS Transpiler |
@@ -1275,7 +1254,7 @@ bun install --backend copyfile
**`symlink`** is typically only used for `file:` dependencies (and eventually `link:`) internally. To prevent infinite loops, it skips symlinking the `node_modules` folder.
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own node_modules folder or you pass `--preserve-symlinks` to `node`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has it's own node_modules folder or you pass `--preserve-symlinks` to `node`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
```bash
rm -rf node_modules
@@ -1296,7 +1275,7 @@ buns usage of `Cache-Control` ignores `Age`. This improves performance, but m
### `bun run`
`bun run` is a fast `package.json` script runner and executable runner. Instead of waiting 170ms for your npm client to start every time, you wait 6ms for bun.
`bun run` is a fast `package.json` script runner. Instead of waiting 170ms for your npm client to start every time, you wait 6ms for bun.
By default, `bun run` prints the script that will be invoked:
@@ -1311,7 +1290,7 @@ You can disable that with `--silent`
bun run --silent clean
```
`bun run ${script-name}` runs the equivalent of `npm run script-name`, `npx bin-name`, and `node file-name` all in one command. For example, `bun run dev` runs the `dev` script in `package.json`, which may sometimes spin up non-bun processes.
`bun run ${script-name}` runs the equivalent of `npm run script-name`. For example, `bun run dev` runs the `dev` script in `package.json`, which may sometimes spin up non-bun processes.
`bun run ${javascript-file.js}` will run it with bun, as long as the file doesn't have a node shebang.
@@ -2612,7 +2591,7 @@ If you need to read from `stdout` or `stderr` synchronously, you should use `Bun
`Bun.spawn` returns a `Subprocess` object.
More complete types are available in [`bun-types`](https://github.com/oven-sh/bun/tree/main/packages/bun-types).
More complete types are available in [`bun-types`](https://github.com/oven-sh/bun-types).
```ts
interface Subprocess {
@@ -2844,50 +2823,6 @@ queueMicrotask(() => {
Builtin buffering is planned in a future version of Bun.
## `Bun.dns` - lookup a domain
`Bun.dns` includes utilities to make DNS requests, similar to `node:dns`. As of Bun v0.5.0, the only implemented function is `dns.lookup`, though more will be implemented soon.
You can lookup the IP addresses of a hostname by using `dns.lookup`.
```ts
import { dns } from "bun";
const [{ address }] = await dns.lookup("example.com");
console.log(address); // "93.184.216.34"
```
If you need to limit IP addresses to either IPv4 or IPv6, you can specify the `family` as an option.
```ts
import { dns } from "bun";
const [{ address }] = await dns.lookup("example.com", { family: 6 });
console.log(address); // "2606:2800:220:1:248:1893:25c8:1946"
```
Bun supports three backends for DNS resolution:
- `c-ares` - This is the default on Linux, and it uses the [c-ares](https://c-ares.org/) library to perform DNS resolution.
- `system` - Uses the system's non-blocking DNS resolver, if available. Otherwise, falls back to `getaddrinfo`. This is the default on macOS, and the same as `getaddrinfo` on Linux.
- `getaddrinfo` - Uses the POSIX standard `getaddrinfo` function, which may cause performance issues under concurrent load.
You can choose a particular backend by specifying `backend` as an option.
```ts
import { dns } from "bun";
const [{ address, ttl }] = await dns.lookup("example.com", {
backend: "c-ares",
});
console.log(address); // "93.184.216.34"
console.log(ttl); // 21237
```
Note: the `ttl` property is only accurate when the `backend` is c-ares. Otherwise, `ttl` will be `0`.
This was added in Bun v0.5.0.
## `Bun.peek` - read a promise without resolving it
`Bun.peek` is a utility function that lets you read a promise's result without `await` or `.then`, but only if the promise has already fulfilled or rejected.
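A minimal sketch of that behavior, assuming the `peek` export from `"bun"` (the promises and logged values here are illustrative only):

```ts
import { peek } from "bun";

const fulfilled = Promise.resolve(42);          // already settled
const pending = new Promise<number>(() => {});  // never settles

// For an already-fulfilled promise, peek returns the value synchronously.
console.log(peek(fulfilled)); // 42

// For a still-pending promise, peek returns the promise itself, unchanged.
console.log(peek(pending) === pending); // true
```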
@@ -4646,7 +4581,7 @@ It will check the lockfile for the version. If the lockfile doesn't have a versi
Lowlights:
- TypeScript type support isn't implemented yet
- TypeScript type support isn't implmented yet
- patch package support isn't implemented yet
#### Resolving packages
@@ -5007,7 +4942,6 @@ bun also statically links these libraries:
- [`c-ares`](https://github.com/c-ares/c-ares), which is MIT licensed
- `libicu` 72, which can be found here: <https://github.com/unicode-org/icu/blob/main/icu4c/LICENSE>
- A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets), which is Apache 2.0 licensed
- WASI implementation from [`wasi-js`](https://github.com/sagemathinc/cowasm/tree/main/packages/wasi-js), which is BSD 3 clause licensed. Note that wasi-js is originally based on [wasmer-js](https://github.com/wasmerio/wasmer-js), which is MIT licensed. wasmer-js was based on [node-wasi](https://github.com/devsnek/node-wasi) by Gus Caplan (also MIT licensed). You can [read more about the history here](https://github.com/sagemathinc/cowasm/tree/main/packages/wasi-js#history).
For compatibility reasons, these NPM packages are embedded into buns binary and injected if imported.


@@ -1,49 +1,9 @@
import { bench, run } from "mitata";
const N = parseInt(process.argv.slice(2).at(0) || "10", 10);
var isBuffer = new Buffer(0);
var isNOtBuffer = "not a buffer";
bench("Buffer.isBuffer(buffer)", () => {
return Buffer.isBuffer(isBuffer);
});
{
var j = 0;
j += 1;
j += eval("'ok'");
bench("Buffer.isBuffer(string)", () => {
return Buffer.isBuffer(j);
});
}
bench("Buffer.from('short string')", () => {
return Buffer.from("short string");
});
const loooong = "long string".repeat(9999).split("").join(" ");
bench("Buffer.byteLength('long string'.repeat(9999))", () => {
return Buffer.byteLength(loooong);
});
var hundred = new ArrayBuffer(100);
bench("Buffer.from(ArrayBuffer(100))", () => {
return Buffer.from(hundred);
});
var hundredArray = new Uint8Array(100);
bench("Buffer.from(Uint8Array(100))", () => {
return Buffer.from(hundredArray);
});
var empty = new Uint8Array(0);
bench("Buffer.from(Uint8Array(0))", () => {
return Buffer.from(empty);
});
bench("new Buffer(Uint8Array(0))", () => {
return new Buffer(empty);
bench("new Buffer(0)", () => {
return new Buffer(0);
});
bench(`new Buffer(${N})`, () => {
@@ -66,4 +26,4 @@ bench("Buffer.alloc(24_000)", () => {
return Buffer.alloc(24_000);
});
await run({});
await run();

Binary file not shown.


@@ -21,14 +21,8 @@ bench('crypto.createHash("sha512")', () => {
hasher.digest();
});
bench('crypto.createHash("sha256")', () => {
var hasher = crypto.createHash("sha256");
hasher.update(foo);
hasher.digest();
});
bench('crypto.createHash("sha1")', () => {
var hasher = crypto.createHash("sha1");
bench('crypto.createHash("sha512")', () => {
var hasher = crypto.createHash("sha512");
hasher.update(foo);
hasher.digest();
});


@@ -1,81 +1,71 @@
const EventEmitterNative = require("events").EventEmitter;
const TypedEmitter = require("tiny-typed-emitter").TypedEmitter;
const EventEmitter3 = require("eventemitter3").EventEmitter;
const EventEmitter = require("events").EventEmitter;
import { bench, run } from "mitata";
const event = new Event("hello");
var id = 0;
for (let [EventEmitter, className] of [
[EventEmitterNative, "EventEmitter"],
[TypedEmitter, "TypedEmitter"],
[EventEmitter3, "EventEmitter3"],
]) {
const emitter = new EventEmitter();
emitter.on("hello", (event) => {
event.preventDefault();
const emitter = new EventEmitter();
const event = new Event("hello");
emitter.on("hello", (event) => {
event.preventDefault();
});
var id = 0;
bench("EventEmitter.emit", () => {
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
var monkey = Object.assign({}, EventEmitter.prototype);
monkey.on("hello", (event) => {
event.preventDefault();
});
bench("[monkey] EventEmitter.emit", () => {
var called = false;
monkey.emit("hello", {
preventDefault() {
id++;
called = true;
},
});
bench(`${className}.emit`, () => {
emitter.emit("hello", {
if (!called) {
throw new Error("monkey failed");
}
});
bench("EventEmitter.on x 10_000 (handler)", () => {
var cb = (event) => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
emitter.off("hey", cb);
if (!called) throw new Error("not called");
});
bench("[monkey] EventEmitter.on x 10_000 (handler)", () => {
var cb = () => {
event.preventDefault();
};
monkey.on("hey", cb);
for (let i = 0; i < 10_000; i++)
monkey.emit("hey", {
preventDefault() {
id++;
},
});
});
bench(`${className}.on x 10_000 (handler)`, () => {
var cb = (event) => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
emitter.off("hey", cb);
if (!called) throw new Error("not called");
});
if (EventEmitter !== EventEmitter3) {
var monkey = Object.assign({}, EventEmitter.prototype);
monkey.on("hello", (event) => {
event.preventDefault();
});
bench(`[monkey] ${className}.emit`, () => {
var called = false;
monkey.emit("hello", {
preventDefault() {
id++;
called = true;
},
});
if (!called) {
throw new Error("monkey failed");
}
});
bench(`[monkey] ${className}.on x 10_000 (handler)`, () => {
var cb = () => {
event.preventDefault();
};
monkey.on("hey", cb);
for (let i = 0; i < 10_000; i++)
monkey.emit("hey", {
preventDefault() {
id++;
},
});
monkey.off("hey", cb);
});
}
}
monkey.off("hey", cb);
});
var target = new EventTarget();
target.addEventListener("hello", (event) => {});


@@ -1,25 +0,0 @@
import { bench, run } from "mitata";
// These are no-op C++ functions that are exported to JS.
const lazy = globalThis[Symbol.for("Bun.lazy")];
const noop = lazy("noop");
const fn = noop.function;
const regular = noop.functionRegular;
bench("C++ fn regular", () => {
regular();
});
bench("C++ fn", () => {
fn();
});
bench("C++ getter", () => {
return noop.getterSetter;
});
bench("C++ setter", () => {
noop.getterSetter = 1;
});
run();


@@ -1,6 +0,0 @@
{
"dependencies": {
"eventemitter3": "^5.0.0",
"tiny-typed-emitter": "latest"
}
}


@@ -1,102 +1,58 @@
import { readFileSync } from "fs";
import { dirname } from "path";
import { fileURLToPath } from "url";
import { bench, run, group } from "mitata";
import { createRequire } from "module";
const require = createRequire(import.meta.url);
const esbuild_ = require("esbuild/lib/main");
const swc_ = require("@swc/core");
const babel_ = require("@babel/core");
const code = readFileSync(
dirname(fileURLToPath(import.meta.url)) +
"/../../src/test/fixtures/simple.jsx",
"utf-8",
);
async function getWithName(name) {
let transformSync;
let transform;
let opts;
if (name === "bun") {
const transpiler = new Bun.Transpiler({ loader: "jsx" });
transformSync = transpiler.transformSync.bind(transpiler);
transform = transpiler.transform.bind(transpiler);
opts = "jsx";
} else if (name === "esbuild") {
try {
transformSync = esbuild_.transformSync;
transform = esbuild_.transform;
opts = { loader: "jsx" };
} catch (exception) {
throw exception;
}
} else if (name === "swc") {
try {
transformSync = swc_.transformSync;
transform = swc_.transform;
opts = {
sourceMaps: false,
inlineSourcesContent: false,
jsc: {
target: "es2022",
parser: {
jsx: true,
},
},
};
} catch (exception) {
throw exception;
}
} else if (name === "babel") {
try {
transformSync = babel_.transformSync;
transform = babel_.transform;
opts = {
sourceMaps: false,
presets: ["@babel/preset-react"],
};
} catch (exception) {
throw exception;
}
var transformSync;
var transform;
var opts;
if (process.isBun) {
const transpiler = new Bun.Transpiler({ loader: "jsx" });
transformSync = transpiler.transformSync.bind(transpiler);
transform = transpiler.transform.bind(transpiler);
opts = "jsx";
} else if (process.env["esbuild"]) {
try {
const esbuild = await import("esbuild");
transformSync = esbuild.transformSync;
transform = esbuild.transform;
opts = { loader: "jsx" };
} catch (exception) {
throw exception;
}
} else if (process.env["swc"]) {
try {
const swc = await import("@swc/core");
transformSync = swc.transformSync;
transform = swc.transform;
opts = {
sourceMaps: false,
inlineSourcesContent: false,
jsc: {
target: "es2022",
parser: {
jsx: true,
},
},
};
} catch (exception) {
throw exception;
}
} else if (process.env["babel"]) {
try {
const swc = await import("@babel/core");
transformSync = swc.transformSync;
transform = swc.transform;
opts = {
sourceMaps: false,
presets: [(await import("@babel/preset-react")).default],
};
} catch (exception) {
throw exception;
}
return {
transformSync,
transform,
opts,
name,
};
}
const bun = process.isBun ? await getWithName("bun") : null;
const esbuild = await getWithName("esbuild");
const swc = await getWithName("swc");
const babel = await getWithName("babel");
const code = readFileSync("src/test/fixtures/simple.jsx", "utf8");
const transpilers = [bun, esbuild, swc, babel].filter(Boolean);
group("transformSync (" + ((code.length / 1024) | 0) + " KB jsx file)", () => {
for (let { name, transformSync, opts } of transpilers) {
bench(name, () => {
transformSync(code, opts);
});
}
});
group("tranform x 5", () => {
for (let { name, transform, opts } of transpilers) {
bench(name, async () => {
return Promise.all([
transform(code, opts),
transform(code + "\n", opts),
transform("\n" + code + "\n", opts),
transform("\n" + code + "\n\n", opts),
transform("\n\n" + code + "\n\n", opts),
]);
});
}
});
await run();
if (process.env.ASYNC) {
console.log(await transform(code, opts));
} else {
console.log(transformSync(code, opts));
}

bun.lockb (binary)

Binary file not shown.


@@ -1,3 +1,6 @@
[test]
# Large monorepos (like Bun) may want to specify the test directory more specifically
# By default, `bun wiptest` scans every single folder recurisvely which, if you


@@ -1,84 +0,0 @@
# https://hub.docker.com/_/debian
# https://hub.docker.com/_/ubuntu
ARG IMAGE=debian:bullseye-slim
FROM $IMAGE AS base
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest
RUN apt-get update -qq \
&& apt-get install -qq --no-install-recommends \
ca-certificates \
curl \
dirmngr \
gpg \
gpg-agent \
unzip \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& arch="$(dpkg --print-architecture)" \
&& case "${arch##*-}" in \
amd64) build="x64-baseline";; \
arm64) build="aarch64";; \
*) echo "error: unsupported architecture: ($arch)"; exit 1 ;; \
esac \
&& version="$BUN_VERSION" \
&& case "$version" in \
latest | canary | bun-v*) tag="$version"; ;; \
v*) tag="bun-$version"; ;; \
*) tag="bun-v$version"; ;; \
esac \
&& case "$tag" in \
latest) release="latest/download"; ;; \
*) release="download/$tag"; ;; \
esac \
&& curl "https://github.com/oven-sh/bun/releases/$release/bun-linux-$build.zip" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: unknown release: ($tag)" && exit 1) \
&& for key in \
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
; do \
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& gpg --update-trustdb \
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
-fsSLO \
--compressed \
--retry 5 \
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& unzip "bun-linux-$build.zip" \
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
&& chmod +x /usr/local/bin/bun \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& which bun \
&& which bunx \
&& bun --version
FROM $IMAGE
RUN groupadd bun \
--gid 1000 \
&& useradd bun \
--uid 1000 \
--gid bun \
--shell /bin/sh \
--create-home
USER 1000:1000
COPY --from=base --chown=1000:1000 /usr/local/bin/bun /usr/local/bin
COPY --from=base --chown=1000:1000 /usr/local/bin/bunx /usr/local/bin
RUN which bun \
&& which bunx \
&& bun --version
WORKDIR /home/bun/app
CMD ["bun"]


@@ -1,76 +0,0 @@
import { file, serve } from "bun";
import { existsSync, statSync } from "fs";
serve({
fetch(req: Request) {
let pathname = new URL(req.url).pathname.substring(1);
if (pathname == "") {
pathname = import.meta.url.replace("file://", "");
}
if (!existsSync(pathname)) {
return new Response(null, { status: 404 });
}
const stats = statSync(pathname);
// https://github.com/gornostay25/svelte-adapter-bun/blob/master/src/sirv.js
const headers = new Headers({
"Content-Length": "" + stats.size,
"Last-Modified": stats.mtime.toUTCString(),
ETag: `W/"${stats.size}-${stats.mtime.getTime()}"`,
});
if (req.headers.get("if-none-match") === headers.get("ETag")) {
return new Response(null, { status: 304 });
}
const opts = { code: 200, start: 0, end: Infinity, range: false };
if (req.headers.has("range")) {
opts.code = 206;
let [x, y] = req.headers.get("range").replace("bytes=", "").split("-");
let end = (opts.end = parseInt(y, 10) || stats.size - 1);
let start = (opts.start = parseInt(x, 10) || 0);
if (start >= stats.size || end >= stats.size) {
headers.set("Content-Range", `bytes */${stats.size}`);
return new Response(null, {
headers: headers,
status: 416,
});
}
headers.set("Content-Range", `bytes ${start}-${end}/${stats.size}`);
headers.set("Content-Length", "" + (end - start + 1));
headers.set("Accept-Ranges", "bytes");
opts.range = true;
}
if (opts.range) {
return new Response(file(pathname).slice(opts.start, opts.end), {
headers,
status: opts.code,
});
}
return new Response(file(pathname), { headers, status: opts.code });
},
// this is called when fetch() throws or rejects
// error(err: Error) {
// return new Response("uh oh! :(" + String(err.toString()), { status: 500 });
// },
// this boolean enables the bun's default error handler
// sometime after the initial release, it will auto reload as well
development: process.env.NODE_ENV !== "production",
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
// SSL is enabled if these two are set
// certFile: './cert.pem',
// keyFile: './key.pem',
port: 3000, // number or string
hostname: "localhost", // defaults to 0.0.0.0
});


@@ -1,24 +0,0 @@
// A simple way to connect FileSystemRouter to Bun#serve
// run with `bun run index.tsx`
import { renderToReadableStream } from 'react-dom/server'
import { FileSystemRouter } from 'bun'
export default {
port: 3000,
async fetch(request: Request) {
const router = new FileSystemRouter({
dir: process.cwd() + "/pages",
style: "nextjs"
})
const route = router.match(request)
const { default: Root } = await import(route.filePath)
return new Response(
await renderToReadableStream(
<Root {...route.params} />
)
)
}
}


@@ -1,14 +0,0 @@
{
"name": "react-routes",
"module": "index.tsx",
"type": "module",
"devDependencies": {
"@types/react": "^18.0.27",
"@types/react-dom": "^18.0.10",
"bun-types": "^0.4.0"
},
"dependencies": {
"react": "^18.2.0",
"react-dom": "^18.2.0"
}
}


@@ -1,17 +0,0 @@
// reachable from http://localhost:3000/
export default () => (
<html>
<head>
<title>index</title>
</head>
<body>
<h1>
<a href="/one" >one</a>
</h1>
<h1>
<a href="/two" >two</a>
</h1>
</body>
</html>
)


@@ -1,12 +0,0 @@
// reachable from http://localhost:3000/one
export default () => (
<html>
<head>
<title>one</title>
</head>
<body>
<p>one</p>
</body>
</html>
)


@@ -1,12 +0,0 @@
// reachable from http://localhost:3000/two
export default () => (
<html>
<head>
<title>two</title>
</head>
<body>
<p>two</p>
</body>
</html>
)


@@ -1,20 +0,0 @@
{
"compilerOptions": {
"lib": [
"ESNext"
],
"module": "esnext",
"target": "esnext",
"moduleResolution": "nodenext",
"strict": false,
"downlevelIteration": true,
"skipLibCheck": true,
"jsx": "preserve",
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"types": [
"bun-types" // add Bun global
]
}
}


@@ -1,12 +1,14 @@
{
"dependencies": {
"eslint": "^8.20.0",
"eslint-config-prettier": "^8.5.0",
"express": "^4.18.2",
"mitata": "^0.1.3",
"peechy": "latest",
"prettier": "^2.4.1",
"react": "next",
"react-dom": "next",
"eslint": "^8.20.0",
"eslint-config-prettier": "^8.5.0",
"prettier": "^2.4.1",
"svelte": "^3.52.0",
"typescript": "latest"
},
"private": true,
@@ -23,7 +25,7 @@
"@types/react": "^18.0.25",
"@typescript-eslint/eslint-plugin": "^5.31.0",
"@typescript-eslint/parser": "^5.31.0",
"bun-webkit": "0.0.1-dd868651f5c801985460c1128ae196d1edca9925"
"bun-webkit": "latest"
},
"version": "0.0.0"
}

packages/bun-error/package-lock.json generated Normal file (+230)

@@ -0,0 +1,230 @@
{
"name": "bun-error",
"version": "1.0.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"@types/prop-types": {
"version": "15.7.5",
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz",
"integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==",
"dev": true
},
"@types/react": {
"version": "17.0.47",
"resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.47.tgz",
"integrity": "sha512-mk0BL8zBinf2ozNr3qPnlu1oyVTYq+4V7WA76RgxUAtf0Em/Wbid38KN6n4abEkvO4xMTBWmnP1FtQzgkEiJoA==",
"dev": true,
"requires": {
"@types/prop-types": "*",
"@types/scheduler": "*",
"csstype": "^3.0.2"
}
},
"@types/scheduler": {
"version": "0.16.2",
"resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz",
"integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==",
"dev": true
},
"csstype": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.0.tgz",
"integrity": "sha512-uX1KG+x9h5hIJsaKR9xHUeUraxf8IODOwq9JLNPq6BwB04a/xgpq3rcx47l5BZu5zBPlgD342tdke3Hom/nJRA==",
"dev": true
},
"esbuild": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.48.tgz",
"integrity": "sha512-w6N1Yn5MtqK2U1/WZTX9ZqUVb8IOLZkZ5AdHkT6x3cHDMVsYWC7WPdiLmx19w3i4Rwzy5LqsEMtVihG3e4rFzA==",
"requires": {
"esbuild-android-64": "0.14.48",
"esbuild-android-arm64": "0.14.48",
"esbuild-darwin-64": "0.14.48",
"esbuild-darwin-arm64": "0.14.48",
"esbuild-freebsd-64": "0.14.48",
"esbuild-freebsd-arm64": "0.14.48",
"esbuild-linux-32": "0.14.48",
"esbuild-linux-64": "0.14.48",
"esbuild-linux-arm": "0.14.48",
"esbuild-linux-arm64": "0.14.48",
"esbuild-linux-mips64le": "0.14.48",
"esbuild-linux-ppc64le": "0.14.48",
"esbuild-linux-riscv64": "0.14.48",
"esbuild-linux-s390x": "0.14.48",
"esbuild-netbsd-64": "0.14.48",
"esbuild-openbsd-64": "0.14.48",
"esbuild-sunos-64": "0.14.48",
"esbuild-windows-32": "0.14.48",
"esbuild-windows-64": "0.14.48",
"esbuild-windows-arm64": "0.14.48"
}
},
"esbuild-android-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.14.48.tgz",
"integrity": "sha512-3aMjboap/kqwCUpGWIjsk20TtxVoKck8/4Tu19rubh7t5Ra0Yrpg30Mt1QXXlipOazrEceGeWurXKeFJgkPOUg==",
"optional": true
},
"esbuild-android-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.14.48.tgz",
"integrity": "sha512-vptI3K0wGALiDq+EvRuZotZrJqkYkN5282iAfcffjI5lmGG9G1ta/CIVauhY42MBXwEgDJkweiDcDMRLzBZC4g==",
"optional": true
},
"esbuild-darwin-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.14.48.tgz",
"integrity": "sha512-gGQZa4+hab2Va/Zww94YbshLuWteyKGD3+EsVon8EWTWhnHFRm5N9NbALNbwi/7hQ/hM1Zm4FuHg+k6BLsl5UA==",
"optional": true
},
"esbuild-darwin-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.48.tgz",
"integrity": "sha512-bFjnNEXjhZT+IZ8RvRGNJthLWNHV5JkCtuOFOnjvo5pC0sk2/QVk0Qc06g2PV3J0TcU6kaPC3RN9yy9w2PSLEA==",
"optional": true
},
"esbuild-freebsd-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.48.tgz",
"integrity": "sha512-1NOlwRxmOsnPcWOGTB10JKAkYSb2nue0oM1AfHWunW/mv3wERfJmnYlGzL3UAOIUXZqW8GeA2mv+QGwq7DToqA==",
"optional": true
},
"esbuild-freebsd-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.48.tgz",
"integrity": "sha512-gXqKdO8wabVcYtluAbikDH2jhXp+Klq5oCD5qbVyUG6tFiGhrC9oczKq3vIrrtwcxDQqK6+HDYK8Zrd4bCA9Gw==",
"optional": true
},
"esbuild-linux-32": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.14.48.tgz",
"integrity": "sha512-ghGyDfS289z/LReZQUuuKq9KlTiTspxL8SITBFQFAFRA/IkIvDpnZnCAKTCjGXAmUqroMQfKJXMxyjJA69c/nQ==",
"optional": true
},
"esbuild-linux-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.48.tgz",
"integrity": "sha512-vni3p/gppLMVZLghI7oMqbOZdGmLbbKR23XFARKnszCIBpEMEDxOMNIKPmMItQrmH/iJrL1z8Jt2nynY0bE1ug==",
"optional": true
},
"esbuild-linux-arm": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.14.48.tgz",
"integrity": "sha512-+VfSV7Akh1XUiDNXgqgY1cUP1i2vjI+BmlyXRfVz5AfV3jbpde8JTs5Q9sYgaoq5cWfuKfoZB/QkGOI+QcL1Tw==",
"optional": true
},
"esbuild-linux-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.48.tgz",
"integrity": "sha512-3CFsOlpoxlKPRevEHq8aAntgYGYkE1N9yRYAcPyng/p4Wyx0tPR5SBYsxLKcgPB9mR8chHEhtWYz6EZ+H199Zw==",
"optional": true
},
"esbuild-linux-mips64le": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.48.tgz",
"integrity": "sha512-cs0uOiRlPp6ymknDnjajCgvDMSsLw5mST2UXh+ZIrXTj2Ifyf2aAP3Iw4DiqgnyYLV2O/v/yWBJx+WfmKEpNLA==",
"optional": true
},
"esbuild-linux-ppc64le": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.48.tgz",
"integrity": "sha512-+2F0vJMkuI0Wie/wcSPDCqXvSFEELH7Jubxb7mpWrA/4NpT+/byjxDz0gG6R1WJoeDefcrMfpBx4GFNN1JQorQ==",
"optional": true
},
"esbuild-linux-riscv64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.48.tgz",
"integrity": "sha512-BmaK/GfEE+5F2/QDrIXteFGKnVHGxlnK9MjdVKMTfvtmudjY3k2t8NtlY4qemKSizc+QwyombGWTBDc76rxePA==",
"optional": true
},
"esbuild-linux-s390x": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.48.tgz",
"integrity": "sha512-tndw/0B9jiCL+KWKo0TSMaUm5UWBLsfCKVdbfMlb3d5LeV9WbijZ8Ordia8SAYv38VSJWOEt6eDCdOx8LqkC4g==",
"optional": true
},
"esbuild-netbsd-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.48.tgz",
"integrity": "sha512-V9hgXfwf/T901Lr1wkOfoevtyNkrxmMcRHyticybBUHookznipMOHoF41Al68QBsqBxnITCEpjjd4yAos7z9Tw==",
"optional": true
},
"esbuild-openbsd-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.48.tgz",
"integrity": "sha512-+IHf4JcbnnBl4T52egorXMatil/za0awqzg2Vy6FBgPcBpisDWT2sVz/tNdrK9kAqj+GZG/jZdrOkj7wsrNTKA==",
"optional": true
},
"esbuild-sunos-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.14.48.tgz",
"integrity": "sha512-77m8bsr5wOpOWbGi9KSqDphcq6dFeJyun8TA+12JW/GAjyfTwVtOnN8DOt6DSPUfEV+ltVMNqtXUeTeMAxl5KA==",
"optional": true
},
"esbuild-windows-32": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.14.48.tgz",
"integrity": "sha512-EPgRuTPP8vK9maxpTGDe5lSoIBHGKO/AuxDncg5O3NkrPeLNdvvK8oywB0zGaAZXxYWfNNSHskvvDgmfVTguhg==",
"optional": true
},
"esbuild-windows-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.14.48.tgz",
"integrity": "sha512-YmpXjdT1q0b8ictSdGwH3M8VCoqPpK1/UArze3X199w6u8hUx3V8BhAi1WjbsfDYRBanVVtduAhh2sirImtAvA==",
"optional": true
},
"esbuild-windows-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.48.tgz",
"integrity": "sha512-HHaOMCsCXp0rz5BT2crTka6MPWVno121NKApsGs/OIW5QC0ggC69YMGs1aJct9/9FSUF4A1xNE/cLvgB5svR4g==",
"optional": true
},
"js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
},
"loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
"integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
"requires": {
"js-tokens": "^3.0.0 || ^4.0.0"
}
},
"object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
"integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM="
},
"react": {
"version": "17.0.2",
"resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz",
"integrity": "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==",
"requires": {
"loose-envify": "^1.1.0",
"object-assign": "^4.1.1"
}
},
"react-dom": {
"version": "17.0.2",
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz",
"integrity": "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==",
"requires": {
"loose-envify": "^1.1.0",
"object-assign": "^4.1.1",
"scheduler": "^0.20.2"
}
},
"scheduler": {
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz",
"integrity": "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==",
"requires": {
"loose-envify": "^1.1.0",
"object-assign": "^4.1.1"
}
}
}
}

View File

@@ -1,6 +0,0 @@
.DS_Store
.env
node_modules
/npm/**/bin
/npm/**/*.js
/npm/**/.npmrc

View File

@@ -1,2 +0,0 @@
@oven:registry=https://registry.npmjs.org/
//registry.npmjs.org/:_authToken=${NPM_TOKEN}

View File

@@ -1,15 +0,0 @@
# bun-release
Scripts that release Bun to npm, Docker Hub, Homebrew, etc.
### Running
```sh
bun run npm # build assets for the latest release
bun run npm -- <release> # build assets for the provided release
bun run npm -- <release> [dry-run|publish] # build and publish assets to npm
```
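For example, `bun run npm -- bun-v0.5.3 dry-run` should build the per-platform `@oven/bun-*` packages for that release and run `npm publish --dry-run` for each of them; swap `dry-run` for `publish` to publish for real. The tag is only an illustration; a bare version such as `0.5.3` also works, since the scripts normalize it to a `bun-v` tag.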
### Credits
- [esbuild](https://github.com/evanw/esbuild), for its npm release scripts, on which this was largely based.

Binary file not shown.

View File

@@ -1,3 +0,0 @@
# Bun
This is the macOS arm64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-darwin-aarch64",
"version": "0.5.3",
"description": "This is the macOS arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"darwin"
],
"cpu": [
"arm64"
]
}

View File

@@ -1,5 +0,0 @@
# Bun
This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh
_Note: "Baseline" builds are for machines that do not support [AVX2](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions) instructions._

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-darwin-x64-baseline",
"version": "0.5.3",
"description": "This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"darwin"
],
"cpu": [
"x64"
]
}

View File

@@ -1,3 +0,0 @@
# Bun
This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-darwin-x64",
"version": "0.5.3",
"description": "This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"darwin"
],
"cpu": [
"x64"
]
}

View File

@@ -1,3 +0,0 @@
# Bun
This is the Linux arm64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-linux-aarch64",
"version": "0.5.3",
"description": "This is the Linux arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"linux"
],
"cpu": [
"arm64"
]
}

View File

@@ -1,5 +0,0 @@
# Bun
This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh
_Note: "Baseline" builds are for machines that do not support [AVX2](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions) instructions._

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-linux-x64-baseline",
"version": "0.5.3",
"description": "This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"linux"
],
"cpu": [
"x64"
]
}

View File

@@ -1,3 +0,0 @@
# Bun
This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-linux-x64",
"version": "0.5.3",
"description": "This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"linux"
],
"cpu": [
"x64"
]
}

View File

@@ -1,31 +0,0 @@
# Bun
Bun is a fast all-in-one JavaScript runtime. https://bun.sh
### Install
```sh
npm install -g bun
```
### Upgrade
```sh
bun upgrade
```
### Supported Platforms
- [macOS, arm64 (Apple Silicon)](https://www.npmjs.com/package/@oven/bun-darwin-aarch64)
- [macOS, x64](https://www.npmjs.com/package/@oven/bun-darwin-x64)
- [macOS, x64 (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-darwin-x64-baseline)
- [Linux, arm64](https://www.npmjs.com/package/@oven/bun-linux-aarch64)
- [Linux, x64](https://www.npmjs.com/package/@oven/bun-linux-x64)
- [Linux, x64 (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-linux-x64-baseline)
- [Windows (using Windows Subsystem for Linux, aka. "WSL")](https://relatablecode.com/how-to-set-up-bun-on-a-windows-machine)
### Future Platforms
- [Windows](https://github.com/oven-sh/bun/issues/43)
- Unix-like variants such as FreeBSD, OpenBSD, etc.
- Android and iOS

View File

@@ -1,42 +0,0 @@
{
"name": "bun",
"version": "0.5.3",
"description": "Bun is a fast all-in-one JavaScript runtime.",
"keywords": [
"bun",
"bun.js",
"node",
"node.js",
"runtime",
"bundler",
"transpiler",
"typescript"
],
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"bin": {
"bun": "bin/bun",
"bunx": "bin/bun"
},
"repository": "https://github.com/oven-sh/bun",
"scripts": {
"postinstall": "node install.js"
},
"optionalDependencies": {
"@oven/bun-darwin-aarch64": "0.5.3",
"@oven/bun-darwin-x64": "0.5.3",
"@oven/bun-darwin-x64-baseline": "0.5.3",
"@oven/bun-linux-aarch64": "0.5.3",
"@oven/bun-linux-x64": "0.5.3",
"@oven/bun-linux-x64-baseline": "0.5.3"
},
"os": [
"darwin",
"linux"
],
"cpu": [
"arm64",
"x64"
]
}

View File

@@ -1,19 +0,0 @@
{
"private": true,
"dependencies": {
"esbuild": "^0.17.3",
"jszip": "^3.10.1",
"octokit": "^2.0.14"
},
"devDependencies": {
"@octokit/types": "^8.1.1",
"bun-types": "^0.4.0",
"prettier": "^2.8.2"
},
"scripts": {
"format": "prettier --write src scripts",
"get-version": "bun scripts/get-version.ts",
"upload-npm": "bun scripts/upload-npm.ts",
"upload-assets": "bun scripts/upload-assets.ts"
}
}

View File

@@ -1,5 +0,0 @@
import { log } from "../src/console";
import { getSemver } from "../src/github";
log(await getSemver(process.argv[2]));
process.exit(0); // HACK

View File

@@ -1,13 +0,0 @@
import { importBun } from "../src/npm/install";
import { execFileSync } from "child_process";
importBun()
.then(bun => {
return execFileSync(bun, process.argv.slice(2), {
stdio: "inherit",
});
})
.catch(error => {
console.error(error);
process.exit(1);
});

View File

@@ -1,10 +0,0 @@
import { importBun, optimizeBun } from "../src/npm/install";
importBun()
.then(path => {
optimizeBun(path);
})
.catch(error => {
console.error(error);
process.exit(1);
});

View File

@@ -1,92 +0,0 @@
import { getRelease, uploadAsset } from "../src/github";
import { fetch } from "../src/fetch";
import { spawn } from "../src/spawn";
import { confirm, exit, log, stdin, warn } from "../src/console";
import { hash, join, rm, tmp, write, basename, blob } from "../src/fs";
const [tag, ...paths] = process.argv.slice(2);
if (!tag) {
exit("Invalid arguments: [tag] [...assets]");
}
const { tag_name, assets } = await getRelease(tag);
log("Release:", tag_name, "\n");
log("Existing assets:\n", ...assets.map(({ name }) => `- ${name}\n`));
log("Updating assets:\n", ...paths.map(path => `+ ${basename(path)}\n`));
await confirm();
log("Hashing assets...\n");
const existing: Map<string, string> = new Map();
for (const { name, browser_download_url } of assets) {
if (name.startsWith("SHASUMS256.txt")) {
continue;
}
const response = await fetch(browser_download_url);
const buffer = Buffer.from(await response.arrayBuffer());
existing.set(name, await hash(buffer));
}
const updated: Map<string, string> = new Map();
for (const path of paths) {
const name = basename(path);
updated.set(name, await hash(path));
}
log(
"Unchanged hashes:\n",
...Array.from(existing.entries())
.filter(([name]) => !updated.has(name))
.map(([name, sha256]) => ` - ${sha256} => ${name}\n`),
);
log("Changed hashes:\n", ...Array.from(updated.entries()).map(([name, sha256]) => ` + ${sha256} => ${name}\n`));
await confirm();
log("Signing assets...\n");
const cwd = tmp();
const path = join(cwd, "SHASUMS256.txt");
const signedPath = `${path}.asc`;
write(
path,
[...Array.from(updated.entries()), ...Array.from(existing.entries()).filter(([name]) => !updated.has(name))]
.sort(([a], [b]) => a.localeCompare(b))
.map(([name, sha256]) => `${sha256} ${name}`)
.join("\n"),
);
const { stdout: keys } = spawn("gpg", ["--list-secret-keys", "--keyid-format", "long"]);
const verifiedKeys = [
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59", // robobun@oven.sh
];
if (!verifiedKeys.find(key => keys.includes(key))) {
warn("Signature is probably wrong, key not found: robobun@oven.sh");
}
const passphrase = await stdin("Passphrase:");
log();
const { exitCode, stdout, stderr } = spawn(
"gpg",
["--pinentry-mode", "loopback", "--passphrase-fd", "0", "--clearsign", "--output", signedPath, path],
{
// @ts-ignore
input: passphrase,
stdout: "inherit",
stderr: "inherit",
},
);
if (exitCode !== 0) {
exit(stdout || stderr);
}
const uploads = [...paths, path, signedPath];
log("Uploading assets:\n", ...uploads.map(path => ` + ${basename(path)}\n`));
await confirm();
for (const path of uploads) {
const name = basename(path);
await uploadAsset(tag_name, name, blob(path));
}
try {
rm(cwd);
} catch {
warn("Failed to cleanup:", cwd, "\n");
}
log("Done");
process.exit(0); // FIXME

View File

@@ -1,164 +0,0 @@
import { join, copy, exists, chmod, write, writeJson } from "../src/fs";
import { fetch } from "../src/fetch";
import { spawn } from "../src/spawn";
import type { Platform } from "../src/platform";
import { platforms } from "../src/platform";
import { getSemver } from "../src/github";
import { getRelease } from "../src/github";
import type { BuildOptions } from "esbuild";
import { buildSync, formatMessagesSync } from "esbuild";
import type { JSZipObject } from "jszip";
import { loadAsync } from "jszip";
import { debug, log, error } from "../src/console";
const module = "bun";
const owner = "@oven";
let version: string;
const [tag, action] = process.argv.slice(2);
await build(tag);
if (action === "publish") {
await publish();
} else if (action === "dry-run") {
await publish(true);
} else if (action) {
throw new Error(`Unknown action: ${action}`);
}
process.exit(0); // HACK
async function build(tag?: string): Promise<void> {
const release = await getRelease(tag);
version = await getSemver(release.tag_name);
await buildRootModule();
for (const platform of platforms) {
await buildModule(release, platform);
}
}
async function publish(dryRun?: boolean): Promise<void> {
const modules = platforms.map(({ bin }) => `${owner}/${bin}`);
modules.push(module);
for (const module of modules) {
publishModule(module, dryRun);
}
}
async function buildRootModule() {
log("Building:", `${module}@${version}`);
const cwd = join("npm", module);
const define = {
version: `"${version}"`,
module: `"${module}"`,
owner: `"${owner}"`,
};
bundle(join("scripts", "npm-postinstall.ts"), join(cwd, "install.js"), {
define,
});
bundle(join("scripts", "npm-exec.ts"), join(cwd, "bin", "bun"), {
define,
banner: {
js: "#!/usr/bin/env node",
},
});
const os = [...new Set(platforms.map(({ os }) => os))];
const cpu = [...new Set(platforms.map(({ arch }) => arch))];
writeJson(join(cwd, "package.json"), {
name: module,
version: version,
scripts: {
postinstall: "node install.js",
},
optionalDependencies: Object.fromEntries(platforms.map(({ bin }) => [`${owner}/${bin}`, version])),
bin: {
bun: "bin/bun",
bunx: "bin/bun",
},
os,
cpu,
});
if (exists(".npmrc")) {
copy(".npmrc", join(cwd, ".npmrc"));
}
}
async function buildModule(
release: Awaited<ReturnType<typeof getRelease>>,
{ bin, exe, os, arch }: Platform,
): Promise<void> {
const module = `${owner}/${bin}`;
log("Building:", `${module}@${version}`);
const asset = release.assets.find(({ name }) => name === `${bin}.zip`);
if (!asset) {
error(`No asset found: ${bin}`);
return;
}
const bun = await extractFromZip(asset.browser_download_url, `${bin}/bun`);
const cwd = join("npm", module);
write(join(cwd, exe), await bun.async("arraybuffer"));
chmod(join(cwd, exe), 0o755);
writeJson(join(cwd, "package.json"), {
name: module,
version: version,
preferUnplugged: true,
os: [os],
cpu: [arch],
});
if (exists(".npmrc")) {
copy(".npmrc", join(cwd, ".npmrc"));
}
}
function publishModule(name: string, dryRun?: boolean): void {
log(dryRun ? "Dry-run Publishing:" : "Publishing:", `${name}@${version}`);
const { exitCode, stdout, stderr } = spawn(
"npm",
[
"publish",
"--access",
"public",
"--tag",
version.includes("canary") ? "canary" : "latest",
...(dryRun ? ["--dry-run"] : []),
],
{
cwd: join("npm", name),
},
);
if (exitCode !== 0) {
error(stderr || stdout);
}
}
async function extractFromZip(url: string, filename: string): Promise<JSZipObject> {
const response = await fetch(url);
const buffer = await response.arrayBuffer();
const zip = await loadAsync(buffer);
for (const [name, file] of Object.entries(zip.files)) {
if (!file.dir && name.startsWith(filename)) {
return file;
}
}
debug("Found files:", Object.keys(zip.files));
throw new Error(`File not found: ${filename}`);
}
function bundle(src: string, dst: string, options: BuildOptions = {}): void {
const { errors } = buildSync({
bundle: true,
treeShaking: true,
keepNames: true,
minifySyntax: true,
pure: ["console.debug"],
platform: "node",
target: "es6",
format: "cjs",
entryPoints: [src],
outfile: dst,
...options,
});
if (errors?.length) {
const messages = formatMessagesSync(errors, { kind: "error" });
throw new Error(messages.join("\n"));
}
}

View File

@@ -1,75 +0,0 @@
import { isatty } from "tty";
import { createInterface } from "readline";
export const isAction = !!process.env["GITHUB_ACTION"];
export const isDebug =
process.env["DEBUG"] === "1" || process.env["LOG_LEVEL"] === "debug" || process.env["RUNNER_DEBUG"] === "1";
export function debug(...message: any[]): void {
if (isAction) {
console.debug("::debug::", ...message);
} else if (isDebug) {
console.debug(...message);
}
}
export function log(...message: any[]): void {
console.log(...message);
}
export function warn(...message: any[]): void {
if (isAction) {
console.warn("::warning::", ...message);
} else {
console.warn(...message);
}
}
export function error(...message: any[]): void {
if (isAction) {
console.error("::error::", ...message);
} else {
console.error(...message);
}
}
export function exit(...message: any[]): never {
error(...message);
process.exit(1);
}
export function isTty(): boolean {
return isatty(process.stdout.fd);
}
export async function stdin(question: string): Promise<string> {
if (isTty()) {
return prompt(question) || "";
}
const reader = createInterface({
input: process.stdin,
terminal: false,
});
let buffer = "";
reader.on("line", line => {
buffer += line;
});
return new Promise(resolve => {
reader.once("close", () => resolve(buffer));
});
}
export async function confirm(message?: string): Promise<void> {
if (!isTty()) {
return;
}
const question = message ?? "Confirm?";
switch (prompt(`${question} [y/n]`)) {
case "y":
case "Y":
log();
return;
}
exit();
}

View File

@@ -1,70 +0,0 @@
import { debug, isDebug } from "./console";
export const fetch = "fetch" in globalThis ? webFetch : nodeFetch;
type Options = RequestInit & { assert?: boolean };
async function webFetch(url: string, options: Options = {}): Promise<Response> {
debug("fetch request", url, options);
const response = await globalThis.fetch(url, options, { verbose: isDebug });
debug("fetch response", response);
if (options?.assert !== false && !isOk(response.status)) {
try {
debug(await response.text());
} catch {}
throw new Error(`${response.status}: ${url}`);
}
return response;
}
async function nodeFetch(url: string, options: Options = {}): Promise<Response> {
const { get } = await import("node:http");
return new Promise((resolve, reject) => {
get(url, response => {
debug("http.get", url, response.statusCode);
const status = response.statusCode ?? 501;
if (response.headers.location && isRedirect(status)) {
return nodeFetch(response.headers.location, options).then(resolve, reject);
}
if (options?.assert !== false && !isOk(status)) {
return reject(new Error(`${status}: ${url}`));
}
const body: Buffer[] = [];
response.on("data", chunk => {
body.push(chunk);
});
response.on("end", () => {
resolve({
ok: isOk(status),
status,
async arrayBuffer() {
return Buffer.concat(body).buffer as ArrayBuffer;
},
async text() {
return Buffer.concat(body).toString("utf-8");
},
async json() {
const text = Buffer.concat(body).toString("utf-8");
return JSON.parse(text);
},
} as Response);
});
}).on("error", reject);
});
}
function isOk(status: number): boolean {
return status >= 200 && status <= 204;
}
function isRedirect(status: number): boolean {
switch (status) {
case 301: // Moved Permanently
case 308: // Permanent Redirect
case 302: // Found
case 307: // Temporary Redirect
case 303: // See Other
return true;
}
return false;
}

View File

@@ -1,159 +0,0 @@
import path from "path";
import fs from "fs";
import os from "os";
import crypto from "crypto";
import { debug } from "./console";
export function join(...paths: (string | string[])[]): string {
return path.join(...paths.flat(2));
}
export function basename(...paths: (string | string[])[]): string {
return path.basename(join(...paths));
}
export function tmp(): string {
const tmpdir = process.env["RUNNER_TEMP"] ?? os.tmpdir();
const dir = fs.mkdtempSync(join(tmpdir, "bun-"));
debug("tmp", dir);
return dir;
}
export function rm(path: string): void {
debug("rm", path);
try {
fs.rmSync(path, { recursive: true });
return;
} catch (error) {
debug("fs.rmSync failed", error);
// Did not exist before Node.js v14.
// Attempt again with older, slower implementation.
}
let stats: fs.Stats;
try {
stats = fs.lstatSync(path);
} catch (error) {
debug("fs.lstatSync failed", error);
// The file was likely deleted, so return early.
return;
}
if (!stats.isDirectory()) {
fs.unlinkSync(path);
return;
}
try {
fs.rmdirSync(path, { recursive: true });
return;
} catch (error) {
debug("fs.rmdirSync failed", error);
// Recursive flag did not exist before Node.js X.
// Attempt again with older, slower implementation.
}
for (const filename of fs.readdirSync(path)) {
rm(join(path, filename));
}
fs.rmdirSync(path);
}
export function rename(path: string, newPath: string): void {
debug("rename", path, newPath);
try {
fs.renameSync(path, newPath);
return;
} catch (error) {
debug("fs.renameSync failed", error);
// If there is an error, delete the new path and try again.
}
try {
rm(newPath);
} catch (error) {
debug("rm failed", error);
// The path could have been deleted already.
}
fs.renameSync(path, newPath);
}
export function write(dst: string, content: string | ArrayBuffer | ArrayBufferView): void {
debug("write", dst);
try {
fs.writeFileSync(dst, content);
return;
} catch (error) {
debug("fs.writeFileSync failed", error);
// If there is an error, ensure the parent directory
// exists and try again.
try {
fs.mkdirSync(path.dirname(dst), { recursive: true });
} catch (error) {
debug("fs.mkdirSync failed", error);
// The directory could have been created already.
}
fs.writeFileSync(dst, content);
}
}
export function writeJson(path: string, json: object, force?: boolean): void {
let value = json;
if (!force && exists(path)) {
try {
const existing = JSON.parse(read(path));
value = {
...existing,
...json,
};
} catch {
value = json;
}
}
write(path, `${JSON.stringify(value, undefined, 2)}\n`);
}
export function read(path: string): string {
debug("read", path);
return fs.readFileSync(path, "utf-8");
}
export function blob(path: string): Blob {
debug("blob", path);
if ("Bun" in globalThis) {
return Bun.file(path);
}
const buffer = fs.readFileSync(path);
return new Blob([buffer], {
type: path.endsWith(".zip") ? "application/zip" : path.endsWith(".txt") ? "text/plain" : "application/octet-stream",
});
}
export function hash(content: string | crypto.BinaryLike): string {
debug("hash", content);
return crypto
.createHash("sha256")
.update(typeof content === "string" ? fs.readFileSync(content) : content)
.digest("hex");
}
export function chmod(path: string, mode: fs.Mode): void {
debug("chmod", path, mode);
fs.chmodSync(path, mode);
}
export function copy(path: string, newPath: string): void {
debug("copy", path, newPath);
try {
fs.copyFileSync(path, newPath);
return;
} catch (error) {
debug("fs.copyFileSync failed", error);
}
write(newPath, read(path));
}
export function exists(path: string): boolean {
debug("exists", path);
try {
return fs.existsSync(path);
} catch (error) {
debug("fs.existsSync failed", error);
}
return false;
}

View File

@@ -1,120 +0,0 @@
import type { Endpoints, RequestParameters, Route } from "@octokit/types";
import { Octokit } from "octokit";
import { fetch } from "./fetch";
import { debug, log, warn, error } from "./console";
const [owner, repo] = process.env["GITHUB_REPOSITORY"]?.split("/") ?? ["oven-sh", "bun"];
const octokit = new Octokit({
auth: process.env["GITHUB_TOKEN"],
request: {
fetch,
},
log: {
debug,
info: log,
warn,
error,
},
});
export async function github<R extends Route>(
url: R | keyof Endpoints,
options?: Omit<
R extends keyof Endpoints ? Endpoints[R]["parameters"] & RequestParameters : RequestParameters,
"owner" | "repo"
>,
): Promise<R extends keyof Endpoints ? Endpoints[R]["response"]["data"] : unknown> {
// @ts-ignore
const { data } = await octokit.request(url, {
owner,
repo,
...options,
});
return data;
}
export async function getRelease(tag?: string) {
if (!tag) {
return github("GET /repos/{owner}/{repo}/releases/latest");
}
return github("GET /repos/{owner}/{repo}/releases/tags/{tag}", {
tag: formatTag(tag),
});
}
export async function uploadAsset(tag: string, name: string, blob: Blob) {
const release = await getRelease(tag);
const asset = release.assets.find(asset => asset.name === name);
// GitHub requires that existing assets are deleted before uploading
// a new asset, but does not provide a rename or re-upload API?!?
if (asset) {
await github("DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}", {
asset_id: asset.id,
});
}
return github("POST {origin}/repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", {
baseUrl: "https://uploads.github.com",
release_id: release.id,
name,
headers: {
"content-type": blob.type,
"content-length": blob.size,
},
data: Buffer.from(await blob.arrayBuffer()),
});
}
export async function downloadAsset(tag: string, name: string): Promise<Blob> {
const release = await getRelease(tag);
const asset = release.assets.find(asset => asset.name === name);
if (!asset) {
throw new Error(`Asset not found: ${name}`);
}
const response = await fetch(asset.browser_download_url);
return response.blob();
}
export async function getSha(tag: string, format?: "short" | "long") {
const ref = formatTag(tag);
const {
object: { sha },
} = await github("GET /repos/{owner}/{repo}/git/ref/{ref}", {
ref: ref === "canary" ? "heads/main" : `tags/${ref}`,
});
return format === "short" ? sha.substring(0, 7) : sha;
}
export async function getBuild(): Promise<number> {
const date = new Date().toISOString().split("T")[0].replace(/-/g, "");
const response = await fetch("https://registry.npmjs.org/-/package/bun/dist-tags");
const { canary }: { canary: string } = await response.json();
if (!canary.includes(date)) {
return 1;
}
const match = /canary.[0-9]{8}\.([0-9]+)+?/.exec(canary);
return match ? 1 + parseInt(match[1]) : 1;
}
export async function getSemver(tag?: string, build?: number): Promise<string> {
const { tag_name } = await getRelease(tag);
if (tag_name !== "canary") {
return tag_name.replace("bun-v", "");
}
if (build === undefined) {
build = await getBuild();
}
const sha = await getSha(tag_name, "short");
const date = new Date().toISOString().split("T")[0].replace(/-/g, "");
return `${Bun.version}-canary.${date}.${build}+${sha}`;
}
export function formatTag(tag: string): string {
if (tag === "canary" || tag.startsWith("bun-v")) {
return tag;
}
if (tag.startsWith("v")) {
return `bun-v${tag.slice(1)}`;
}
return `bun-v${tag}`;
}

View File

@@ -1,144 +0,0 @@
import { fetch } from "../fetch";
import { spawn } from "../spawn";
import { chmod, join, rename, rm, tmp, write } from "../fs";
import { unzipSync } from "zlib";
import type { Platform } from "../platform";
import { os, arch, supportedPlatforms } from "../platform";
import { debug, error } from "../console";
declare const version: string;
declare const module: string;
declare const owner: string;
export async function importBun(): Promise<string> {
if (!supportedPlatforms.length) {
throw new Error(`Unsupported platform: ${os} ${arch}`);
}
for (const platform of supportedPlatforms) {
try {
return await requireBun(platform);
} catch (error) {
debug("requireBun failed", error);
}
}
throw new Error(`Failed to install package "${module}"`);
}
async function requireBun(platform: Platform): Promise<string> {
const module = `${owner}/${platform.bin}`;
function resolveBun() {
const exe = require.resolve(join(module, platform.exe));
const { exitCode, stderr, stdout } = spawn(exe, ["--version"]);
if (exitCode === 0) {
return exe;
}
throw new Error(stderr || stdout);
}
try {
return resolveBun();
} catch (cause) {
debug("resolveBun failed", cause);
error(
`Failed to find package "${module}".`,
`You may have used the "--no-optional" flag when running "npm install".`,
);
}
const cwd = join("node_modules", module);
try {
installBun(platform, cwd);
} catch (cause) {
debug("installBun failed", cause);
error(`Failed to install package "${module}" using "npm install".`, cause);
try {
await downloadBun(platform, cwd);
} catch (cause) {
debug("downloadBun failed", cause);
error(`Failed to download package "${module}" from "registry.npmjs.org".`, cause);
}
}
return resolveBun();
}
function installBun(platform: Platform, dst: string): void {
const module = `${owner}/${platform.bin}`;
const cwd = tmp();
try {
write(join(cwd, "package.json"), "{}");
const { exitCode } = spawn(
"npm",
["install", "--loglevel=error", "--prefer-offline", "--no-audit", "--progress=false", `${module}@${version}`],
{
cwd,
stdio: "pipe",
env: {
...process.env,
npm_config_global: undefined,
},
},
);
if (exitCode === 0) {
rename(join(cwd, "node_modules", module), dst);
}
} finally {
try {
rm(cwd);
} catch (error) {
debug("rm failed", error);
// There is nothing to do if the directory cannot be cleaned up.
}
}
}
async function downloadBun(platform: Platform, dst: string): Promise<void> {
const response = await fetch(`https://registry.npmjs.org/${owner}/${platform.bin}/-/${platform.bin}-${version}.tgz`);
const tgz = await response.arrayBuffer();
let buffer: Buffer;
try {
buffer = unzipSync(tgz);
} catch (cause) {
throw new Error("Invalid gzip data", { cause });
}
function str(i: number, n: number): string {
return String.fromCharCode(...buffer.subarray(i, i + n)).replace(/\0.*$/, "");
}
let offset = 0;
while (offset < buffer.length) {
const name = str(offset, 100).replace("package/", "");
const size = parseInt(str(offset + 124, 12), 8);
offset += 512;
if (!isNaN(size)) {
write(join(dst, name), buffer.subarray(offset, offset + size));
if (name === platform.exe) {
try {
chmod(join(dst, name), 0o755);
} catch (error) {
debug("chmod failed", error);
}
}
offset += (size + 511) & ~511;
}
}
}
export function optimizeBun(path: string): void {
if (os === "win32") {
throw new Error(
"You must use Windows Subsystem for Linux, aka. WSL, to run bun. Learn more: https://learn.microsoft.com/en-us/windows/wsl/install",
);
}
const { npm_config_user_agent } = process.env;
if (npm_config_user_agent && /\byarn\//.test(npm_config_user_agent)) {
throw new Error(
"Yarn does not support bun, because it does not allow linking to binaries. To use bun, install using the following command: curl -fsSL https://bun.sh/install | bash",
);
}
try {
rename(path, join(__dirname, "bin", "bun"));
return;
} catch (error) {
debug("optimizeBun failed", error);
}
throw new Error(
"Your package manager doesn't seem to support bun. To use bun, install using the following command: curl -fsSL https://bun.sh/install | bash",
);
}

View File

@@ -1,91 +0,0 @@
import { spawn } from "./spawn";
import { read } from "./fs";
import { debug } from "./console";
export const os = process.platform;
export const arch = os === "darwin" && process.arch === "x64" && isRosetta2() ? "arm64" : process.arch;
export const avx2 = (arch === "x64" && os === "linux" && isLinuxAVX2()) || (os === "darwin" && isDarwinAVX2());
export type Platform = {
os: string;
arch: string;
avx2?: boolean;
bin: string;
exe: string;
};
export const platforms: Platform[] = [
{
os: "darwin",
arch: "arm64",
bin: "bun-darwin-aarch64",
exe: "bin/bun",
},
{
os: "darwin",
arch: "x64",
avx2: true,
bin: "bun-darwin-x64",
exe: "bin/bun",
},
{
os: "darwin",
arch: "x64",
bin: "bun-darwin-x64-baseline",
exe: "bin/bun",
},
{
os: "linux",
arch: "arm64",
bin: "bun-linux-aarch64",
exe: "bin/bun",
},
{
os: "linux",
arch: "x64",
avx2: true,
bin: "bun-linux-x64",
exe: "bin/bun",
},
{
os: "linux",
arch: "x64",
bin: "bun-linux-x64-baseline",
exe: "bin/bun",
},
];
export const supportedPlatforms: Platform[] = platforms
.filter(platform => platform.os === os && platform.arch === arch && (!platform.avx2 || avx2))
.sort((a, b) => (a.avx2 === b.avx2 ? 0 : a.avx2 ? -1 : 1));
function isLinuxAVX2(): boolean {
try {
return read("/proc/cpuinfo").includes("avx2");
} catch (error) {
debug("isLinuxAVX2 failed", error);
return false;
}
}
function isDarwinAVX2(): boolean {
try {
const { exitCode, stdout } = spawn("sysctl", ["-n", "machdep.cpu"]);
return exitCode === 0 && stdout.includes("AVX2");
} catch (error) {
debug("isDarwinAVX2 failed", error);
return false;
}
}
function isRosetta2(): boolean {
try {
const { exitCode, stdout } = spawn("sysctl", ["-n", "sysctl.proc_translated"]);
return exitCode === 0 && stdout.includes("1");
} catch (error) {
debug("isRosetta2 failed", error);
return false;
}
}

View File

@@ -1,24 +0,0 @@
import child_process from "child_process";
import { debug } from "./console";
export function spawn(
cmd: string,
args: string[],
options: child_process.SpawnOptions = {},
): {
exitCode: number;
stdout: string;
stderr: string;
} {
debug("spawn", [cmd, ...args].join(" "));
const { status, stdout, stderr } = child_process.spawnSync(cmd, args, {
stdio: "pipe",
encoding: "utf-8",
...options,
});
return {
exitCode: status ?? 1,
stdout,
stderr,
};
}

View File

@@ -1,17 +0,0 @@
{
"compilerOptions": {
"lib": ["ESNext"],
"module": "ESNext",
"target": "ESNext",
"moduleResolution": "node",
"types": ["bun-types"],
"esModuleInterop": true,
"allowJs": true,
"strict": true,
"resolveJsonModule": true
},
"include": [
"src",
"scripts"
]
}

View File

@@ -1,3 +0,0 @@
.DS_Store
.env
node_modules

View File

@@ -1,3 +0,0 @@
# bun-test
Scripts to run Bun's tests using `bun wiptest`.
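To run them locally, `bun install` followed by `bun run test` should be enough: per the package.json below, `test` maps to `bun run src/runner.ts`, which walks `test/bun.js` for `*.test.*` files and runs each one under `bun wiptest` with a 60-second timeout.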

Binary file not shown.

View File

@@ -1,13 +0,0 @@
{
"private": true,
"dependencies": {
"@actions/core": "^1.10.0"
},
"devDependencies": {
"bun-types": "canary",
"prettier": "^2.8.2"
},
"scripts": {
"test": "bun run src/runner.ts"
}
}

View File

@@ -1,86 +0,0 @@
import { spawn } from "bun";
import { readdirSync } from "node:fs";
import { resolve } from "node:path";
import * as action from "@actions/core";
const cwd = resolve("../..");
const isAction = !!process.env["GITHUB_ACTION"];
const errorPattern = /error: ([\S\s]*?)(?=\n.*?at (\/.*):(\d+):(\d+))/mgi;
function* findTests(dir: string, query?: string): Generator<string> {
for (const entry of readdirSync(resolve(dir), { encoding: "utf-8", withFileTypes: true })) {
const path = resolve(dir, entry.name);
if (entry.isDirectory()) {
yield* findTests(path, query);
} else if (entry.isFile() && entry.name.includes(".test.")) {
yield path;
}
}
}
async function runTest(path: string): Promise<void> {
const name = path.replace(cwd, "").slice(1);
const runner = await spawn({
cwd,
cmd: ["bun", "wiptest", path],
stdout: "pipe",
stderr: "pipe",
});
const exitCode = await Promise.race([
new Promise((resolve) => {
setTimeout(() => {
runner.kill();
resolve(124); // Timed Out
}, 60_000);
}),
runner.exited,
]);
if (isAction) {
const prefix = exitCode === 0
? "PASS"
: `FAIL (exit code ${exitCode})`;
action.startGroup(`${prefix} - ${name}`);
}
for (const stdout of [runner.stdout, runner.stderr]) {
if (!stdout) {
continue;
}
const reader = stdout.getReader();
while (true) {
const { value, done } = await reader.read();
if (value) {
console.write(value);
if (isAction) {
findErrors(value);
}
}
if (done) {
break;
}
}
}
if (isAction) {
action.endGroup();
}
}
let failed = false;
function findErrors(data: Uint8Array): void {
const text = new TextDecoder().decode(data);
for (const [message, _, path, line, col] of text.matchAll(errorPattern)) {
failed = true;
action.error(message, {
file: path.replace(cwd, "").slice(1),
startLine: parseInt(line),
startColumn: parseInt(col),
});
}
}
const tests = [];
for (const path of findTests(resolve(cwd, "test/bun.js"))) {
tests.push(runTest(path).catch(console.error));
}
await Promise.allSettled(tests);
process.exit(failed ? 1 : 0);

View File

@@ -1,16 +0,0 @@
{
"compilerOptions": {
"lib": ["ESNext"],
"module": "ESNext",
"target": "ESNext",
"moduleResolution": "node",
"types": ["bun-types"],
"esModuleInterop": true,
"allowJs": true,
"strict": true,
"resolveJsonModule": true
},
"include": [
"src"
]
}

View File

@@ -22,10 +22,10 @@ Add this to your `tsconfig.json` or `jsconfig.json`:
```jsonc
{
"compilerOptions": {
"lib": ["ESNext"],
"module": "ESNext",
"target": "ESNext",
"moduleResolution": "Node",
"lib": ["esnext"],
"module": "esnext",
"target": "esnext",
"moduleResolution": "node",
// "bun-types" is the important part
"types": ["bun-types"]
}

View File

@@ -411,22 +411,22 @@ declare module "bun" {
* ## Example
*
* ```js
* const [{ address }] = await Bun.dns.lookup('example.com');
* const {address} = await Bun.dns.lookup('example.com');
* ```
*
* ### Filter results to IPv4:
*
* ```js
* import { dns } from 'bun';
* const [{ address }] = await dns.lookup('example.com', {family: 4});
* import {dns} from 'bun';
* const {address} = await dns.lookup('example.com', {family: 4});
* console.log(address); // "123.122.22.126"
* ```
*
* ### Filter results to IPv6:
*
* ```js
* import { dns } from 'bun';
* const [{ address }] = await dns.lookup('example.com', {family: 6});
* import {dns} from 'bun';
* const {address} = await dns.lookup('example.com', {family: 6});
* console.log(address); // "2001:db8::1"
* ```
*
@@ -439,8 +439,8 @@ declare module "bun" {
*
* To customize the DNS resolver, pass a `backend` option to `dns.lookup`:
* ```js
* import { dns } from 'bun';
* const [{ address }] = await dns.lookup('example.com', {backend: 'getaddrinfo'});
* import {dns} from 'bun';
* const {address} = await dns.lookup('example.com', {backend: 'getaddrinfo'});
* console.log(address); // "19.42.52.62"
* ```
*/
@@ -1592,12 +1592,11 @@ declare module "bun" {
/**
* Stop listening to prevent new connections from being accepted.
*
* By default, it does not cancel in-flight requests or websockets. That means it may take some time before all network activity stops.
* It does not close existing connections.
*
* @param closeActiveConnections Immediately terminate in-flight requests, websockets, and stop accepting new connections.
* @default false
* It may take a second or two to actually stop.
*/
stop(closeActiveConnections?: boolean): void;
stop(): void;
/**
* Update the `fetch` and `error` handlers without restarting the server.
@@ -2623,7 +2622,7 @@ declare module "bun" {
*/
builder: PluginBuilder,
): void | Promise<void>;
}): ReturnType<(typeof options)["setup"]>;
}): ReturnType<typeof options["setup"]>;
/**
* Deactivate all plugins
@@ -2742,7 +2741,7 @@ declare module "bun" {
}
interface SocketListener<Options extends SocketOptions = SocketOptions> {
stop(closeActiveConnections?: boolean): void;
stop(): void;
ref(): void;
unref(): void;
reload(options: Pick<Partial<Options>, "socket">): void;
@@ -2761,26 +2760,11 @@ declare module "bun" {
interface TCPSocket extends Socket {}
interface TLSSocket extends Socket {}
type BinaryTypeList = {
arraybuffer: ArrayBuffer;
buffer: Buffer;
uint8array: Uint8Array;
// TODO: DataView
// dataview: DataView;
};
type BinaryType = keyof BinaryTypeList;
interface SocketHandler<
Data = unknown,
DataBinaryType extends BinaryType = "buffer",
> {
interface SocketHandler<Data = unknown> {
open(socket: Socket<Data>): void | Promise<void>;
close?(socket: Socket<Data>): void | Promise<void>;
error?(socket: Socket<Data>, error: Error): void | Promise<void>;
data?(
socket: Socket<Data>,
data: BinaryTypeList[DataBinaryType],
): void | Promise<void>;
data?(socket: Socket<Data>, data: BufferSource): void | Promise<void>;
drain?(socket: Socket<Data>): void | Promise<void>;
/**
@@ -2802,24 +2786,7 @@ declare module "bun" {
* When `connectError` is not specified, the rejected promise will be added
* to the promise rejection queue.
*/
connectError?(socket: Socket<Data>, error: Error): void | Promise<void>;
/**
* Choose what `ArrayBufferView` is returned in the {@link SocketHandler.data} callback.
*
* @default "buffer"
*
* @remarks
* This lets you select the desired binary type for the `data` callback.
* It's a small performance optimization to let you avoid creating extra
* ArrayBufferView objects when possible.
*
* Bun originally defaulted to `Uint8Array` but when dealing with network
* data, it's more useful to be able to directly read from the bytes which
* `Buffer` allows.
*
*/
binaryType?: BinaryType;
connectError?(error: Error): void | Promise<void>;
}
interface SocketOptions<Data = unknown> {
@@ -3115,7 +3082,6 @@ declare module "bun" {
* ```
*/
readonly params: Record<string, string>;
readonly filePath: string;
readonly pathname: string;
readonly query: Record<string, string>;
readonly name: string;

View File

@@ -318,127 +318,127 @@ declare module "dns" {
hostname: string,
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "A",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "AAAA",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "ANY",
callback: (
err: ErrnoException | null,
addresses: AnyRecord[],
) => void,
): void;
export function resolve(
hostname: string,
rrtype: "CNAME",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "MX",
callback: (
err: ErrnoException | null,
addresses: MxRecord[],
) => void,
): void;
export function resolve(
hostname: string,
rrtype: "NAPTR",
callback: (
err: ErrnoException | null,
addresses: NaptrRecord[],
) => void,
): void;
export function resolve(
hostname: string,
rrtype: "NS",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "PTR",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "SOA",
callback: (err: ErrnoException | null, addresses: SoaRecord) => void,
): void;
export function resolve(
hostname: string,
rrtype: "SRV",
callback: (
err: ErrnoException | null,
addresses: SrvRecord[],
) => void,
): void;
export function resolve(
hostname: string,
rrtype: "TXT",
callback: (
err: ErrnoException | null,
addresses: string[][],
) => void,
): void;
export function resolve(
hostname: string,
rrtype: string,
callback: (
err: ErrnoException | null,
addresses:
| string[]
| MxRecord[]
| NaptrRecord[]
| SoaRecord
| SrvRecord[]
| string[][]
| AnyRecord[],
) => void,
): void;
export namespace resolve {
function __promisify__(
hostname: string,
rrtype?: "A" | "AAAA" | "CNAME" | "NS" | "PTR",
): Promise<string[]>;
function __promisify__(
hostname: string,
rrtype: "ANY",
): Promise<AnyRecord[]>;
function __promisify__(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
function __promisify__(
hostname: string,
rrtype: "NAPTR",
): Promise<NaptrRecord[]>;
function __promisify__(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
function __promisify__(
hostname: string,
rrtype: "SRV",
): Promise<SrvRecord[]>;
function __promisify__(
hostname: string,
rrtype: "TXT",
): Promise<string[][]>;
function __promisify__(
hostname: string,
rrtype: string,
): Promise<
| string[]
| MxRecord[]
| NaptrRecord[]
| SoaRecord
| SrvRecord[]
| string[][]
| AnyRecord[]
>;
}
// export function resolve(
// hostname: string,
// rrtype: "A",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "AAAA",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "ANY",
// callback: (
// err: ErrnoException | null,
// addresses: AnyRecord[],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "CNAME",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "MX",
// callback: (
// err: ErrnoException | null,
// addresses: MxRecord[],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "NAPTR",
// callback: (
// err: ErrnoException | null,
// addresses: NaptrRecord[],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "NS",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "PTR",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "SOA",
// callback: (err: ErrnoException | null, addresses: SoaRecord) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "SRV",
// callback: (
// err: ErrnoException | null,
// addresses: SrvRecord[],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "TXT",
// callback: (
// err: ErrnoException | null,
// addresses: string[][],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: string,
// callback: (
// err: ErrnoException | null,
// addresses:
// | string[]
// | MxRecord[]
// | NaptrRecord[]
// | SoaRecord
// | SrvRecord[]
// | string[][]
// | AnyRecord[],
// ) => void,
// ): void;
// export namespace resolve {
// function __promisify__(
// hostname: string,
// rrtype?: "A" | "AAAA" | "CNAME" | "NS" | "PTR",
// ): Promise<string[]>;
// function __promisify__(
// hostname: string,
// rrtype: "ANY",
// ): Promise<AnyRecord[]>;
// function __promisify__(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
// function __promisify__(
// hostname: string,
// rrtype: "NAPTR",
// ): Promise<NaptrRecord[]>;
// function __promisify__(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
// function __promisify__(
// hostname: string,
// rrtype: "SRV",
// ): Promise<SrvRecord[]>;
// function __promisify__(
// hostname: string,
// rrtype: "TXT",
// ): Promise<string[][]>;
// function __promisify__(
// hostname: string,
// rrtype: string,
// ): Promise<
// | string[]
// | MxRecord[]
// | NaptrRecord[]
// | SoaRecord
// | SrvRecord[]
// | string[][]
// | AnyRecord[]
// >;
// }
/**
* Uses the DNS protocol to resolve IPv4 addresses (`A` records) for the `hostname`. The `addresses` argument passed to the `callback` function
* will contain an array of IPv4 addresses (e.g.`['74.125.79.104', '74.125.79.105', '74.125.79.106']`).
@@ -512,38 +512,38 @@ declare module "dns" {
* will contain an array of canonical name records available for the `hostname`(e.g. `['bar.example.com']`).
* @since v0.3.2
*/
export function resolveCname(
hostname: string,
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export namespace resolveCname {
function __promisify__(hostname: string): Promise<string[]>;
}
// export function resolveCname(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export namespace resolveCname {
// function __promisify__(hostname: string): Promise<string[]>;
// }
/**
* Uses the DNS protocol to resolve `CAA` records for the `hostname`. The`addresses` argument passed to the `callback` function
* will contain an array of certification authority authorization records
* available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'}, {critical: 128, issue: 'pki.example.com'}]`).
* @since v15.0.0, v14.17.0
*/
export function resolveCaa(
hostname: string,
callback: (err: ErrnoException | null, records: CaaRecord[]) => void,
): void;
export namespace resolveCaa {
function __promisify__(hostname: string): Promise<CaaRecord[]>;
}
// export function resolveCaa(
// hostname: string,
// callback: (err: ErrnoException | null, records: CaaRecord[]) => void,
// ): void;
// export namespace resolveCaa {
// function __promisify__(hostname: string): Promise<CaaRecord[]>;
// }
/**
* Uses the DNS protocol to resolve mail exchange records (`MX` records) for the`hostname`. The `addresses` argument passed to the `callback` function will
* contain an array of objects containing both a `priority` and `exchange`property (e.g. `[{priority: 10, exchange: 'mx.example.com'}, ...]`).
* @since v0.1.27
*/
export function resolveMx(
hostname: string,
callback: (err: ErrnoException | null, addresses: MxRecord[]) => void,
): void;
export namespace resolveMx {
function __promisify__(hostname: string): Promise<MxRecord[]>;
}
// export function resolveMx(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: MxRecord[]) => void,
// ): void;
// export namespace resolveMx {
// function __promisify__(hostname: string): Promise<MxRecord[]>;
// }
/**
* Uses the DNS protocol to resolve regular expression based records (`NAPTR`records) for the `hostname`. The `addresses` argument passed to the `callback`function will contain an array of
* objects with the following properties:
@@ -567,37 +567,37 @@ declare module "dns" {
* ```
* @since v0.9.12
*/
export function resolveNaptr(
hostname: string,
callback: (err: ErrnoException | null, addresses: NaptrRecord[]) => void,
): void;
export namespace resolveNaptr {
function __promisify__(hostname: string): Promise<NaptrRecord[]>;
}
// export function resolveNaptr(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: NaptrRecord[]) => void,
// ): void;
// export namespace resolveNaptr {
// function __promisify__(hostname: string): Promise<NaptrRecord[]>;
// }
/**
* Uses the DNS protocol to resolve name server records (`NS` records) for the`hostname`. The `addresses` argument passed to the `callback` function will
* contain an array of name server records available for `hostname`(e.g. `['ns1.example.com', 'ns2.example.com']`).
* @since v0.1.90
*/
export function resolveNs(
hostname: string,
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export namespace resolveNs {
function __promisify__(hostname: string): Promise<string[]>;
}
// export function resolveNs(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export namespace resolveNs {
// function __promisify__(hostname: string): Promise<string[]>;
// }
/**
* Uses the DNS protocol to resolve pointer records (`PTR` records) for the`hostname`. The `addresses` argument passed to the `callback` function will
* be an array of strings containing the reply records.
* @since v6.0.0
*/
export function resolvePtr(
hostname: string,
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export namespace resolvePtr {
function __promisify__(hostname: string): Promise<string[]>;
}
// export function resolvePtr(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export namespace resolvePtr {
// function __promisify__(hostname: string): Promise<string[]>;
// }
/**
* Uses the DNS protocol to resolve a start of authority record (`SOA` record) for
* the `hostname`. The `address` argument passed to the `callback` function will
@@ -624,13 +624,13 @@ declare module "dns" {
* ```
* @since v0.11.10
*/
export function resolveSoa(
hostname: string,
callback: (err: ErrnoException | null, address: SoaRecord) => void,
): void;
export namespace resolveSoa {
function __promisify__(hostname: string): Promise<SoaRecord>;
}
// export function resolveSoa(
// hostname: string,
// callback: (err: ErrnoException | null, address: SoaRecord) => void,
// ): void;
// export namespace resolveSoa {
// function __promisify__(hostname: string): Promise<SoaRecord>;
// }
/**
* Uses the DNS protocol to resolve service records (`SRV` records) for the`hostname`. The `addresses` argument passed to the `callback` function will
* be an array of objects with the following properties:
@@ -650,13 +650,13 @@ declare module "dns" {
* ```
* @since v0.1.27
*/
export function resolveSrv(
hostname: string,
callback: (err: ErrnoException | null, addresses: SrvRecord[]) => void,
): void;
export namespace resolveSrv {
function __promisify__(hostname: string): Promise<SrvRecord[]>;
}
// export function resolveSrv(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: SrvRecord[]) => void,
// ): void;
// export namespace resolveSrv {
// function __promisify__(hostname: string): Promise<SrvRecord[]>;
// }
/**
* Uses the DNS protocol to resolve text queries (`TXT` records) for the`hostname`. The `records` argument passed to the `callback` function is a
* two-dimensional array of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of
@@ -664,13 +664,13 @@ declare module "dns" {
* treated separately.
* @since v0.1.27
*/
export function resolveTxt(
hostname: string,
callback: (err: ErrnoException | null, addresses: string[][]) => void,
): void;
export namespace resolveTxt {
function __promisify__(hostname: string): Promise<string[][]>;
}
// export function resolveTxt(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: string[][]) => void,
// ): void;
// export namespace resolveTxt {
// function __promisify__(hostname: string): Promise<string[][]>;
// }
/**
* Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query).
* The `ret` argument passed to the `callback` function will be an array containing
@@ -861,14 +861,14 @@ declare module "dns" {
resolve4: typeof resolve4;
resolve6: typeof resolve6;
// resolveAny: typeof resolveAny;
resolveCname: typeof resolveCname;
resolveMx: typeof resolveMx;
resolveNaptr: typeof resolveNaptr;
resolveNs: typeof resolveNs;
resolvePtr: typeof resolvePtr;
resolveSoa: typeof resolveSoa;
resolveSrv: typeof resolveSrv;
resolveTxt: typeof resolveTxt;
// resolveCname: typeof resolveCname;
// resolveMx: typeof resolveMx;
// resolveNaptr: typeof resolveNaptr;
// resolveNs: typeof resolveNs;
// resolvePtr: typeof resolvePtr;
// resolveSoa: typeof resolveSoa;
// resolveSrv: typeof resolveSrv;
// resolveTxt: typeof resolveTxt;
// reverse: typeof reverse;
/**
* The resolver instance will send its requests from the specified IP address.

View File

@@ -10,12 +10,12 @@ declare module "dns/promises" {
LookupOneOptions,
LookupAllOptions,
LookupOptions,
AnyRecord,
CaaRecord,
MxRecord,
NaptrRecord,
SoaRecord,
SrvRecord,
// AnyRecord,
// CaaRecord,
// MxRecord,
// NaptrRecord,
// SoaRecord,
// SrvRecord,
ResolveWithTtlOptions,
RecordWithTtl,
ResolveOptions,
@@ -134,30 +134,30 @@ declare module "dns/promises" {
* @param [rrtype='A'] Resource record type.
*/
function resolve(hostname: string): Promise<string[]>;
function resolve(hostname: string, rrtype: "A"): Promise<string[]>;
function resolve(hostname: string, rrtype: "AAAA"): Promise<string[]>;
function resolve(hostname: string, rrtype: "ANY"): Promise<AnyRecord[]>;
function resolve(hostname: string, rrtype: "CAA"): Promise<CaaRecord[]>;
function resolve(hostname: string, rrtype: "CNAME"): Promise<string[]>;
function resolve(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
function resolve(hostname: string, rrtype: "NAPTR"): Promise<NaptrRecord[]>;
function resolve(hostname: string, rrtype: "NS"): Promise<string[]>;
function resolve(hostname: string, rrtype: "PTR"): Promise<string[]>;
function resolve(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
function resolve(hostname: string, rrtype: "SRV"): Promise<SrvRecord[]>;
function resolve(hostname: string, rrtype: "TXT"): Promise<string[][]>;
function resolve(
hostname: string,
rrtype: string,
): Promise<
| string[]
| MxRecord[]
| NaptrRecord[]
| SoaRecord
| SrvRecord[]
| string[][]
| AnyRecord[]
>;
// function resolve(hostname: string, rrtype: "A"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "AAAA"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "ANY"): Promise<AnyRecord[]>;
// function resolve(hostname: string, rrtype: "CAA"): Promise<CaaRecord[]>;
// function resolve(hostname: string, rrtype: "CNAME"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
// function resolve(hostname: string, rrtype: "NAPTR"): Promise<NaptrRecord[]>;
// function resolve(hostname: string, rrtype: "NS"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "PTR"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
// function resolve(hostname: string, rrtype: "SRV"): Promise<SrvRecord[]>;
// function resolve(hostname: string, rrtype: "TXT"): Promise<string[][]>;
// function resolve(
// hostname: string,
// rrtype: string,
// ): Promise<
// | string[]
// | MxRecord[]
// | NaptrRecord[]
// | SoaRecord
// | SrvRecord[]
// | string[][]
// | AnyRecord[]
// >;
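The overload list above narrows the return type from the `rrtype` string literal. A minimal sketch of how that reads at a call site, assuming a runtime that implements `node:dns/promises`:

```ts
import { resolve } from "node:dns/promises";

const a = await resolve("example.com");          // string[] (rrtype defaults to "A")
const mx = await resolve("example.com", "MX");   // MxRecord[]
const txt = await resolve("example.com", "TXT"); // string[][]

for (const { priority, exchange } of mx) {
  console.log(priority, exchange);
}
```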
/**
* Uses the DNS protocol to resolve IPv4 addresses (`A` records) for the`hostname`. On success, the `Promise` is resolved with an array of IPv4
* addresses (e.g. `['74.125.79.104', '74.125.79.105', '74.125.79.106']`).
@@ -223,20 +223,20 @@ declare module "dns/promises" {
* certification authority authorization records available for the `hostname`(e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'},{critical: 128, issue: 'pki.example.com'}]`).
* @since v15.0.0, v14.17.0
*/
function resolveCaa(hostname: string): Promise<CaaRecord[]>;
// function resolveCaa(hostname: string): Promise<CaaRecord[]>;
/**
* Uses the DNS protocol to resolve `CNAME` records for the `hostname`. On success,
* the `Promise` is resolved with an array of canonical name records available for
* the `hostname` (e.g. `['bar.example.com']`).
* @since v10.6.0
*/
function resolveCname(hostname: string): Promise<string[]>;
// function resolveCname(hostname: string): Promise<string[]>;
/**
* Uses the DNS protocol to resolve mail exchange records (`MX` records) for the`hostname`. On success, the `Promise` is resolved with an array of objects
* containing both a `priority` and `exchange` property (e.g.`[{priority: 10, exchange: 'mx.example.com'}, ...]`).
* @since v10.6.0
*/
function resolveMx(hostname: string): Promise<MxRecord[]>;
// function resolveMx(hostname: string): Promise<MxRecord[]>;
/**
* Uses the DNS protocol to resolve regular expression based records (`NAPTR`records) for the `hostname`. On success, the `Promise` is resolved with an array
* of objects with the following properties:
@@ -260,19 +260,19 @@ declare module "dns/promises" {
* ```
* @since v10.6.0
*/
function resolveNaptr(hostname: string): Promise<NaptrRecord[]>;
// function resolveNaptr(hostname: string): Promise<NaptrRecord[]>;
/**
* Uses the DNS protocol to resolve name server records (`NS` records) for the`hostname`. On success, the `Promise` is resolved with an array of name server
* records available for `hostname` (e.g.`['ns1.example.com', 'ns2.example.com']`).
* @since v10.6.0
*/
function resolveNs(hostname: string): Promise<string[]>;
// function resolveNs(hostname: string): Promise<string[]>;
/**
* Uses the DNS protocol to resolve pointer records (`PTR` records) for the`hostname`. On success, the `Promise` is resolved with an array of strings
* containing the reply records.
* @since v10.6.0
*/
function resolvePtr(hostname: string): Promise<string[]>;
// function resolvePtr(hostname: string): Promise<string[]>;
/**
* Uses the DNS protocol to resolve a start of authority record (`SOA` record) for
* the `hostname`. On success, the `Promise` is resolved with an object with the
@@ -299,7 +299,7 @@ declare module "dns/promises" {
* ```
* @since v10.6.0
*/
function resolveSoa(hostname: string): Promise<SoaRecord>;
// function resolveSoa(hostname: string): Promise<SoaRecord>;
/**
* Uses the DNS protocol to resolve service records (`SRV` records) for the`hostname`. On success, the `Promise` is resolved with an array of objects with
* the following properties:
@@ -319,7 +319,7 @@ declare module "dns/promises" {
* ```
* @since v10.6.0
*/
function resolveSrv(hostname: string): Promise<SrvRecord[]>;
// function resolveSrv(hostname: string): Promise<SrvRecord[]>;
/**
* Uses the DNS protocol to resolve text queries (`TXT` records) for the`hostname`. On success, the `Promise` is resolved with a two-dimensional array
* of the text records available for `hostname` (e.g.`[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of
@@ -327,7 +327,7 @@ declare module "dns/promises" {
* treated separately.
* @since v10.6.0
*/
function resolveTxt(hostname: string): Promise<string[][]>;
// function resolveTxt(hostname: string): Promise<string[][]>;
/**
* Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an
* array of host names.
@@ -384,14 +384,14 @@ declare module "dns/promises" {
resolve4: typeof resolve4;
resolve6: typeof resolve6;
// resolveAny: typeof resolveAny;
resolveCname: typeof resolveCname;
resolveMx: typeof resolveMx;
resolveNaptr: typeof resolveNaptr;
resolveNs: typeof resolveNs;
resolvePtr: typeof resolvePtr;
resolveSoa: typeof resolveSoa;
resolveSrv: typeof resolveSrv;
resolveTxt: typeof resolveTxt;
// resolveCname: typeof resolveCname;
// resolveMx: typeof resolveMx;
// resolveNaptr: typeof resolveNaptr;
// resolveNs: typeof resolveNs;
// resolvePtr: typeof resolvePtr;
// resolveSoa: typeof resolveSoa;
// resolveSrv: typeof resolveSrv;
// resolveTxt: typeof resolveTxt;
// reverse: typeof reverse;
// setLocalAddress(ipv4?: string, ipv6?: string): void;
// setServers: typeof setServers;

View File

@@ -24,7 +24,6 @@ declare module "fs/promises" {
WriteFileOptions,
SimlinkType,
Abortable,
RmOptions,
} from "node:fs";
interface FlagAndOpenMode {
@@ -671,12 +670,6 @@ declare module "fs/promises" {
| BufferEncoding
| null,
): Promise<string | Buffer>;
/**
* Asynchronously removes files and directories (modeled on the standard POSIX `rm`utility). No arguments other than a possible exception are given to the
* completion callback.
* @since v14.14.0
*/
export function rm(path: PathLike, options?: RmOptions): Promise<void>;
}
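`RmOptions` here carries the usual Node shape (`recursive`, `force`), so removing a directory tree is a one-liner. A minimal sketch:

```ts
import { rm } from "node:fs/promises";

// Remove a directory tree; `force` suppresses the error if the path does not exist.
await rm("./dist", { recursive: true, force: true });
```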
declare module "node:fs/promises" {

View File

@@ -318,9 +318,7 @@ interface EncodeIntoResult {
interface Process {
/**
* A Node.js LTS version
*
* To see the current Bun version, use {@link Bun.version}
* The current version of Bun
*/
version: string;
/**
@@ -344,7 +342,7 @@ interface Process {
arch: Architecture;
platform: Platform;
argv: string[];
execArgv: string[];
// execArgv: string[];
env: Bun.Env;
/** Whether you are using Bun */
@@ -393,10 +391,6 @@ interface Process {
* @returns Bun process's file mode creation mask.
*/
umask(mask?: number): number;
emitWarning(warning: string | Error /*name?: string, ctor?: Function*/): void;
readonly config: Object;
}
declare var process: Process;
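The doc-comment change above is the substantive part: depending on which side of this diff applies, `process.version` reports either a Node.js-compatible version string or Bun's own version, while `Bun.version` always reports the Bun release. A minimal sketch:

```ts
console.log(process.version); // Node-style version string on one side of this change, the Bun version on the other
console.log(Bun.version);     // always the Bun release, e.g. "0.5.0" (illustrative)
console.log(process.platform, process.arch, process.argv.slice(2));

// umask() with no argument returns the current file mode creation mask.
console.log(process.umask().toString(8));
```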
@@ -834,21 +828,6 @@ interface RequestInit {
timeout?: boolean;
}
interface FetchRequestInit extends RequestInit {
/**
* Log the raw HTTP request & response to stdout. This API may be
* removed in a future version of Bun without notice.
* This is a custom property that is not part of the Fetch API specification.
* It exists mostly as a debugging tool
*/
verbose?: boolean;
/**
* Override http_proxy or HTTPS_PROXY
* This is a custom property that is not part of the Fetch API specification.
*/
proxy?: string;
}
/**
* [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) represents an HTTP request.
*
@@ -1253,8 +1232,19 @@ declare function clearTimeout(id?: number): void;
*
*/
declare function fetch(
url: string | URL,
init?: FetchRequestInit,
url: string,
init?: RequestInit,
/**
* This is a custom property that is not part of the Fetch API specification.
* It exists mostly as a debugging tool
*/
bunOnlyOptions?: {
/**
* Log the raw HTTP request & response to stdout. This API may be
* removed in a future version of Bun without notice.
*/
verbose: boolean;
},
): Promise<Response>;
/**
@@ -1268,7 +1258,21 @@ declare function fetch(
*
*/
// tslint:disable-next-line:unified-signatures
declare function fetch(request: Request, init?: RequestInit): Promise<Response>;
declare function fetch(
request: Request,
init?: RequestInit,
/**
* This is a custom property that is not part of the Fetch API specification.
* It exists mostly as a debugging tool
*/
bunOnlyOptions?: {
/**
* Log the raw HTTP request & response to stdout. This API may be
* removed in a future version of Bun without notice.
*/
verbose: boolean;
},
): Promise<Response>;
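Both shapes of the debug option show up in this hunk: one folds `verbose`/`proxy` into the init object (`FetchRequestInit`), the other passes a separate `bunOnlyOptions` third argument. A hedged sketch of each; neither is part of the Fetch specification and either may change without notice:

```ts
// Option-bag shape (FetchRequestInit side): `verbose` and `proxy` ride along with RequestInit.
const res1 = await fetch("https://example.com", {
  verbose: true,
  proxy: "http://localhost:8080", // hypothetical proxy URL
} as RequestInit & { verbose?: boolean; proxy?: string });

// Third-argument shape (bunOnlyOptions side): the extra object only carries `verbose`.
const res2 = await (fetch as any)("https://example.com", {}, { verbose: true });

console.log(res1.status, res2.status);
```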
declare function queueMicrotask(callback: (...args: any[]) => void): void;
/**

View File

@@ -0,0 +1,15 @@
Images included here can be referenced in code comments, for example:
```ts
/**
* ## Large headline
*
*
* **Images** are relative to images/ directory
* ![image_description](media://image_filehere.gif)
*
*/
export class MyUtil<T = { BOT_TOKEN: string }> {
constructor(public config: T) {}
}
```

View File

@@ -5,39 +5,32 @@
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/// <reference no-default-lib="true" />
/// <reference lib="esnext" />
/// <reference path="./assert.d.ts" />
/// <reference path="./buffer.d.ts" />
/// <reference path="./bun-test.d.ts" />
/// <reference path="./bun.d.ts" />
/// <reference path="./child_process.d.ts" />
/// <reference path="./constants.d.ts" />
/// <reference path="./crypto.d.ts" />
/// <reference path="./dns.d.ts" />
/// <reference path="./dns/promises.d.ts" />
/// <reference path="./domain.d.ts" />
/// <reference path="./events.d.ts" />
/// <reference path="./buffer.d.ts" />
/// <reference path="./sqlite.d.ts" />
/// <reference path="./ffi.d.ts" />
/// <reference path="./fs.d.ts" />
/// <reference path="./fs/promises.d.ts" />
/// <reference path="./globals.d.ts" />
/// <reference path="./html-rewriter.d.ts" />
/// <reference path="./http.d.ts" />
/// <reference path="./jsc.d.ts" />
/// <reference path="./module.d.ts" />
/// <reference path="./net.d.ts" />
/// <reference path="./os.d.ts" />
/// <reference path="./globals.d.ts" />
/// <reference path="./path.d.ts" />
/// <reference path="./punycode.d.ts" />
/// <reference path="./querystring.d.ts" />
/// <reference path="./readline.d.ts" />
/// <reference path="./readline/promises.d.ts" />
/// <reference path="./sqlite.d.ts" />
/// <reference path="./stream.d.ts" />
/// <reference path="./string_decoder.d.ts" />
/// <reference path="./supports-color.d.ts" />
/// <reference path="./timers.d.ts" />
/// <reference path="./tls.d.ts" />
/// <reference path="./tty.d.ts" />
/// <reference path="./url.d.ts" />
/// <reference path="./bun-test.d.ts" />
/// <reference path="./jsc.d.ts" />
/// <reference path="./assert.d.ts" />
/// <reference path="./events.d.ts" />
/// <reference path="./os.d.ts" />
/// <reference path="./domain.d.ts" />
/// <reference path="./util.d.ts" />
/// <reference path="./querystring.d.ts" />
/// <reference path="./string_decoder.d.ts" />
/// <reference path="./timers.d.ts" />
/// <reference path="./stream.d.ts" />
/// <reference path="./crypto.d.ts" />
/// <reference path="./child_process.d.ts" />
/// <reference path="./constants.d.ts" />
/// <reference path="./url.d.ts" />
/// <reference path="./tty.d.ts" />
/// <reference path="./http.d.ts" />
/// <reference path="./punycode.d.ts" />
/// <reference path="./zlib.d.ts" />
/// <reference path="./supports-color.d.ts" />

View File

@@ -18,16 +18,16 @@ declare module "net" {
Abortable,
// EventEmitter
} from "node:events";
import * as dns from "node:dns";
type LookupFunction = (
hostname: string,
options: dns.LookupOneOptions,
callback: (
err: ErrnoException | null,
address: string,
family: number,
) => void,
) => void;
// import * as dns from "node:dns";
// type LookupFunction = (
// hostname: string,
// options: dns.LookupOneOptions,
// callback: (
// err: NodeJS.ErrnoException | null,
// address: string,
// family: number,
// ) => void,
// ) => void;
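`LookupFunction` (live on one side of this hunk, commented out on the other) is the shape Node's `net.connect` accepts for a custom `lookup` option. A minimal sketch, assuming the callback-style `dns.lookup` is available at runtime; the cast is there because the `lookup` option may not be declared in these types:

```ts
import * as net from "node:net";
import * as dns from "node:dns";

// Hypothetical usage: pass a custom resolver matching the LookupFunction shape.
const socket = net.connect({
  host: "example.com",
  port: 80,
  lookup: (hostname, options, callback) => dns.lookup(hostname, options, callback),
} as any);
socket.on("connect", () => socket.end());
```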
interface AddressInfo {
address: string;
family: string;

View File

@@ -2,7 +2,7 @@
"name": "bun-types",
"types": "index.d.ts",
"private": true,
"repository": "https://github.com/oven-sh/bun",
"repository": "https://github.com/oven-sh/bun-types",
"scripts": {
"build": "rm -rf ./dist && bun run bundle && bun run fmt",
"bundle": "bun scripts/bundle.ts ./dist",

View File

@@ -1,700 +0,0 @@
/**
* The `readline` module provides an interface for reading data from a `Readable` stream (such as `process.stdin`) one line at a time.
*
* To use the promise-based APIs:
*
* ```js
* import * as readline from 'node:readline/promises';
* ```
*
* To use the callback and sync APIs:
*
* ```js
* import * as readline from 'node:readline';
* ```
*
* The following simple example illustrates the basic use of the `readline` module.
*
* ```js
* import * as readline from 'node:readline/promises';
* import { stdin as input, stdout as output } from 'node:process';
*
* const rl = readline.createInterface({ input, output });
*
* const answer = await rl.question('What do you think of Node.js? ');
*
* console.log(`Thank you for your valuable feedback: ${answer}`);
*
* rl.close();
* ```
*
* Once this code is invoked, the Node.js application will not terminate until the`readline.Interface` is closed because the interface waits for data to be
* received on the `input` stream.
* @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/readline.js)
*/
declare module "readline" {
import { Readable, Writable } from "node:stream";
import { Abortable, EventEmitter } from "node:events";
import * as promises from "node:readline/promises";
export { promises };
export interface Key {
sequence?: string | undefined;
name?: string | undefined;
ctrl?: boolean | undefined;
meta?: boolean | undefined;
shift?: boolean | undefined;
}
/**
* Instances of the `readline.Interface` class are constructed using the`readline.createInterface()` method. Every instance is associated with a
* single `input` `Readable` stream and a single `output` `Writable` stream.
* The `output` stream is used to print prompts for user input that arrives on,
* and is read from, the `input` stream.
* @since v0.1.104
*/
export class Interface extends EventEmitter {
readonly terminal: boolean;
/**
* The current input data being processed by node.
*
* This can be used when collecting input from a TTY stream to retrieve the
* current value that has been processed thus far, prior to the `line` event
* being emitted. Once the `line` event has been emitted, this property will
* be an empty string.
*
* Be aware that modifying the value during the instance runtime may have
* unintended consequences if `rl.cursor` is not also controlled.
*
* **If not using a TTY stream for input, use the `'line'` event.**
*
* One possible use case would be as follows:
*
* ```js
* const values = ['lorem ipsum', 'dolor sit amet'];
* const rl = readline.createInterface(process.stdin);
* const showResults = debounce(() => {
* console.log(
* '\n',
* values.filter((val) => val.startsWith(rl.line)).join(' ')
* );
* }, 300);
* process.stdin.on('keypress', (c, k) => {
* showResults();
* });
* ```
* @since v0.1.98
*/
readonly line: string;
/**
* The cursor position relative to `rl.line`.
*
* This will track where the current cursor lands in the input string, when
* reading input from a TTY stream. The position of cursor determines the
* portion of the input string that will be modified as input is processed,
* as well as the column where the terminal caret will be rendered.
* @since v0.1.98
*/
readonly cursor: number;
/**
* NOTE: According to the documentation:
*
* > Instances of the `readline.Interface` class are constructed using the
* > `readline.createInterface()` method.
*
* @see https://nodejs.org/dist/latest-v10.x/docs/api/readline.html#readline_class_interface
*/
protected constructor(
input: Readable,
output?: Writable,
completer?: Completer | AsyncCompleter,
terminal?: boolean,
);
/**
* NOTE: According to the documentation:
*
* > Instances of the `readline.Interface` class are constructed using the
* > `readline.createInterface()` method.
*
* @see https://nodejs.org/dist/latest-v10.x/docs/api/readline.html#readline_class_interface
*/
protected constructor(options: ReadLineOptions);
/**
* The `rl.getPrompt()` method returns the current prompt used by `rl.prompt()`.
* @since v15.3.0
* @return the current prompt string
*/
getPrompt(): string;
/**
* The `rl.setPrompt()` method sets the prompt that will be written to `output`whenever `rl.prompt()` is called.
* @since v0.1.98
*/
setPrompt(prompt: string): void;
/**
* The `rl.prompt()` method writes the `readline.Interface` instances configured`prompt` to a new line in `output` in order to provide a user with a new
* location at which to provide input.
*
* When called, `rl.prompt()` will resume the `input` stream if it has been
* paused.
*
* If the `readline.Interface` was created with `output` set to `null` or`undefined` the prompt is not written.
* @since v0.1.98
* @param preserveCursor If `true`, prevents the cursor placement from being reset to `0`.
*/
prompt(preserveCursor?: boolean): void;
/**
* The `rl.question()` method displays the `query` by writing it to the `output`,
* waits for user input to be provided on `input`, then invokes the `callback`function passing the provided input as the first argument.
*
* When called, `rl.question()` will resume the `input` stream if it has been
* paused.
*
* If the `readline.Interface` was created with `output` set to `null` or`undefined` the `query` is not written.
*
* The `callback` function passed to `rl.question()` does not follow the typical
* pattern of accepting an `Error` object or `null` as the first argument.
* The `callback` is called with the provided answer as the only argument.
*
* Example usage:
*
* ```js
* rl.question('What is your favorite food? ', (answer) => {
* console.log(`Oh, so your favorite food is ${answer}`);
* });
* ```
*
* Using an `AbortController` to cancel a question.
*
* ```js
* const ac = new AbortController();
* const signal = ac.signal;
*
* rl.question('What is your favorite food? ', { signal }, (answer) => {
* console.log(`Oh, so your favorite food is ${answer}`);
* });
*
* signal.addEventListener('abort', () => {
* console.log('The food question timed out');
* }, { once: true });
*
* setTimeout(() => ac.abort(), 10000);
* ```
*
* If this method is invoked as it's util.promisify()ed version, it returns a
* Promise that fulfills with the answer. If the question is canceled using
* an `AbortController` it will reject with an `AbortError`.
*
* ```js
* const util = require('util');
* const question = util.promisify(rl.question).bind(rl);
*
* async function questionExample() {
* try {
* const answer = await question('What is your favorite food? ');
* console.log(`Oh, so your favorite food is ${answer}`);
* } catch (err) {
* console.error('Question rejected', err);
* }
* }
* questionExample();
* ```
* @since v0.3.3
* @param query A statement or query to write to `output`, prepended to the prompt.
* @param callback A callback function that is invoked with the user's input in response to the `query`.
*/
question(query: string, callback: (answer: string) => void): void;
question(
query: string,
options: Abortable,
callback: (answer: string) => void,
): void;
/**
* The `rl.pause()` method pauses the `input` stream, allowing it to be resumed
* later if necessary.
*
* Calling `rl.pause()` does not immediately pause other events (including`'line'`) from being emitted by the `readline.Interface` instance.
* @since v0.3.4
*/
pause(): this;
/**
* The `rl.resume()` method resumes the `input` stream if it has been paused.
* @since v0.3.4
*/
resume(): this;
/**
* The `rl.close()` method closes the `readline.Interface` instance and
* relinquishes control over the `input` and `output` streams. When called,
* the `'close'` event will be emitted.
*
* Calling `rl.close()` does not immediately stop other events (including `'line'`)
* from being emitted by the `readline.Interface` instance.
* @since v0.1.98
*/
close(): void;
/**
* The `rl.write()` method will write either `data` or a key sequence identified
* by `key` to the `output`. The `key` argument is supported only if `output` is
* a `TTY` text terminal. See `TTY keybindings` for a list of key
* combinations.
*
* If `key` is specified, `data` is ignored.
*
* When called, `rl.write()` will resume the `input` stream if it has been
* paused.
*
* If the `readline.Interface` was created with `output` set to `null` or`undefined` the `data` and `key` are not written.
*
* ```js
* rl.write('Delete this!');
* // Simulate Ctrl+U to delete the line written previously
* rl.write(null, { ctrl: true, name: 'u' });
* ```
*
* The `rl.write()` method will write the data to the `readline` `Interface`'s`input`_as if it were provided by the user_.
* @since v0.1.98
*/
write(data: string | Buffer, key?: Key): void;
write(data: undefined | null | string | Buffer, key: Key): void;
/**
* Returns the real position of the cursor in relation to the input
* prompt + string. Long input (wrapping) strings, as well as multiple
* line prompts are included in the calculations.
* @since v13.5.0, v12.16.0
*/
getCursorPos(): CursorPos;
/**
* events.EventEmitter
* 1. close
* 2. line
* 3. pause
* 4. resume
* 5. SIGCONT
* 6. SIGINT
* 7. SIGTSTP
* 8. history
*/
addListener(event: string, listener: (...args: any[]) => void): this;
addListener(event: "close", listener: () => void): this;
addListener(event: "line", listener: (input: string) => void): this;
addListener(event: "pause", listener: () => void): this;
addListener(event: "resume", listener: () => void): this;
addListener(event: "SIGCONT", listener: () => void): this;
addListener(event: "SIGINT", listener: () => void): this;
addListener(event: "SIGTSTP", listener: () => void): this;
addListener(event: "history", listener: (history: string[]) => void): this;
emit(event: string | symbol, ...args: any[]): boolean;
emit(event: "close"): boolean;
emit(event: "line", input: string): boolean;
emit(event: "pause"): boolean;
emit(event: "resume"): boolean;
emit(event: "SIGCONT"): boolean;
emit(event: "SIGINT"): boolean;
emit(event: "SIGTSTP"): boolean;
emit(event: "history", history: string[]): boolean;
on(event: string, listener: (...args: any[]) => void): this;
on(event: "close", listener: () => void): this;
on(event: "line", listener: (input: string) => void): this;
on(event: "pause", listener: () => void): this;
on(event: "resume", listener: () => void): this;
on(event: "SIGCONT", listener: () => void): this;
on(event: "SIGINT", listener: () => void): this;
on(event: "SIGTSTP", listener: () => void): this;
on(event: "history", listener: (history: string[]) => void): this;
once(event: string, listener: (...args: any[]) => void): this;
once(event: "close", listener: () => void): this;
once(event: "line", listener: (input: string) => void): this;
once(event: "pause", listener: () => void): this;
once(event: "resume", listener: () => void): this;
once(event: "SIGCONT", listener: () => void): this;
once(event: "SIGINT", listener: () => void): this;
once(event: "SIGTSTP", listener: () => void): this;
once(event: "history", listener: (history: string[]) => void): this;
prependListener(event: string, listener: (...args: any[]) => void): this;
prependListener(event: "close", listener: () => void): this;
prependListener(event: "line", listener: (input: string) => void): this;
prependListener(event: "pause", listener: () => void): this;
prependListener(event: "resume", listener: () => void): this;
prependListener(event: "SIGCONT", listener: () => void): this;
prependListener(event: "SIGINT", listener: () => void): this;
prependListener(event: "SIGTSTP", listener: () => void): this;
prependListener(
event: "history",
listener: (history: string[]) => void,
): this;
prependOnceListener(
event: string,
listener: (...args: any[]) => void,
): this;
prependOnceListener(event: "close", listener: () => void): this;
prependOnceListener(event: "line", listener: (input: string) => void): this;
prependOnceListener(event: "pause", listener: () => void): this;
prependOnceListener(event: "resume", listener: () => void): this;
prependOnceListener(event: "SIGCONT", listener: () => void): this;
prependOnceListener(event: "SIGINT", listener: () => void): this;
prependOnceListener(event: "SIGTSTP", listener: () => void): this;
prependOnceListener(
event: "history",
listener: (history: string[]) => void,
): this;
[Symbol.asyncIterator](): AsyncIterableIterator<string>;
}
export type ReadLine = Interface; // type forwarded for backwards compatibility
export type Completer = (line: string) => CompleterResult;
export type AsyncCompleter = (
line: string,
callback: (err?: null | Error, result?: CompleterResult) => void,
) => void;
export type CompleterResult = [string[], string];
export interface ReadLineOptions {
input: Readable;
output?: Writable | undefined;
completer?: Completer | AsyncCompleter | undefined;
terminal?: boolean | undefined;
/**
* Initial list of history lines. This option makes sense
* only if `terminal` is set to `true` by the user or by an internal `output`
* check, otherwise the history caching mechanism is not initialized at all.
* @default []
*/
history?: string[] | undefined;
historySize?: number | undefined;
prompt?: string | undefined;
crlfDelay?: number | undefined;
/**
* If `true`, when a new input line added
* to the history list duplicates an older one, this removes the older line
* from the list.
* @default false
*/
removeHistoryDuplicates?: boolean | undefined;
escapeCodeTimeout?: number | undefined;
tabSize?: number | undefined;
}
/**
* The `readline.createInterface()` method creates a new `readline.Interface`instance.
*
* ```js
* const readline = require('readline');
* const rl = readline.createInterface({
* input: process.stdin,
* output: process.stdout
* });
* ```
*
* Once the `readline.Interface` instance is created, the most common case is to
* listen for the `'line'` event:
*
* ```js
* rl.on('line', (line) => {
* console.log(`Received: ${line}`);
* });
* ```
*
* If `terminal` is `true` for this instance then the `output` stream will get
* the best compatibility if it defines an `output.columns` property and emits
* a `'resize'` event on the `output` if or when the columns ever change
* (`process.stdout` does this automatically when it is a TTY).
*
* When creating a `readline.Interface` using `stdin` as input, the program
* will not terminate until it receives `EOF` (Ctrl+D on
* Linux/macOS, Ctrl+Z followed by Return on
* Windows).
* If you want your application to exit without waiting for user input, you can `unref()` the standard input stream:
*
* ```js
* process.stdin.unref();
* ```
* @since v0.1.98
*/
export function createInterface(
input: Readable,
output?: Writable,
completer?: Completer | AsyncCompleter,
terminal?: boolean,
): Interface;
export function createInterface(options: ReadLineOptions): Interface;
/**
* The `readline.emitKeypressEvents()` method causes the given `Readable` stream to begin emitting `'keypress'` events corresponding to received input.
*
* Optionally, `interface` specifies a `readline.Interface` instance for which
* autocompletion is disabled when copy-pasted input is detected.
*
* If the `stream` is a `TTY`, then it must be in raw mode.
*
* This is automatically called by any readline instance on its `input` if the`input` is a terminal. Closing the `readline` instance does not stop
* the `input` from emitting `'keypress'` events.
*
* ```js
* readline.emitKeypressEvents(process.stdin);
* if (process.stdin.isTTY)
* process.stdin.setRawMode(true);
* ```
*
* ## Example: Tiny CLI
*
* The following example illustrates the use of `readline.Interface` class to
* implement a small command-line interface:
*
* ```js
* const readline = require('readline');
* const rl = readline.createInterface({
* input: process.stdin,
* output: process.stdout,
* prompt: 'OHAI> '
* });
*
* rl.prompt();
*
* rl.on('line', (line) => {
* switch (line.trim()) {
* case 'hello':
* console.log('world!');
* break;
* default:
* console.log(`Say what? I might have heard '${line.trim()}'`);
* break;
* }
* rl.prompt();
* }).on('close', () => {
* console.log('Have a great day!');
* process.exit(0);
* });
* ```
*
* ## Example: Read file stream line-by-Line
*
* A common use case for `readline` is to consume an input file one line at a
* time. The easiest way to do so is leveraging the `fs.ReadStream` API as
* well as a `for await...of` loop:
*
* ```js
* const fs = require('fs');
* const readline = require('readline');
*
* async function processLineByLine() {
* const fileStream = fs.createReadStream('input.txt');
*
* const rl = readline.createInterface({
* input: fileStream,
* crlfDelay: Infinity
* });
* // Note: we use the crlfDelay option to recognize all instances of CR LF
* // ('\r\n') in input.txt as a single line break.
*
* for await (const line of rl) {
* // Each line in input.txt will be successively available here as `line`.
* console.log(`Line from file: ${line}`);
* }
* }
*
* processLineByLine();
* ```
*
* Alternatively, one could use the `'line'` event:
*
* ```js
* const fs = require('fs');
* const readline = require('readline');
*
* const rl = readline.createInterface({
* input: fs.createReadStream('sample.txt'),
* crlfDelay: Infinity
* });
*
* rl.on('line', (line) => {
* console.log(`Line from file: ${line}`);
* });
* ```
*
* Currently, `for await...of` loop can be a bit slower. If `async` / `await`flow and speed are both essential, a mixed approach can be applied:
*
* ```js
* const { once } = require('events');
* const { createReadStream } = require('fs');
* const { createInterface } = require('readline');
*
* (async function processLineByLine() {
* try {
* const rl = createInterface({
* input: createReadStream('big-file.txt'),
* crlfDelay: Infinity
* });
*
* rl.on('line', (line) => {
* // Process the line.
* });
*
* await once(rl, 'close');
*
* console.log('File processed.');
* } catch (err) {
* console.error(err);
* }
* })();
* ```
* @since v0.7.7
*/
export function emitKeypressEvents(
stream: Readable,
readlineInterface?: Interface,
): void;
export type Direction = -1 | 0 | 1;
export interface CursorPos {
rows: number;
cols: number;
}
/**
* The `readline.clearLine()` method clears current line of given `TTY` stream
* in a specified direction identified by `dir`.
* @since v0.7.7
* @param callback Invoked once the operation completes.
* @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
*/
export function clearLine(
stream: Writable,
dir: Direction,
callback?: () => void,
): boolean;
/**
* The `readline.clearScreenDown()` method clears the given `TTY` stream from
* the current position of the cursor down.
* @since v0.7.7
* @param callback Invoked once the operation completes.
* @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
*/
export function clearScreenDown(
stream: Writable,
callback?: () => void,
): boolean;
/**
* The `readline.cursorTo()` method moves cursor to the specified position in a
* given `TTY` `stream`.
* @since v0.7.7
* @param callback Invoked once the operation completes.
* @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
*/
export function cursorTo(
stream: Writable,
x: number,
y?: number,
callback?: () => void,
): boolean;
/**
* The `readline.moveCursor()` method moves the cursor _relative_ to its current
* position in a given `TTY` `stream`.
*
* ## Example: Tiny CLI
*
* The following example illustrates the use of `readline.Interface` class to
* implement a small command-line interface:
*
* ```js
* const readline = require('readline');
* const rl = readline.createInterface({
* input: process.stdin,
* output: process.stdout,
* prompt: 'OHAI> '
* });
*
* rl.prompt();
*
* rl.on('line', (line) => {
* switch (line.trim()) {
* case 'hello':
* console.log('world!');
* break;
* default:
* console.log(`Say what? I might have heard '${line.trim()}'`);
* break;
* }
* rl.prompt();
* }).on('close', () => {
* console.log('Have a great day!');
* process.exit(0);
* });
* ```
*
* ## Example: Read file stream line-by-Line
*
* A common use case for `readline` is to consume an input file one line at a
* time. The easiest way to do so is leveraging the `fs.ReadStream` API as
* well as a `for await...of` loop:
*
* ```js
* const fs = require('fs');
* const readline = require('readline');
*
* async function processLineByLine() {
* const fileStream = fs.createReadStream('input.txt');
*
* const rl = readline.createInterface({
* input: fileStream,
* crlfDelay: Infinity
* });
* // Note: we use the crlfDelay option to recognize all instances of CR LF
* // ('\r\n') in input.txt as a single line break.
*
* for await (const line of rl) {
* // Each line in input.txt will be successively available here as `line`.
* console.log(`Line from file: ${line}`);
* }
* }
*
* processLineByLine();
* ```
*
* Alternatively, one could use the `'line'` event:
*
* ```js
* const fs = require('fs');
* const readline = require('readline');
*
* const rl = readline.createInterface({
* input: fs.createReadStream('sample.txt'),
* crlfDelay: Infinity
* });
*
* rl.on('line', (line) => {
* console.log(`Line from file: ${line}`);
* });
* ```
*
* Currently, `for await...of` loop can be a bit slower. If `async` / `await`flow and speed are both essential, a mixed approach can be applied:
*
* ```js
* const { once } = require('events');
* const { createReadStream } = require('fs');
* const { createInterface } = require('readline');
*
* (async function processLineByLine() {
* try {
* const rl = createInterface({
* input: createReadStream('big-file.txt'),
* crlfDelay: Infinity
* });
*
* rl.on('line', (line) => {
* // Process the line.
* });
*
* await once(rl, 'close');
*
* console.log('File processed.');
* } catch (err) {
* console.error(err);
* }
* })();
* ```
* @since v0.7.7
* @param callback Invoked once the operation completes.
* @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
*/
export function moveCursor(
stream: Writable,
dx: number,
dy: number,
callback?: () => void,
): boolean;
}
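The cursor helpers above (`cursorTo`, `clearLine`, `moveCursor`, `clearScreenDown`) operate directly on a writable TTY stream rather than on an `Interface`. A minimal sketch of redrawing a status line in place, assuming `process.stdout` is a TTY:

```ts
import { clearLine, cursorTo } from "node:readline";

// Rewrite the current row in place: jump to column 0, clear it, then print fresh text.
cursorTo(process.stdout, 0);
clearLine(process.stdout, 0); // dir = 0 clears the entire line
process.stdout.write("progress: 42%");
```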
declare module "node:readline" {
export * from "readline";
}

View File

@@ -1,150 +0,0 @@
/**
* The `readline/promises` module provides an API for reading lines of input from a Readable stream one line at a time.
*
* @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/readline/promises.js)
* @since v17.0.0
*/
declare module "readline/promises" {
import { Readable, Writable } from "node:stream";
import {
Interface as _Interface,
ReadLineOptions,
Completer,
AsyncCompleter,
Direction,
} from "node:readline";
import { Abortable } from "node:events";
class Interface extends _Interface {
/**
* The rl.question() method displays the query by writing it to the output, waits for user input to be provided on input,
* then invokes the callback function passing the provided input as the first argument.
*
* When called, rl.question() will resume the input stream if it has been paused.
*
* If the readlinePromises.Interface was created with output set to null or undefined the query is not written.
*
* If the question is called after rl.close(), it returns a rejected promise.
*
* Example usage:
*
* ```js
* const answer = await rl.question('What is your favorite food? ');
* console.log(`Oh, so your favorite food is ${answer}`);
* ```
*
* Using an AbortSignal to cancel a question.
*
* ```js
* const signal = AbortSignal.timeout(10_000);
*
* signal.addEventListener('abort', () => {
* console.log('The food question timed out');
* }, { once: true });
*
* const answer = await rl.question('What is your favorite food? ', { signal });
* console.log(`Oh, so your favorite food is ${answer}`);
* ```
*
* @since v17.0.0
* @param query A statement or query to write to output, prepended to the prompt.
*/
question(query: string): Promise<string>;
question(query: string, options: Abortable): Promise<string>;
}
class Readline {
/**
* @param stream A TTY stream.
*/
constructor(stream: Writable, options?: { autoCommit?: boolean });
/**
* The `rl.clearLine()` method adds to the internal list of pending action an action that clears current line of the associated `stream` in a specified direction identified by `dir`.
* Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor.
*/
clearLine(dir: Direction): this;
/**
* The `rl.clearScreenDown()` method adds to the internal list of pending action an action that clears the associated `stream` from the current position of the cursor down.
* Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor.
*/
clearScreenDown(): this;
/**
* The `rl.commit()` method sends all the pending actions to the associated `stream` and clears the internal list of pending actions.
*/
commit(): Promise<void>;
/**
* The `rl.cursorTo()` method adds to the internal list of pending action an action that moves cursor to the specified position in the associated `stream`.
* Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor.
*/
cursorTo(x: number, y?: number): this;
/**
* The `rl.moveCursor()` method adds to the internal list of pending action an action that moves the cursor relative to its current position in the associated `stream`.
* Call `rl.commit()` to see the effect of this method, unless autoCommit: true was passed to the constructor.
*/
moveCursor(dx: number, dy: number): this;
/**
* The `rl.rollback()` method clears the internal list of pending actions without sending it to the associated `stream`.
*/
rollback(): this;
}
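Unlike the callback module, the `Readline` helper above queues cursor operations and only applies them on `commit()` (or immediately when constructed with `autoCommit: true`). A minimal sketch:

```ts
import { Readline } from "node:readline/promises";

// Queue two cursor actions, then flush both to stdout in a single commit().
const rl = new Readline(process.stdout);
rl.cursorTo(0, 0).clearScreenDown();
await rl.commit();
```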
/**
* The `readlinePromises.createInterface()` method creates a new `readlinePromises.Interface` instance.
*
* ```js
* const readlinePromises = require('node:readline/promises');
* const rl = readlinePromises.createInterface({
* input: process.stdin,
* output: process.stdout
* });
* ```
*
* Once the `readlinePromises.Interface` instance is created, the most common case is to listen for the `'line'` event:
*
* ```js
* rl.on('line', (line) => {
* console.log(`Received: ${line}`);
* });
* ```
*
* If `terminal` is `true` for this instance then the `output` stream will get the best compatibility if it defines an `output.columns` property,
* and emits a `'resize'` event on the `output`, if or when the columns ever change (`process.stdout` does this automatically when it is a TTY).
*
* ## Use of the `completer` function
*
* The `completer` function takes the current line entered by the user as an argument, and returns an `Array` with 2 entries:
*
* - An Array with matching entries for the completion.
* - The substring that was used for the matching.
*
* For instance: `[[substr1, substr2, ...], originalsubstring]`.
*
* ```js
* function completer(line) {
* const completions = '.help .error .exit .quit .q'.split(' ');
* const hits = completions.filter((c) => c.startsWith(line));
* // Show all completions if none found
* return [hits.length ? hits : completions, line];
* }
* ```
*
* The `completer` function can also returns a `Promise`, or be asynchronous:
*
* ```js
* async function completer(linePartial) {
* await someAsyncWork();
* return [['123'], linePartial];
* }
* ```
*/
function createInterface(
input: Readable,
output?: Writable,
completer?: Completer | AsyncCompleter,
terminal?: boolean,
): Interface;
function createInterface(options: ReadLineOptions): Interface;
}
declare module "node:readline/promises" {
export * from "readline/promises";
}

View File

@@ -53,7 +53,7 @@ const packageJSON = {
files: ["types.d.ts", "README.md"],
private: false,
keywords: ["bun", "bun.js", "types"],
repository: "https://github.com/oven-sh/bun",
repository: "https://github.com/oven-sh/bun-types",
homepage: "https://bun.sh",
};

View File

@@ -11,10 +11,3 @@ declare global {
expectType<"WHATEVER">(process.env.WHATEVER);
export {};
new Bun.Transpiler({
macros: {
"react-relay": {
graphql: "bun-macro-relay/bun-macro-relay.tsx",
},
},
});

View File

@@ -1,11 +0,0 @@
import * as net from "node:net";
const socket = net.connect({
port: 80,
host: "localhost",
});
socket.connect({
port: 80,
host: "localhost",
});

View File

@@ -1,9 +0,0 @@
import * as readline from "node:readline/promises";
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
terminal: true,
});
const answer = await rl.question("What is your age?\n");
console.log("Your age is: " + answer);

View File

@@ -1,6 +0,0 @@
import tls from "node:tls";
tls.connect({
host: "localhost",
port: 80,
});

View File

@@ -10,18 +10,11 @@
*/
declare module "timers" {
class Timer {
ref(): void;
unref(): void;
hasRef(): boolean;
}
const _exported: {
clearTimeout: (timer: Timer | number) => void;
clearInterval: (timer: Timer | number) => void;
setInterval: (cb: CallableFunction, msDelay: number, ...args: any[]) => Timer;
setTimeout: (cb: CallableFunction, msDelay: number, ...args: any[]) => Timer;
setImmediate: (cb: CallableFunction, ...args: any[]) => Timer;
clearTimeout: typeof clearTimeout;
clearInterval: typeof clearInterval;
setTimeout: typeof setTimeout;
setInterval: typeof setInterval;
};
export = _exported;
}
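One side of this hunk types the exports around a `Timer` class with `ref()`/`unref()`/`hasRef()`; the other reuses the global `setTimeout`/`clearTimeout` signatures. A minimal sketch against the `Timer`-returning shape, assuming `node:timers` resolves to this module and an esModuleInterop-style default import:

```ts
import timers from "node:timers";

const t = timers.setTimeout(() => console.log("tick"), 1_000);
t.unref();               // drop the event-loop reference so this timer alone won't keep the process alive
console.log(t.hasRef()); // false after unref()
timers.clearTimeout(t);
```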

File diff suppressed because it is too large

View File

@@ -5,9 +5,8 @@ const Output = @import("output.zig");
const use_mimalloc = @import("bun").use_mimalloc;
const StringTypes = @import("./string_types.zig");
const Mimalloc = @import("bun").Mimalloc;
const bun = @import("bun");
const BASE_VERSION = "0.5";
const BASE_VERSION = "0.4";
pub const build_id = std.fmt.parseInt(u64, std.mem.trim(u8, @embedFile("./build-id"), "\n \r\t"), 10) catch unreachable;
pub const package_json_version = if (Environment.isDebug)
@@ -144,8 +143,8 @@ pub const BunInfo = struct {
framework_version: string = "",
const Analytics = @import("./analytics/analytics_thread.zig");
const JSON = bun.JSON;
const JSAst = bun.JSAst;
const JSON = @import("./json_parser.zig");
const JSAst = @import("./js_ast.zig");
pub fn generate(comptime Bundler: type, bundler: Bundler, allocator: std.mem.Allocator) !JSAst.Expr {
var info = BunInfo{
.bun_version = Global.package_json_version,

View File

@@ -298,7 +298,6 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
if (!loaded) {
instance = Self{
.allocator = allocator,
.backing_buf_used = 0,
};
loaded = true;
}

View File

@@ -293,10 +293,10 @@ pub const GenerateHeader = struct {
}
_ = forOS();
const release = std.mem.span(&linux_os_name.release);
const sliced_string = Semver.SlicedString.init(release, release);
const result = Semver.Version.parse(sliced_string, bun.default_allocator);
var sliced_string = Semver.SlicedString.init(release, release);
var result = Semver.Version.parse(sliced_string, bun.default_allocator);
// we only care about major, minor, patch so we don't care about the string
return result.version.fill();
return result.version;
}
pub fn forLinux() Analytics.Platform {

1
src/api/.gitignore vendored
View File

@@ -1 +0,0 @@
*.wasm

319
src/api/schema.d.ts generated vendored
View File

@@ -702,90 +702,228 @@ export interface BunInstall {
global_bin_dir?: string;
}
export declare function encodeStackFrame(message: StackFrame, bb: ByteBuffer): void;
export declare function encodeStackFrame(
message: StackFrame,
bb: ByteBuffer,
): void;
export declare function decodeStackFrame(buffer: ByteBuffer): StackFrame;
export declare function encodeStackFramePosition(message: StackFramePosition, bb: ByteBuffer): void;
export declare function decodeStackFramePosition(buffer: ByteBuffer): StackFramePosition;
export declare function encodeSourceLine(message: SourceLine, bb: ByteBuffer): void;
export declare function encodeStackFramePosition(
message: StackFramePosition,
bb: ByteBuffer,
): void;
export declare function decodeStackFramePosition(
buffer: ByteBuffer,
): StackFramePosition;
export declare function encodeSourceLine(
message: SourceLine,
bb: ByteBuffer,
): void;
export declare function decodeSourceLine(buffer: ByteBuffer): SourceLine;
export declare function encodeStackTrace(message: StackTrace, bb: ByteBuffer): void;
export declare function encodeStackTrace(
message: StackTrace,
bb: ByteBuffer,
): void;
export declare function decodeStackTrace(buffer: ByteBuffer): StackTrace;
export declare function encodeJSException(message: JSException, bb: ByteBuffer): void;
export declare function encodeJSException(
message: JSException,
bb: ByteBuffer,
): void;
export declare function decodeJSException(buffer: ByteBuffer): JSException;
export declare function encodeProblems(message: Problems, bb: ByteBuffer): void;
export declare function decodeProblems(buffer: ByteBuffer): Problems;
export declare function encodeRouter(message: Router, bb: ByteBuffer): void;
export declare function decodeRouter(buffer: ByteBuffer): Router;
export declare function encodeFallbackMessageContainer(message: FallbackMessageContainer, bb: ByteBuffer): void;
export declare function decodeFallbackMessageContainer(buffer: ByteBuffer): FallbackMessageContainer;
export declare function encodeFallbackMessageContainer(
message: FallbackMessageContainer,
bb: ByteBuffer,
): void;
export declare function decodeFallbackMessageContainer(
buffer: ByteBuffer,
): FallbackMessageContainer;
export declare function encodeJSX(message: JSX, bb: ByteBuffer): void;
export declare function decodeJSX(buffer: ByteBuffer): JSX;
export declare function encodeStringPointer(message: StringPointer, bb: ByteBuffer): void;
export declare function encodeStringPointer(
message: StringPointer,
bb: ByteBuffer,
): void;
export declare function decodeStringPointer(buffer: ByteBuffer): StringPointer;
export declare function encodeJavascriptBundledModule(message: JavascriptBundledModule, bb: ByteBuffer): void;
export declare function decodeJavascriptBundledModule(buffer: ByteBuffer): JavascriptBundledModule;
export declare function encodeJavascriptBundledPackage(message: JavascriptBundledPackage, bb: ByteBuffer): void;
export declare function decodeJavascriptBundledPackage(buffer: ByteBuffer): JavascriptBundledPackage;
export declare function encodeJavascriptBundle(message: JavascriptBundle, bb: ByteBuffer): void;
export declare function decodeJavascriptBundle(buffer: ByteBuffer): JavascriptBundle;
export declare function encodeJavascriptBundleContainer(message: JavascriptBundleContainer, bb: ByteBuffer): void;
export declare function decodeJavascriptBundleContainer(buffer: ByteBuffer): JavascriptBundleContainer;
export declare function encodeModuleImportRecord(message: ModuleImportRecord, bb: ByteBuffer): void;
export declare function decodeModuleImportRecord(buffer: ByteBuffer): ModuleImportRecord;
export declare function encodeJavascriptBundledModule(
message: JavascriptBundledModule,
bb: ByteBuffer,
): void;
export declare function decodeJavascriptBundledModule(
buffer: ByteBuffer,
): JavascriptBundledModule;
export declare function encodeJavascriptBundledPackage(
message: JavascriptBundledPackage,
bb: ByteBuffer,
): void;
export declare function decodeJavascriptBundledPackage(
buffer: ByteBuffer,
): JavascriptBundledPackage;
export declare function encodeJavascriptBundle(
message: JavascriptBundle,
bb: ByteBuffer,
): void;
export declare function decodeJavascriptBundle(
buffer: ByteBuffer,
): JavascriptBundle;
export declare function encodeJavascriptBundleContainer(
message: JavascriptBundleContainer,
bb: ByteBuffer,
): void;
export declare function decodeJavascriptBundleContainer(
buffer: ByteBuffer,
): JavascriptBundleContainer;
export declare function encodeModuleImportRecord(
message: ModuleImportRecord,
bb: ByteBuffer,
): void;
export declare function decodeModuleImportRecord(
buffer: ByteBuffer,
): ModuleImportRecord;
export declare function encodeModule(message: Module, bb: ByteBuffer): void;
export declare function decodeModule(buffer: ByteBuffer): Module;
export declare function encodeStringMap(message: StringMap, bb: ByteBuffer): void;
export declare function encodeStringMap(
message: StringMap,
bb: ByteBuffer,
): void;
export declare function decodeStringMap(buffer: ByteBuffer): StringMap;
export declare function encodeLoaderMap(message: LoaderMap, bb: ByteBuffer): void;
export declare function encodeLoaderMap(
message: LoaderMap,
bb: ByteBuffer,
): void;
export declare function decodeLoaderMap(buffer: ByteBuffer): LoaderMap;
export declare function encodeEnvConfig(message: EnvConfig, bb: ByteBuffer): void;
export declare function encodeEnvConfig(
message: EnvConfig,
bb: ByteBuffer,
): void;
export declare function decodeEnvConfig(buffer: ByteBuffer): EnvConfig;
export declare function encodeLoadedEnvConfig(message: LoadedEnvConfig, bb: ByteBuffer): void;
export declare function decodeLoadedEnvConfig(buffer: ByteBuffer): LoadedEnvConfig;
export declare function encodeFrameworkConfig(message: FrameworkConfig, bb: ByteBuffer): void;
export declare function decodeFrameworkConfig(buffer: ByteBuffer): FrameworkConfig;
export declare function encodeFrameworkEntryPoint(message: FrameworkEntryPoint, bb: ByteBuffer): void;
export declare function decodeFrameworkEntryPoint(buffer: ByteBuffer): FrameworkEntryPoint;
export declare function encodeFrameworkEntryPointMap(message: FrameworkEntryPointMap, bb: ByteBuffer): void;
export declare function decodeFrameworkEntryPointMap(buffer: ByteBuffer): FrameworkEntryPointMap;
export declare function encodeFrameworkEntryPointMessage(message: FrameworkEntryPointMessage, bb: ByteBuffer): void;
export declare function decodeFrameworkEntryPointMessage(buffer: ByteBuffer): FrameworkEntryPointMessage;
export declare function encodeLoadedFramework(message: LoadedFramework, bb: ByteBuffer): void;
export declare function decodeLoadedFramework(buffer: ByteBuffer): LoadedFramework;
export declare function encodeLoadedRouteConfig(message: LoadedRouteConfig, bb: ByteBuffer): void;
export declare function decodeLoadedRouteConfig(buffer: ByteBuffer): LoadedRouteConfig;
export declare function encodeRouteConfig(message: RouteConfig, bb: ByteBuffer): void;
export declare function encodeLoadedEnvConfig(
message: LoadedEnvConfig,
bb: ByteBuffer,
): void;
export declare function decodeLoadedEnvConfig(
buffer: ByteBuffer,
): LoadedEnvConfig;
export declare function encodeFrameworkConfig(
message: FrameworkConfig,
bb: ByteBuffer,
): void;
export declare function decodeFrameworkConfig(
buffer: ByteBuffer,
): FrameworkConfig;
export declare function encodeFrameworkEntryPoint(
message: FrameworkEntryPoint,
bb: ByteBuffer,
): void;
export declare function decodeFrameworkEntryPoint(
buffer: ByteBuffer,
): FrameworkEntryPoint;
export declare function encodeFrameworkEntryPointMap(
message: FrameworkEntryPointMap,
bb: ByteBuffer,
): void;
export declare function decodeFrameworkEntryPointMap(
buffer: ByteBuffer,
): FrameworkEntryPointMap;
export declare function encodeFrameworkEntryPointMessage(
message: FrameworkEntryPointMessage,
bb: ByteBuffer,
): void;
export declare function decodeFrameworkEntryPointMessage(
buffer: ByteBuffer,
): FrameworkEntryPointMessage;
export declare function encodeLoadedFramework(
message: LoadedFramework,
bb: ByteBuffer,
): void;
export declare function decodeLoadedFramework(
buffer: ByteBuffer,
): LoadedFramework;
export declare function encodeLoadedRouteConfig(
message: LoadedRouteConfig,
bb: ByteBuffer,
): void;
export declare function decodeLoadedRouteConfig(
buffer: ByteBuffer,
): LoadedRouteConfig;
export declare function encodeRouteConfig(
message: RouteConfig,
bb: ByteBuffer,
): void;
export declare function decodeRouteConfig(buffer: ByteBuffer): RouteConfig;
export declare function encodeTransformOptions(message: TransformOptions, bb: ByteBuffer): void;
export declare function decodeTransformOptions(buffer: ByteBuffer): TransformOptions;
export declare function encodeFileHandle(message: FileHandle, bb: ByteBuffer): void;
export declare function encodeTransformOptions(
message: TransformOptions,
bb: ByteBuffer,
): void;
export declare function decodeTransformOptions(
buffer: ByteBuffer,
): TransformOptions;
export declare function encodeFileHandle(
message: FileHandle,
bb: ByteBuffer,
): void;
export declare function decodeFileHandle(buffer: ByteBuffer): FileHandle;
export declare function encodeTransform(message: Transform, bb: ByteBuffer): void;
export declare function encodeTransform(
message: Transform,
bb: ByteBuffer,
): void;
export declare function decodeTransform(buffer: ByteBuffer): Transform;
export declare function encodeScan(message: Scan, bb: ByteBuffer): void;
export declare function decodeScan(buffer: ByteBuffer): Scan;
export declare function encodeScanResult(message: ScanResult, bb: ByteBuffer): void;
export declare function encodeScanResult(
message: ScanResult,
bb: ByteBuffer,
): void;
export declare function decodeScanResult(buffer: ByteBuffer): ScanResult;
export declare function encodeScannedImport(message: ScannedImport, bb: ByteBuffer): void;
export declare function encodeScannedImport(
message: ScannedImport,
bb: ByteBuffer,
): void;
export declare function decodeScannedImport(buffer: ByteBuffer): ScannedImport;
export declare function encodeOutputFile(message: OutputFile, bb: ByteBuffer): void;
export declare function encodeOutputFile(
message: OutputFile,
bb: ByteBuffer,
): void;
export declare function decodeOutputFile(buffer: ByteBuffer): OutputFile;
export declare function encodeTransformResponse(message: TransformResponse, bb: ByteBuffer): void;
export declare function decodeTransformResponse(buffer: ByteBuffer): TransformResponse;
export declare function encodeTransformResponse(
message: TransformResponse,
bb: ByteBuffer,
): void;
export declare function decodeTransformResponse(
buffer: ByteBuffer,
): TransformResponse;
export declare function encodeLocation(message: Location, bb: ByteBuffer): void;
export declare function decodeLocation(buffer: ByteBuffer): Location;
export declare function encodeMessageData(message: MessageData, bb: ByteBuffer): void;
export declare function encodeMessageData(
message: MessageData,
bb: ByteBuffer,
): void;
export declare function decodeMessageData(buffer: ByteBuffer): MessageData;
export declare function encodeMessageMeta(message: MessageMeta, bb: ByteBuffer): void;
export declare function encodeMessageMeta(
message: MessageMeta,
bb: ByteBuffer,
): void;
export declare function decodeMessageMeta(buffer: ByteBuffer): MessageMeta;
export declare function encodeMessage(message: Message, bb: ByteBuffer): void;
export declare function decodeMessage(buffer: ByteBuffer): Message;
export declare function encodeLog(message: Log, bb: ByteBuffer): void;
export declare function decodeLog(buffer: ByteBuffer): Log;
export declare function encodeWebsocketMessage(message: WebsocketMessage, bb: ByteBuffer): void;
export declare function decodeWebsocketMessage(buffer: ByteBuffer): WebsocketMessage;
export declare function encodeWebsocketMessageWelcome(message: WebsocketMessageWelcome, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageWelcome(buffer: ByteBuffer): WebsocketMessageWelcome;
export declare function encodeWebsocketMessage(
message: WebsocketMessage,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketMessage(
buffer: ByteBuffer,
): WebsocketMessage;
export declare function encodeWebsocketMessageWelcome(
message: WebsocketMessageWelcome,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketMessageWelcome(
buffer: ByteBuffer,
): WebsocketMessageWelcome;
export declare function encodeWebsocketMessageFileChangeNotification(
message: WebsocketMessageFileChangeNotification,
bb: ByteBuffer,
@@ -793,26 +931,69 @@ export declare function encodeWebsocketMessageFileChangeNotification(
export declare function decodeWebsocketMessageFileChangeNotification(
buffer: ByteBuffer,
): WebsocketMessageFileChangeNotification;
export declare function encodeWebsocketCommand(message: WebsocketCommand, bb: ByteBuffer): void;
export declare function decodeWebsocketCommand(buffer: ByteBuffer): WebsocketCommand;
export declare function encodeWebsocketCommandBuild(message: WebsocketCommandBuild, bb: ByteBuffer): void;
export declare function decodeWebsocketCommandBuild(buffer: ByteBuffer): WebsocketCommandBuild;
export declare function encodeWebsocketCommandManifest(message: WebsocketCommandManifest, bb: ByteBuffer): void;
export declare function decodeWebsocketCommandManifest(buffer: ByteBuffer): WebsocketCommandManifest;
export declare function encodeWebsocketMessageBuildSuccess(message: WebsocketMessageBuildSuccess, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageBuildSuccess(buffer: ByteBuffer): WebsocketMessageBuildSuccess;
export declare function encodeWebsocketMessageBuildFailure(message: WebsocketMessageBuildFailure, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageBuildFailure(buffer: ByteBuffer): WebsocketMessageBuildFailure;
export declare function encodeWebsocketCommand(
message: WebsocketCommand,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketCommand(
buffer: ByteBuffer,
): WebsocketCommand;
export declare function encodeWebsocketCommandBuild(
message: WebsocketCommandBuild,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketCommandBuild(
buffer: ByteBuffer,
): WebsocketCommandBuild;
export declare function encodeWebsocketCommandManifest(
message: WebsocketCommandManifest,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketCommandManifest(
buffer: ByteBuffer,
): WebsocketCommandManifest;
export declare function encodeWebsocketMessageBuildSuccess(
message: WebsocketMessageBuildSuccess,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketMessageBuildSuccess(
buffer: ByteBuffer,
): WebsocketMessageBuildSuccess;
export declare function encodeWebsocketMessageBuildFailure(
message: WebsocketMessageBuildFailure,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketMessageBuildFailure(
buffer: ByteBuffer,
): WebsocketMessageBuildFailure;
export declare function encodeWebsocketCommandBuildWithFilePath(
message: WebsocketCommandBuildWithFilePath,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketCommandBuildWithFilePath(buffer: ByteBuffer): WebsocketCommandBuildWithFilePath;
export declare function encodeWebsocketMessageResolveID(message: WebsocketMessageResolveID, bb: ByteBuffer): void;
export declare function decodeWebsocketMessageResolveID(buffer: ByteBuffer): WebsocketMessageResolveID;
export declare function encodeNPMRegistry(message: NPMRegistry, bb: ByteBuffer): void;
export declare function decodeWebsocketCommandBuildWithFilePath(
buffer: ByteBuffer,
): WebsocketCommandBuildWithFilePath;
export declare function encodeWebsocketMessageResolveID(
message: WebsocketMessageResolveID,
bb: ByteBuffer,
): void;
export declare function decodeWebsocketMessageResolveID(
buffer: ByteBuffer,
): WebsocketMessageResolveID;
export declare function encodeNPMRegistry(
message: NPMRegistry,
bb: ByteBuffer,
): void;
export declare function decodeNPMRegistry(buffer: ByteBuffer): NPMRegistry;
export declare function encodeNPMRegistryMap(message: NPMRegistryMap, bb: ByteBuffer): void;
export declare function decodeNPMRegistryMap(buffer: ByteBuffer): NPMRegistryMap;
export declare function encodeBunInstall(message: BunInstall, bb: ByteBuffer): void;
export declare function encodeNPMRegistryMap(
message: NPMRegistryMap,
bb: ByteBuffer,
): void;
export declare function decodeNPMRegistryMap(
buffer: ByteBuffer,
): NPMRegistryMap;
export declare function encodeBunInstall(
message: BunInstall,
bb: ByteBuffer,
): void;
export declare function decodeBunInstall(buffer: ByteBuffer): BunInstall;
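All of these declarations follow one pattern: encodeX(message, bb) serializes a message into a ByteBuffer, and the matching decodeX(buffer) reads it back out. A minimal round-trip sketch in TypeScript, assuming ByteBuffer can be imported from the peechy package, that BunInstall is a message type whose fields are all optional, and that the buffer exposes toUint8Array() (none of which is confirmed by this diff):

import { ByteBuffer } from "peechy"; // assumed import path
import { encodeBunInstall, decodeBunInstall } from "./schema";

// Serialize an empty BunInstall message (fields assumed optional) into a buffer.
const bb = new ByteBuffer();
encodeBunInstall({}, bb);

// Re-read it from the written bytes; toUint8Array() is assumed to exist on ByteBuffer.
const decoded = decodeBunInstall(new ByteBuffer(bb.toUint8Array()));
console.log(decoded);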

142
src/api/schema.js generated

@@ -122,7 +122,12 @@ function encodeStackFrame(message, bb) {
var value = message["scope"];
if (value != null) {
var encoded = StackFrameScope[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "StackFrameScope"');
if (encoded === void 0)
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "StackFrameScope"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "scope"');
@@ -499,7 +504,10 @@ function encodeFallbackMessageContainer(message, bb) {
if (value != null) {
bb.writeByte(3);
var encoded = FallbackStep[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "FallbackStep"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "FallbackStep"',
);
bb.writeByte(encoded);
}
@@ -612,7 +620,10 @@ function encodeJSX(message, bb) {
var value = message["runtime"];
if (value != null) {
var encoded = JSXRuntime[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "JSXRuntime"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "JSXRuntime"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "runtime"');
@@ -772,10 +783,12 @@ function decodeJavascriptBundle(bb) {
var length = bb.readVarUint();
var values = (result["modules"] = Array(length));
for (var i = 0; i < length; i++) values[i] = decodeJavascriptBundledModule(bb);
for (var i = 0; i < length; i++)
values[i] = decodeJavascriptBundledModule(bb);
var length = bb.readVarUint();
var values = (result["packages"] = Array(length));
for (var i = 0; i < length; i++) values[i] = decodeJavascriptBundledPackage(bb);
for (var i = 0; i < length; i++)
values[i] = decodeJavascriptBundledPackage(bb);
result["etag"] = bb.readByteArray();
result["generated_at"] = bb.readUint32();
result["app_package_json_dependencies_hash"] = bb.readByteArray();
@@ -829,7 +842,9 @@ function encodeJavascriptBundle(message, bb) {
if (value != null) {
bb.writeByteArray(value);
} else {
throw new Error('Missing required field "app_package_json_dependencies_hash"');
throw new Error(
'Missing required field "app_package_json_dependencies_hash"',
);
}
var value = message["import_from_name"];
@@ -951,7 +966,12 @@ function encodeModuleImportRecord(message, bb) {
var value = message["kind"];
if (value != null) {
var encoded = ModuleImportType[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "ModuleImportType"');
if (encoded === void 0)
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "ModuleImportType"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "kind"');
@@ -1078,7 +1098,10 @@ function encodeLoaderMap(message, bb) {
for (var i = 0; i < n; i++) {
value = values[i];
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Loader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Loader"',
);
bb.writeByte(encoded);
}
} else {
@@ -1152,7 +1175,10 @@ function encodeLoadedEnvConfig(message, bb) {
var value = message["dotenv"];
if (value != null) {
var encoded = DotEnvBehavior[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "DotEnvBehavior"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "DotEnvBehavior"',
);
bb.writeVarUint(encoded);
} else {
throw new Error('Missing required field "dotenv"');
@@ -1254,7 +1280,12 @@ function encodeFrameworkConfig(message, bb) {
if (value != null) {
bb.writeByte(6);
var encoded = CSSInJSBehavior[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "CSSInJSBehavior"');
if (encoded === void 0)
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "CSSInJSBehavior"',
);
bb.writeByte(encoded);
}
@@ -1286,7 +1317,11 @@ function encodeFrameworkEntryPoint(message, bb) {
if (value != null) {
var encoded = FrameworkEntryPointType[value];
if (encoded === void 0)
throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "FrameworkEntryPointType"');
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "FrameworkEntryPointType"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "kind"');
@@ -1435,7 +1470,12 @@ function encodeLoadedFramework(message, bb) {
var value = message["client_css_in_js"];
if (value != null) {
var encoded = CSSInJSBehavior[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "CSSInJSBehavior"');
if (encoded === void 0)
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "CSSInJSBehavior"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "client_css_in_js"');
@@ -1719,7 +1759,10 @@ function encodeTransformOptions(message, bb) {
if (value != null) {
bb.writeByte(3);
var encoded = ResolveMode[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "ResolveMode"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "ResolveMode"',
);
bb.writeByte(encoded);
}
@@ -1817,7 +1860,10 @@ function encodeTransformOptions(message, bb) {
if (value != null) {
bb.writeByte(15);
var encoded = Platform[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Platform"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Platform"',
);
bb.writeByte(encoded);
}
@@ -1891,7 +1937,10 @@ function encodeTransformOptions(message, bb) {
if (value != null) {
bb.writeByte(26);
var encoded = MessageLevel[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "MessageLevel"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "MessageLevel"',
);
bb.writeVarUint(encoded);
}
@@ -1899,7 +1948,10 @@ function encodeTransformOptions(message, bb) {
if (value != null) {
bb.writeByte(27);
var encoded = SourceMapMode[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "SourceMapMode"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "SourceMapMode"',
);
bb.writeByte(encoded);
}
bb.writeByte(0);
@@ -2006,7 +2058,10 @@ function encodeTransform(message, bb) {
if (value != null) {
bb.writeByte(4);
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Loader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Loader"',
);
bb.writeByte(encoded);
}
@@ -2061,7 +2116,10 @@ function encodeScan(message, bb) {
if (value != null) {
bb.writeByte(3);
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Loader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Loader"',
);
bb.writeByte(encoded);
}
bb.writeByte(0);
@@ -2126,7 +2184,10 @@ function encodeScannedImport(message, bb) {
var value = message["kind"];
if (value != null) {
var encoded = ImportKind[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "ImportKind"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "ImportKind"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "kind"');
@@ -2223,7 +2284,11 @@ function encodeTransformResponse(message, bb) {
if (value != null) {
var encoded = TransformResponseStatus[value];
if (encoded === void 0)
throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "TransformResponseStatus"');
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "TransformResponseStatus"',
);
bb.writeVarUint(encoded);
} else {
throw new Error('Missing required field "status"');
@@ -2434,7 +2499,10 @@ function encodeMessage(message, bb) {
var value = message["level"];
if (value != null) {
var encoded = MessageLevel[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "MessageLevel"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "MessageLevel"',
);
bb.writeVarUint(encoded);
} else {
throw new Error('Missing required field "level"');
@@ -2596,7 +2664,11 @@ function encodeWebsocketMessage(message, bb) {
if (value != null) {
var encoded = WebsocketMessageKind[value];
if (encoded === void 0)
throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "WebsocketMessageKind"');
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "WebsocketMessageKind"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "kind"');
@@ -2624,7 +2696,10 @@ function encodeWebsocketMessageWelcome(message, bb) {
var value = message["javascriptReloader"];
if (value != null) {
var encoded = Reloader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Reloader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Reloader"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "javascriptReloader"');
@@ -2664,7 +2739,10 @@ function encodeWebsocketMessageFileChangeNotification(message, bb) {
var value = message["loader"];
if (value != null) {
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Loader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Loader"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "loader"');
@@ -2684,7 +2762,11 @@ function encodeWebsocketCommand(message, bb) {
if (value != null) {
var encoded = WebsocketCommandKind[value];
if (encoded === void 0)
throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "WebsocketCommandKind"');
throw new Error(
"Invalid value " +
JSON.stringify(value) +
' for enum "WebsocketCommandKind"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "kind"');
@@ -2759,7 +2841,10 @@ function encodeWebsocketMessageBuildSuccess(message, bb) {
var value = message["loader"];
if (value != null) {
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Loader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Loader"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "loader"');
@@ -2809,7 +2894,10 @@ function encodeWebsocketMessageBuildFailure(message, bb) {
var value = message["loader"];
if (value != null) {
var encoded = Loader[value];
if (encoded === void 0) throw new Error("Invalid value " + JSON.stringify(value) + ' for enum "Loader"');
if (encoded === void 0)
throw new Error(
"Invalid value " + JSON.stringify(value) + ' for enum "Loader"',
);
bb.writeByte(encoded);
} else {
throw new Error('Missing required field "loader"');

Some files were not shown because too many files have changed in this diff.