Compare commits


2 Commits

Author         SHA1        Message                          Date
Dylan Conway   b185cd134b  always set exit code             2023-01-18 16:20:12 -08:00
Dylan Conway   37f72ef632  set exit code if signal is 42    2023-01-18 16:08:20 -08:00
473 changed files with 29914 additions and 50706 deletions

.github/workflows/bun-homebrew.yml (vendored, normal file, 30 lines)
View File

@@ -0,0 +1,30 @@
name: bun-homebrew
on:
release:
types:
- published
- edited
jobs:
homebrew:
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh' && github.event.release.published_at != null
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ env.HOMEBREW_TOKEN }}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: '2.6'
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ github.event.release.tag_name }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_message: Release ${{ github.event.release.tag_name }}

View File

@@ -1,165 +0,0 @@
name: bun-linux
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
TEST_TAG: bun-test'
on:
push:
branches:
- main
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
CPU_TARGET=${{matrix.cpu}}
WEBKIT_URL=${{matrix.webkit_url}}
GIT_SHA=${{github.sha}}
WEBKIT_BASENAME=${{matrix.webkit_basename}}
platforms: linux/${{matrix.build_arch}}
target: artifact
outputs: type=local,dest=${{runner.temp}}/release
- name: Zip
run: |
# if zip is not found
if [ ! -x "$(command -v zip)" ]; then
sudo apt-get update && sudo apt-get install -y zip --no-install-recommends
fi
if [ ! -x "$(command -v strip)" ]; then
sudo apt-get update && sudo apt-get install -y binutils --no-install-recommends
fi
cd ${{runner.temp}}/release
chmod +x bun-profile bun
mkdir bun-${{matrix.tag}}-profile
mkdir bun-${{matrix.tag}}
strip bun
mv bun-profile bun-${{matrix.tag}}-profile/bun-profile
mv bun bun-${{matrix.tag}}/bun
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
# - name: Sign Release
# id: sign-release
# if: |
# github.repository_owner == 'oven-sh'
# && github.ref == 'refs/heads/main'
# env:
# GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
# run: |
# echo "$GPG_PASSPHRASE" | bun run .scripts/sign-release.ts
# - name: Release Checksum
# id: release-checksum
# uses: ncipollo/release-action@v1
# if: |
# github.repository_owner == 'oven-sh'
# && github.ref == 'refs/heads/main'
# with:
# prerelease: true
# body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
# allowUpdates: true
# replacesArtifacts: true
# generateReleaseNotes: true
# artifactErrorsFailBuild: true
# token: ${{ secrets.GITHUB_TOKEN }}
# name: "Canary (${{github.sha}})"
# tag: "canary"
# artifacts: "SHASUMS256.txt,SHASUMS256.txt.asc"

View File

@@ -32,25 +32,32 @@ jobs:
runs-on: ${{matrix.runner}}
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
include:
- cpu: haswell
tag: linux-x64
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-linux-amd64-lto.tar.gz"
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: nehalem
- cpu: westmere
tag: linux-x64-baseline
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-linux-amd64-lto.tar.gz"
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v3
@@ -121,20 +128,10 @@ jobs:
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
@@ -146,3 +143,11 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies

View File

@@ -27,11 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
runs-on: zig-object
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
@@ -104,13 +104,13 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell
@@ -119,16 +119,16 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell
@@ -137,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native
@@ -145,7 +145,7 @@ jobs:
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true
@@ -242,14 +242,14 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -257,14 +257,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -370,9 +370,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"

View File

@@ -27,11 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
runs-on: zig-object
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
@@ -104,13 +104,13 @@ jobs:
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell
@@ -119,16 +119,16 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell
@@ -137,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native
@@ -145,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -243,14 +243,14 @@ jobs:
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -258,14 +258,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -374,9 +374,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"

View File

@@ -27,11 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
runs-on: zig-object
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
- cpu: haswell
@@ -104,13 +104,13 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell
@@ -119,16 +119,16 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell
@@ -137,7 +137,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native
@@ -145,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -245,14 +245,14 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
@@ -260,14 +260,14 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -376,9 +376,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"

View File

@@ -1,116 +0,0 @@
name: bun-release-canary
concurrency: release-canary
on:
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
jobs:
npm:
name: Release to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- canary publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- id: bun-install
name: Install Dependencies
run: |
bun upgrade --canary
bun install
- id: setup-env
name: Setup Environment
run: |
SHA=$(git rev-parse --short "$GITHUB_SHA")
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date '+%Y%m%d').1+${SHA}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: canary
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}

View File

@@ -1,210 +0,0 @@
name: bun-release
concurrency: release
on:
release:
types:
- published
workflow_dispatch:
inputs:
tag:
type: string
description: The tag to publish
required: true
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- "${{ env.TAG }}" publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- id: bun-install
name: Install Dependencies
run: |
bun upgrade --canary
bun install
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: environment
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: |
type=match,pattern=(bun-v)?(\d.\d.\d),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d.\d),group=2,value=${{ env.TAG }}
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
homebrew:
name: Release to Homebrew
runs-on: ubuntu-latest
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: "2.6"
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ env.TAG }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_options: --gpg-sign=${{ steps.setup-gpg.outputs.keyid }}
commit_message: Release ${{ env.TAG }}
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>

View File

@@ -0,0 +1,60 @@
name: Release bun-types@canary
on:
push:
branches: [main]
paths:
- 'packages/bun-types/**'
jobs:
tests:
name: Build, test, publish canary
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Install Node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install dependencies
run: bun install
- name: Generate package
run: bun run build
- name: Tests
run: bun run test
- name: Set temp version
working-directory: packages/bun-types/dist
run: |
git_hash=$(git rev-parse --short "$GITHUB_SHA")
new_pkg_version="$(bun --version)-canary.${git_hash}"
echo "new_pkg_version"
echo "${new_pkg_version}"
npm version ${new_pkg_version} --no-git-tag-version
- name: Publish to NPM
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_BUN_TYPES_TOKEN }}
# dry-run: true
tag: canary
# - name: Publish on NPM
# working-directory: packages/bun-types/dist
# run: npm publish --access public --tag canary # --dry-run
# env:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# NODE_AUTH_TOKEN: ${{ secrets.NPM_BUN_TYPES_TOKEN }}

.github/workflows/bun-types-release.yml (vendored, normal file, 135 lines)
View File

@@ -0,0 +1,135 @@
name: Release bun-types
on:
workflow_dispatch:
jobs:
test-build:
name: Test & Build
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install dependencies
run: bun upgrade --canary; bun install
- name: Build package
run: bun run build
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist/*
if-no-files-found: error
publish-npm:
name: Publish to NPM
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install Node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: "https://registry.npmjs.org"
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist
- name: Publish to NPM
working-directory: packages/bun-types/dist
run: npm publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
publish-gpr:
name: Publish on GPR
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: "https://npm.pkg.github.com/"
scope: "@oven-sh"
- name: Install bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist
- name: Add scope to name
run: bun scripts/gpr.ts
- name: Publish on GPR
run: cd dist/ && npm publish --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# no need for separate releases now
# create-release:
# name: Create Release
# runs-on: ubuntu-latest
# needs: [test-build]
# defaults:
# run:
# working-directory: packages/bun-types
# if: github.repository_owner == 'oven-sh'
# steps:
# - name: Download all artifacts
# uses: actions/download-artifact@v3
# with:
# name: bun-types
# path: packages/bun-types/dist
# - name: Set version
# run: echo "version=$(jq --raw-output '.version' dist/package.json)" >> $GITHUB_ENV
# - name: Create Release
# uses: softprops/action-gh-release@v0.1.14
# with:
# tag_name: "v${{ env.version }}"
# body: "This is the release of bun-types that corresponds to the commit [${{ github.sha }}]"
# token: ${{ secrets.GITHUB_TOKEN }}
# files: |
# dist/*

.gitignore (vendored, 4 lines changed)
View File

@@ -1,6 +1,7 @@
.DS_Store
zig-cache
packages/*/*.wasm
*.wasm
*.o
*.a
profile.json
@@ -109,4 +110,3 @@ misctools/machbench
bun-webkit
src/deps/c-ares/build
src/bun.js/debug-bindings-obj

View File

@@ -1,6 +1,7 @@
{
"arrowParens": "avoid",
"printWidth": 120,
"trailingComma": "all",
"useTabs": false
"tabWidth": 2,
"useTabs": false,
"singleQuote": false,
"bracketSpacing": true,
"trailingComma": "all"
}

View File

@@ -1,7 +1,6 @@
{
"recommendations": [
"AugusteRame.zls-vscode",
"JarredSumner.zig-unofficial",
"esbenp.prettier-vscode",
"xaver.clang-format",
"vadimcn.vscode-lldb"

View File

@@ -13,7 +13,7 @@
"[zig]": {
"editor.tabSize": 4,
"editor.useTabStops": false,
"editor.defaultFormatter": "JarredSumner.zig-unofficial",
"editor.defaultFormatter": "tiehuis.zig",
"editor.formatOnSave": true
},
"[ts]": {
@@ -182,11 +182,7 @@
"ethernet.h": "c",
"inet.h": "c",
"packet.h": "c",
"queue": "cpp",
"compare": "cpp",
"concepts": "cpp",
"typeindex": "cpp",
"__verbose_abort": "cpp"
"queue": "cpp"
},
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "Enabled",

View File

@@ -1,86 +0,0 @@
# Contributing to Bun
All contributions need test coverage. If you are adding a new feature, please add a test. If you are fixing a bug, please add a test that fails before your fix and passes after your fix.
## Bun's codebase
Bun is written mostly in Zig, but WebKit & JavaScriptCore (the JavaScript engine) is written in C++.
Today (February 2023), Bun's codebase has five distinct parts:
- JavaScript, JSX, & TypeScript transpiler, module resolver, and related code
- JavaScript runtime (`src/bun.js/`)
- JavaScript runtime bindings (`src/bun.zig/bindings/**/*.cpp`)
- Package manager (`src/install/`)
- Shared utilities (`src/string_immutable.zig`)
The JavaScript transpiler & module resolver is mostly independent from the runtime. It predates the runtime and is entirely in Zig. The JavaScript parser is mostly in `src/js_parser.zig`. The JavaScript AST data structures are mostly in `src/js_ast.zig`. The JavaScript lexer is in `src/js_lexer.zig`. A lot of this code started as a port of esbuild's equivalent code from Go to Zig, but has had many small changes since then.
## Memory management in Bun
For the Zig code, please:
1. Do your best to avoid dynamically allocating memory.
2. If we need to allocate memory, carefully consider the owner of that memory. If it's a JavaScript object, it will need a finalizer. If it's in Zig, it will need to be freed either via an arena or manually.
3. Prefer arenas over manual memory management. Manually freeing memory is leak & crash prone.
4. If the memory needs to be accessed across threads, use `bun.default_allocator`. Mimalloc threadlocal heaps are not safe to free across threads.
The JavaScript transpiler has special-handling for memory management. The parser allocates into a single arena and the memory is recycled after each parse.
## JavaScript runtime
Most of Bun's JavaScript runtime code lives in `src/bun.js`.
### Calling C++ from Zig & Zig from C++
TODO: document this (see bindings.zig and bindings.cpp for now)
### Adding a new JavaScript class
1. Add a new file in `src/bun.js/*.classes.ts` to define the instance and static methods for the class.
2. Add a new file in `src/bun.js/**/*.zig` and expose the struct in `src/bun.js/generated_classes_list.zig`
3. Run `make codegen`
Copy from examples like `Subprocess` or `Response`.
### ESM modules
Bun implements ESM modules in a mix of native code and JavaScript.
Several Node.js modules are implemented in JavaScript and loosely based on browserify polyfills.
The ESM modules in Bun are located in `src/bun.js/*.exports.js`. Unlike other code in Bun, these files are NOT transpiled. They are loaded directly into the JavaScriptCore VM. That means `require` does not work in these files. Instead, you must use `import.meta.require`, or ideally, not use require/import other files at all.
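As a rough sketch of that pattern (a hypothetical module written in the style of the `*.exports.js` files, not an actual file from the repo), such a module avoids top-level `require` and reaches for `import.meta.require` only when another module is genuinely needed:
```js
// Hypothetical example in the style of src/bun.js/*.exports.js.
// These files are not transpiled, so CommonJS `require` is unavailable;
// Bun's `import.meta.require` hook is used instead.
const { EventEmitter } = import.meta.require("node:events");

export function createEmitter() {
  // Plain ESM exports work because the file is loaded directly as a module by the VM.
  return new EventEmitter();
}
```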
The module loader is in `src/bun.js/module_loader.zig`.
### JavaScript Builtins
JavaScript builtins are located in `src/bun.js/builtins/*.js`.
These files support a JavaScriptCore-only syntax for internal slots. `@` is used to access an internal slot. For example: `new @Array(123)` will create a new `Array` similar to `new Array(123)`, except if a library modifies the `Array` global, it will not affect the internal slot (`@Array`). These names must be allow-listed in `BunBuiltinNames.h` (though JavaScriptCore allowlists some names by default).
They cannot use or reference ES modules. The files that end with `*Internals.js` are automatically loaded globally. Most usage of internals right now is in the stream implementations (which share a lot of code with Safari/WebKit) and ImportMetaObject (which is how `require` is implemented in the runtime).
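For illustration only, a tiny builtin in this JavaScriptCore-specific syntax (it is not standard JavaScript and only compiles through JSC's builtin pipeline, not standalone) might look like:
```js
// Illustrative sketch, not a real file from the repo.
// `@Array` resolves through an internal slot to the original Array constructor,
// so user code that overwrites the `Array` global cannot interfere with it.
function makeFixedList(length) {
  "use strict";
  return new @Array(length); // behaves like `new Array(length)`, but tamper-proof
}
```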
To regenerate the builtins:
```sh
make clean-bindings && make generate-builtins && make bindings -j10
```
It is recommended that you have ccache installed or else you will spend a lot of time waiting for the bindings to compile.
### Memory management in Bun's JavaScript runtime
TODO: fill this out (for now, use `JSC.Strong` in most cases)
### Strings
TODO: fill this out (for now, use `JSValue.toSlice()` in most cases)
#### JavaScriptCore C API
Do not copy from examples leveraging the JavaScriptCore C API. Please do not use this in new code. We will not accept PRs that add new code that uses the JavaScriptCore C API.
## Testing
See `../test/README.md` for information on how to run tests.

View File

@@ -10,9 +10,9 @@ ARG ARCH=x86_64
ARG BUILD_MACHINE_ARCH=x86_64
ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=feb9
ARG WEBKIT_TAG=jul27-2
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.11.0-dev.1393+38eebf3c4"
ARG ZIG_VERSION="0.11.0-dev.1314+9856bea34"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
@@ -122,8 +122,8 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
@@ -149,9 +149,6 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/lol-html ${BUN_DIR}/src/deps/lol-html
@@ -306,6 +303,8 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
@@ -485,9 +484,6 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile

View File

@@ -8,7 +8,7 @@ ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
ARG BUN_PACKAGES_DIR=${BUN_DIR}/packages
ARG ZIG_VERSION="0.11.0-dev.1393+38eebf3c4"
ARG ZIG_VERSION="0.11.0-dev.1314+9856bea34"
ARG ZIG_FOLDERNAME=zig-linux-${ARCH}-${ZIG_VERSION}
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
@@ -99,7 +99,7 @@ RUN tar -xf ${ZIG_FILENAME} && \
mv ${ZIG_FOLDERNAME} ${ZIG_PATH};
RUN cd $GITHUB_WORKSPACE && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/feb9/bun-webkit-linux-$BUILDARCH.tar.gz && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-$BUILDARCH.tar.gz && \
tar -xzf bun-webkit-linux-$BUILDARCH.tar.gz && \
rm bun-webkit-linux-$BUILDARCH.tar.gz && \
cat $WEBKIT_OUT_DIR/include/cmakeconfig.h > /dev/null

View File

@@ -35,7 +35,7 @@ DOCKER_BUILDARCH = amd64
BREW_PREFIX_PATH = /usr/local
DEFAULT_MIN_MACOS_VERSION = 10.14
MARCH_NATIVE = -march=$(CPU_TARGET) -mtune=$(CPU_TARGET)
NATIVE_OR_OLD_MARCH = -march=nehalem
NATIVE_OR_OLD_MARCH = -march=westmere
endif
MIN_MACOS_VERSION ?= $(DEFAULT_MIN_MACOS_VERSION)
@@ -509,9 +509,6 @@ npm-install:
$(NPM_CLIENT) install --ignore-scripts --production
print-% : ; @echo $* = $($*)
get-% : ; @echo $($*)
print-version:
@echo $(PACKAGE_JSON_VERSION)
@@ -925,7 +922,7 @@ headers:
rm -f /tmp/build-jsc-headers src/bun.js/bindings/headers.zig
touch src/bun.js/bindings/headers.zig
$(ZIG) build headers-obj
$(CXX) $(PLATFORM_LINKER_FLAGS) $(JSC_FILES_DEBUG) ${ICU_FLAGS} $(BUN_LLD_FLAGS_WITHOUT_JSC) -g $(DEBUG_BIN)/headers.o -W -o /tmp/build-jsc-headers -lc;
$(CXX) $(PLATFORM_LINKER_FLAGS) $(JSC_FILES_DEBUG) ${ICU_FLAGS} $(DEBUG_IO_FILES) $(BUN_LLD_FLAGS_WITHOUT_JSC) -g $(DEBUG_BIN)/headers.o -W -o /tmp/build-jsc-headers -lc;
/tmp/build-jsc-headers
$(ZIG) translate-c src/bun.js/bindings/headers.h > src/bun.js/bindings/headers.zig
$(BUN_OR_NODE) misctools/headers-cleaner.js
@@ -1394,19 +1391,10 @@ bindings: $(DEBUG_OBJ_DIR) $(DEBUG_OBJ_FILES) $(DEBUG_WEBCORE_OBJ_FILES) $(DEBUG
.PHONY: jsc-bindings-mac
jsc-bindings-mac: bindings
# lInux only
MIMALLOC_VALGRIND_ENABLED_FLAG =
ifeq ($(OS_NAME),linux)
MIMALLOC_VALGRIND_ENABLED_FLAG = -DMI_VALGRIND=ON
endif
.PHONY: mimalloc-debug
mimalloc-debug:
rm -rf $(BUN_DEPS_DIR)/mimalloc/CMakeCache* $(BUN_DEPS_DIR)/mimalloc/CMakeFiles
cd $(BUN_DEPS_DIR)/mimalloc; make clean || echo ""; \
CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS_WITHOUT_RELEASE) ${MIMALLOC_OVERRIDE_FLAG} ${MIMALLOC_VALGRIND_ENABLED_FLAG} \
CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS_WITHOUT_RELEASE) ${MIMALLOC_OVERRIDE_FLAG} \
-DCMAKE_BUILD_TYPE=Debug \
-DMI_DEBUG_FULL=1 \
-DMI_SKIP_COLLECT_ON_EXIT=1 \

View File

@@ -23,32 +23,24 @@ Today, bun's primary focus is bun.js: bun's JavaScript runtime.
## Install
Bun supports Linux (x64 & arm64) and macOS (x64 & Apple Silicon). If you want to use Bun on Windows, you will need to use Windows Subsystem for Linux. Bun will [soon](https://github.com/oven-sh/bun/issues/43) support Windows, natively.
curl:
Native: (macOS x64 & Silicon, Linux x64, Windows Subsystem for Linux)
```sh
curl -fsSL https://bun.sh/install | bash
```
npm:
```sh
npm install -g bun
```
Homebrew:
Homebrew: (MacOS and Linux)
```sh
brew tap oven-sh/bun
brew install bun
```
Docker:
Docker: (Linux x64)
```sh
docker pull oven/bun
docker run --rm --init --ulimit memlock=-1:-1 oven/bun
docker pull jarredsumner/bun:edge
docker run --rm --init --ulimit memlock=-1:-1 jarredsumner/bun:edge
```
If using Linux, kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
@@ -424,21 +416,6 @@ Assuming a package.json with a `"clean"` command in `"scripts"`:
}
```
## Using bun as a WebAssembly runner
Bun v0.5.2 added experimental support for the [WebAssembly System Interface](https://github.com/WebAssembly/WASI) (WASI). This means you can run WebAssembly binaries in Bun.
To run a WASI binary, use `bun run`:
```bash
bun run ./my-wasm-app.wasm
# you can omit "run" if the filename ends with .wasm
bun ./my-wasm-app.wasm
```
WASI support is based on [wasi-js](https://github.com/sagemathinc/cowasm/tree/main/core/wasi-js). Currently, it only supports WASI binaries that use the `wasi_snapshot_preview1` or `wasi_unstable` APIs. Bun's implementation is not optimized for performance, but if this feature gets popular, we'll definitely invest time in making it faster.
## Creating a Discord bot with Bun
### Application Commands
@@ -604,6 +581,7 @@ You can see [Bun's Roadmap](https://github.com/oven-sh/bun/issues/159), but here
| ------------------------------------------------------------------------------------- | -------------- |
| Web Streams with Fetch API | bun.js |
| Web Streams with HTMLRewriter | bun.js |
| Package hoisting that matches npm behavior | bun install |
| Source Maps (unbundled is supported) | JS Bundler |
| Source Maps | CSS |
| JavaScript Minifier | JS Transpiler |
@@ -1277,7 +1255,7 @@ bun install --backend copyfile
**`symlink`** is typically only used for `file:` dependencies (and eventually `link:`) internally. To prevent infinite loops, it skips symlinking the `node_modules` folder.
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own node_modules folder or you pass `--preserve-symlinks` to `node`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has it's own node_modules folder or you pass `--preserve-symlinks` to `node`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
```bash
rm -rf node_modules
@@ -1298,7 +1276,7 @@ buns usage of `Cache-Control` ignores `Age`. This improves performance, but m
### `bun run`
`bun run` is a fast `package.json` script runner and executable runner. Instead of waiting 170ms for your npm client to start every time, you wait 6ms for bun.
`bun run` is a fast `package.json` script runner. Instead of waiting 170ms for your npm client to start every time, you wait 6ms for bun.
By default, `bun run` prints the script that will be invoked:
@@ -1313,7 +1291,7 @@ You can disable that with `--silent`
bun run --silent clean
```
`bun run ${script-name}` runs the equivalent of `npm run script-name`, `npx bin-name`, and `node file-name` all in one command. For example, `bun run dev` runs the `dev` script in `package.json`, which may sometimes spin up non-bun processes.
`bun run ${script-name}` runs the equivalent of `npm run script-name`. For example, `bun run dev` runs the `dev` script in `package.json`, which may sometimes spin up non-bun processes.
`bun run ${javascript-file.js}` will run it with bun, as long as the file doesn't have a node shebang.
@@ -2869,24 +2847,21 @@ console.log(address); // "2606:2800:220:1:248:1893:25c8:1946"
```
Bun supports three backends for DNS resolution:
- `c-ares` - This is the default on Linux, and it uses the [c-ares](https://c-ares.org/) library to perform DNS resolution.
- `system` - Uses the system's non-blocking DNS resolver, if available. Otherwise, falls back to `getaddrinfo`. This is the default on macOS, and the same as `getaddrinfo` on Linux.
- `getaddrinfo` - Uses the POSIX standard `getaddrinfo` function, which may cause performance issues under concurrent load.
- `c-ares` - This is the default on Linux, and it uses the [c-ares](https://c-ares.org/) library to perform DNS resolution.
- `system` - Uses the system's non-blocking DNS resolver, if available. Otherwise, falls back to `getaddrinfo`. This is the default on macOS, and the same as `getaddrinfo` on Linux.
- `getaddrinfo` - Uses the POSIX standard `getaddrinfo` function, which may cause performance issues under concurrent load.
You can choose a particular backend by specifying `backend` as an option.
```ts
import { dns } from "bun";
const [{ address, ttl }] = await dns.lookup("example.com", {
backend: "c-ares",
});
const [{ address, ttl }] = await dns.lookup("example.com", { backend: "c-ares" });
console.log(address); // "93.184.216.34"
console.log(ttl); // 21237
```
Note: the `ttl` property is only accurate when the `backend` is c-ares. Otherwise, `ttl` will be `0`.
Note: the `ttl` property is only accurate when the `backend` is c-ares. Otherwise, `ttl` will be `0`.
This was added in Bun v0.5.0.
@@ -4648,7 +4623,7 @@ It will check the lockfile for the version. If the lockfile doesn't have a versi
Lowlights:
- TypeScript type support isn't implemented yet
- TypeScript type support isn't implmented yet
- patch package support isn't implemented yet
#### Resolving packages
@@ -5009,7 +4984,6 @@ bun also statically links these libraries:
- [`c-ares`](https://github.com/c-ares/c-ares), which is MIT licensed
- `libicu` 72, which can be found here: <https://github.com/unicode-org/icu/blob/main/icu4c/LICENSE>
- A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets), which is Apache 2.0 licensed
- WASI implementation from [`wasi-js`](https://github.com/sagemathinc/cowasm/tree/main/core/wasi-js), which is BSD 3 clause licensed. Note that wasi-js is originally based on [wasmer-js](https://github.com/wasmerio/wasmer-js), which is MIT licensed. wasmer-js was based on [node-wasi](https://github.com/devsnek/node-wasi) by Gus Caplan (also MIT licensed). You can [read more about the history here](https://github.com/sagemathinc/cowasm/tree/main/core/wasi-js#history).
For compatibility reasons, these NPM packages are embedded into buns binary and injected if imported.
@@ -5133,7 +5107,7 @@ which clang-15
If it is not, you will have to run this to link it:
```bash
export PATH="$PATH:$(brew --prefix llvm@15)/bin"
export PATH="$(brew --prefix llvm@15)/bin"
export LDFLAGS="$LDFLAGS -L$(brew --prefix llvm@15)/lib"
export CPPFLAGS="$CPPFLAGS -I$(brew --prefix llvm@15)/include"
```

View File

@@ -1,49 +1,9 @@
import { bench, run } from "mitata";
const N = parseInt(process.argv.slice(2).at(0) || "10", 10);
var isBuffer = new Buffer(0);
var isNOtBuffer = "not a buffer";
bench("Buffer.isBuffer(buffer)", () => {
return Buffer.isBuffer(isBuffer);
});
{
var j = 0;
j += 1;
j += eval("'ok'");
bench("Buffer.isBuffer(string)", () => {
return Buffer.isBuffer(j);
});
}
bench("Buffer.from('short string')", () => {
return Buffer.from("short string");
});
const loooong = "long string".repeat(9999).split("").join(" ");
bench("Buffer.byteLength('long string'.repeat(9999))", () => {
return Buffer.byteLength(loooong);
});
var hundred = new ArrayBuffer(100);
bench("Buffer.from(ArrayBuffer(100))", () => {
return Buffer.from(hundred);
});
var hundredArray = new Uint8Array(100);
bench("Buffer.from(Uint8Array(100))", () => {
return Buffer.from(hundredArray);
});
var empty = new Uint8Array(0);
bench("Buffer.from(Uint8Array(0))", () => {
return Buffer.from(empty);
});
bench("new Buffer(Uint8Array(0))", () => {
return new Buffer(empty);
bench("new Buffer(0)", () => {
return new Buffer(0);
});
bench(`new Buffer(${N})`, () => {
@@ -66,4 +26,4 @@ bench("Buffer.alloc(24_000)", () => {
return Buffer.alloc(24_000);
});
await run({});
await run();

Binary file not shown.

View File

@@ -1,81 +1,71 @@
const EventEmitterNative = require("events").EventEmitter;
const TypedEmitter = require("tiny-typed-emitter").TypedEmitter;
const EventEmitter3 = require("eventemitter3").EventEmitter;
const EventEmitter = require("events").EventEmitter;
import { bench, run } from "mitata";
const event = new Event("hello");
var id = 0;
for (let [EventEmitter, className] of [
[EventEmitterNative, "EventEmitter"],
[TypedEmitter, "TypedEmitter"],
[EventEmitter3, "EventEmitter3"],
]) {
const emitter = new EventEmitter();
emitter.on("hello", (event) => {
event.preventDefault();
const emitter = new EventEmitter();
const event = new Event("hello");
emitter.on("hello", (event) => {
event.preventDefault();
});
var id = 0;
bench("EventEmitter.emit", () => {
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
var monkey = Object.assign({}, EventEmitter.prototype);
monkey.on("hello", (event) => {
event.preventDefault();
});
bench("[monkey] EventEmitter.emit", () => {
var called = false;
monkey.emit("hello", {
preventDefault() {
id++;
called = true;
},
});
bench(`${className}.emit`, () => {
emitter.emit("hello", {
if (!called) {
throw new Error("monkey failed");
}
});
bench("EventEmitter.on x 10_000 (handler)", () => {
var cb = (event) => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
emitter.off("hey", cb);
if (!called) throw new Error("not called");
});
bench("[monkey] EventEmitter.on x 10_000 (handler)", () => {
var cb = () => {
event.preventDefault();
};
monkey.on("hey", cb);
for (let i = 0; i < 10_000; i++)
monkey.emit("hey", {
preventDefault() {
id++;
},
});
});
bench(`${className}.on x 10_000 (handler)`, () => {
var cb = (event) => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
emitter.off("hey", cb);
if (!called) throw new Error("not called");
});
if (EventEmitter !== EventEmitter3) {
var monkey = Object.assign({}, EventEmitter.prototype);
monkey.on("hello", (event) => {
event.preventDefault();
});
bench(`[monkey] ${className}.emit`, () => {
var called = false;
monkey.emit("hello", {
preventDefault() {
id++;
called = true;
},
});
if (!called) {
throw new Error("monkey failed");
}
});
bench(`[monkey] ${className}.on x 10_000 (handler)`, () => {
var cb = () => {
event.preventDefault();
};
monkey.on("hey", cb);
for (let i = 0; i < 10_000; i++)
monkey.emit("hey", {
preventDefault() {
id++;
},
});
monkey.off("hey", cb);
});
}
}
monkey.off("hey", cb);
});
var target = new EventTarget();
target.addEventListener("hello", (event) => {});

View File

@@ -1,34 +0,0 @@
// so it can run in environments without node module resolution
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const blob = new Blob(["foo", "bar", "baz"]);
bench("FormData.append", () => {
const data = new FormData();
data.append("foo", "bar");
data.append("baz", blob);
});
const data = new FormData();
data.append("foo", "bar");
data.append("baz", blob);
const formText =
// single field form data
"--Form\r\n" + 'Content-Disposition: form-data; name="foo"\r\n\r\n' + "bar\r\n" + "--Form--\r\n";
bench("response.formData()", async () => {
await new Response(formText, {
headers: {
"Content-Type": "multipart/form-data; boundary=Form",
},
}).formData();
});
bench("new Response(formData).text()", async () => {
await new Response(data).text();
});
bench("new Response(formData).formData()", async () => {
await new Response(data).formData();
});
await run();

View File

@@ -1,25 +0,0 @@
import { bench, run } from "mitata";
// These are no-op C++ functions that are exported to JS.
const lazy = globalThis[Symbol.for("Bun.lazy")];
const noop = lazy("noop");
const fn = noop.function;
const regular = noop.functionRegular;
bench("C++ fn regular", () => {
regular();
});
bench("C++ fn", () => {
fn();
});
bench("C++ getter", () => {
return noop.getterSetter;
});
bench("C++ setter", () => {
noop.getterSetter = 1;
});
run();

View File

@@ -1,6 +0,0 @@
{
"dependencies": {
"eventemitter3": "^5.0.0",
"tiny-typed-emitter": "latest"
}
}

View File

@@ -1,17 +0,0 @@
import { readdirSync, statSync } from "fs";
import { bench, run } from "mitata";
import { argv } from "process";
const dir = argv.length > 2 ? argv[2] : "/tmp";
const result = statSync(dir);
bench("Stat.isBlockDevice", () => result.isBlockDevice());
bench("Stat.isCharacterDevice", () => result.isCharacterDevice());
bench("Stat.isDirectory", () => result.isDirectory());
bench("Stat.isFIFO", () => result.isFIFO());
bench("Stat.isFile", () => result.isFile());
bench("Stat.isSocket", () => result.isSocket());
bench("Stat.isSymbolicLink", () => result.isSymbolicLink());
await run();

View File

@@ -4,20 +4,23 @@ var short = new TextEncoder().encode("Hello World!");
var shortUTF16 = new TextEncoder().encode("Hello World 💕💕💕");
var long = new TextEncoder().encode("Hello World!".repeat(1024));
var longUTF16 = new TextEncoder().encode("Hello World 💕💕💕".repeat(1024));
var decoder = new TextDecoder();
bench(`${short.length} ascii`, () => {
var decoder = new TextDecoder();
decoder.decode(short);
});
bench(`${short.length} utf8`, () => {
var decoder = new TextDecoder();
decoder.decode(shortUTF16);
});
bench(`${long.length} ascii`, () => {
var decoder = new TextDecoder();
decoder.decode(long);
});
bench(`${longUTF16.length} utf8`, () => {
var decoder = new TextDecoder();
decoder.decode(longUTF16);
});

View File

@@ -1,102 +1,58 @@
import { readFileSync } from "fs";
import { dirname } from "path";
import { fileURLToPath } from "url";
import { bench, run, group } from "mitata";
import { createRequire } from "module";
const require = createRequire(import.meta.url);
const esbuild_ = require("esbuild/lib/main");
const swc_ = require("@swc/core");
const babel_ = require("@babel/core");
const code = readFileSync(
dirname(fileURLToPath(import.meta.url)) +
"/../../src/test/fixtures/simple.jsx",
"utf-8",
);
async function getWithName(name) {
let transformSync;
let transform;
let opts;
if (name === "bun") {
const transpiler = new Bun.Transpiler({ loader: "jsx" });
transformSync = transpiler.transformSync.bind(transpiler);
transform = transpiler.transform.bind(transpiler);
opts = "jsx";
} else if (name === "esbuild") {
try {
transformSync = esbuild_.transformSync;
transform = esbuild_.transform;
opts = { loader: "jsx" };
} catch (exception) {
throw exception;
}
} else if (name === "swc") {
try {
transformSync = swc_.transformSync;
transform = swc_.transform;
opts = {
sourceMaps: false,
inlineSourcesContent: false,
jsc: {
target: "es2022",
parser: {
jsx: true,
},
},
};
} catch (exception) {
throw exception;
}
} else if (name === "babel") {
try {
transformSync = babel_.transformSync;
transform = babel_.transform;
opts = {
sourceMaps: false,
presets: ["@babel/preset-react"],
};
} catch (exception) {
throw exception;
}
var transformSync;
var transform;
var opts;
if (process.isBun) {
const transpiler = new Bun.Transpiler({ loader: "jsx" });
transformSync = transpiler.transformSync.bind(transpiler);
transform = transpiler.transform.bind(transpiler);
opts = "jsx";
} else if (process.env["esbuild"]) {
try {
const esbuild = await import("esbuild");
transformSync = esbuild.transformSync;
transform = esbuild.transform;
opts = { loader: "jsx" };
} catch (exception) {
throw exception;
}
} else if (process.env["swc"]) {
try {
const swc = await import("@swc/core");
transformSync = swc.transformSync;
transform = swc.transform;
opts = {
sourceMaps: false,
inlineSourcesContent: false,
jsc: {
target: "es2022",
parser: {
jsx: true,
},
},
};
} catch (exception) {
throw exception;
}
} else if (process.env["babel"]) {
try {
const swc = await import("@babel/core");
transformSync = swc.transformSync;
transform = swc.transform;
opts = {
sourceMaps: false,
presets: [(await import("@babel/preset-react")).default],
};
} catch (exception) {
throw exception;
}
return {
transformSync,
transform,
opts,
name,
};
}
const bun = process.isBun ? await getWithName("bun") : null;
const esbuild = await getWithName("esbuild");
const swc = await getWithName("swc");
const babel = await getWithName("babel");
const code = readFileSync("src/test/fixtures/simple.jsx", "utf8");
const transpilers = [bun, esbuild, swc, babel].filter(Boolean);
group("transformSync (" + ((code.length / 1024) | 0) + " KB jsx file)", () => {
for (let { name, transformSync, opts } of transpilers) {
bench(name, () => {
transformSync(code, opts);
});
}
});
group("tranform x 5", () => {
for (let { name, transform, opts } of transpilers) {
bench(name, async () => {
return Promise.all([
transform(code, opts),
transform(code + "\n", opts),
transform("\n" + code + "\n", opts),
transform("\n" + code + "\n\n", opts),
transform("\n\n" + code + "\n\n", opts),
]);
});
}
});
await run();
if (process.env.ASYNC) {
console.log(await transform(code, opts));
} else {
console.log(transformSync(code, opts));
}

BIN
bun.lockb

Binary file not shown.

View File

@@ -1,83 +0,0 @@
# https://hub.docker.com/_/debian
# https://hub.docker.com/_/ubuntu
ARG IMAGE=debian:bullseye-slim
FROM $IMAGE AS base
# https://github.com/oven-sh/bun/releases
ARG BUN_VERSION=latest
RUN apt-get update -qq \
&& apt-get install -qq --no-install-recommends \
ca-certificates \
curl \
dirmngr \
gpg \
gpg-agent \
unzip \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& arch="$(dpkg --print-architecture)" \
&& case "${arch##*-}" in \
amd64) build="x64-baseline";; \
arm64) build="aarch64";; \
*) echo "error: unsupported architecture: ($arch)"; exit 1 ;; \
esac \
&& version="$BUN_VERSION" \
&& case "$version" in \
latest | canary | bun-v*) tag="$version"; ;; \
v*) tag="bun-$version"; ;; \
*) tag="bun-v$version"; ;; \
esac \
&& case "$tag" in \
latest) release="latest/download"; ;; \
*) release="download/$tag"; ;; \
esac \
&& curl "https://github.com/oven-sh/bun/releases/$release/bun-linux-$build.zip" \
-fsSLO \
--compressed \
--retry 5 \
|| (echo "error: unknown release: ($tag)" && exit 1) \
&& for key in \
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
; do \
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
done \
&& gpg --update-trustdb \
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
-fsSLO \
--compressed \
--retry 5 \
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
&& unzip "bun-linux-$build.zip" \
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
&& chmod +x /usr/local/bin/bun \
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
&& which bun \
&& which bunx \
&& bun --version
FROM $IMAGE
RUN groupadd bun \
--gid 1000 \
&& useradd bun \
--uid 1000 \
--gid bun \
--shell /bin/sh \
--create-home
COPY --from=base /usr/local/bin/bun /usr/local/bin
COPY --from=base /usr/local/bin/bunx /usr/local/bin
RUN which bun \
&& which bunx \
&& bun --version
WORKDIR /home/bun/app
CMD ["bun"]

View File

@@ -1,76 +0,0 @@
import { file, serve } from "bun";
import { existsSync, statSync } from "fs";
serve({
fetch(req: Request) {
let pathname = new URL(req.url).pathname.substring(1);
if (pathname == "") {
pathname = import.meta.url.replace("file://", "");
}
if (!existsSync(pathname)) {
return new Response(null, { status: 404 });
}
const stats = statSync(pathname);
// https://github.com/gornostay25/svelte-adapter-bun/blob/master/src/sirv.js
const headers = new Headers({
"Content-Length": "" + stats.size,
"Last-Modified": stats.mtime.toUTCString(),
ETag: `W/"${stats.size}-${stats.mtime.getTime()}"`,
});
if (req.headers.get("if-none-match") === headers.get("ETag")) {
return new Response(null, { status: 304 });
}
const opts = { code: 200, start: 0, end: Infinity, range: false };
if (req.headers.has("range")) {
opts.code = 206;
let [x, y] = req.headers.get("range").replace("bytes=", "").split("-");
let end = (opts.end = parseInt(y, 10) || stats.size - 1);
let start = (opts.start = parseInt(x, 10) || 0);
if (start >= stats.size || end >= stats.size) {
headers.set("Content-Range", `bytes */${stats.size}`);
return new Response(null, {
headers: headers,
status: 416,
});
}
headers.set("Content-Range", `bytes ${start}-${end}/${stats.size}`);
headers.set("Content-Length", "" + (end - start + 1));
headers.set("Accept-Ranges", "bytes");
opts.range = true;
}
if (opts.range) {
return new Response(file(pathname).slice(opts.start, opts.end), {
headers,
status: opts.code,
});
}
return new Response(file(pathname), { headers, status: opts.code });
},
// this is called when fetch() throws or rejects
// error(err: Error) {
// return new Response("uh oh! :(" + String(err.toString()), { status: 500 });
// },
// this boolean enables bun's default error handler
// sometime after the initial release, it will auto reload as well
development: process.env.NODE_ENV !== "production",
// note: this isn't node, but for compatibility bun supports process.env + more stuff in process
// SSL is enabled if these two are set
// certFile: './cert.pem',
// keyFile: './key.pem',
port: 3000, // number or string
hostname: "localhost", // defaults to 0.0.0.0
});

View File

@@ -1,24 +0,0 @@
// A simple way to connect FileSystemRouter to Bun#serve
// run with `bun run index.tsx`
import { renderToReadableStream } from 'react-dom/server'
import { FileSystemRouter } from 'bun'
export default {
port: 3000,
async fetch(request: Request) {
const router = new FileSystemRouter({
dir: process.cwd() + "/pages",
style: "nextjs"
})
const route = router.match(request)
const { default: Root } = await import(route.filePath)
return new Response(
await renderToReadableStream(
<Root {...route.params} />
)
)
}
}

View File

@@ -1,14 +0,0 @@
{
"name": "react-routes",
"module": "index.tsx",
"type": "module",
"devDependencies": {
"@types/react": "^18.0.27",
"@types/react-dom": "^18.0.10",
"bun-types": "^0.4.0"
},
"dependencies": {
"react": "^18.2.0",
"react-dom": "^18.2.0"
}
}

View File

@@ -1,17 +0,0 @@
// reachable from http://localhost:3000/
export default () => (
<html>
<head>
<title>index</title>
</head>
<body>
<h1>
<a href="/one" >one</a>
</h1>
<h1>
<a href="/two" >two</a>
</h1>
</body>
</html>
)

View File

@@ -1,12 +0,0 @@
// reachable from http://localhost:3000/one
export default () => (
<html>
<head>
<title>one</title>
</head>
<body>
<p>one</p>
</body>
</html>
)

View File

@@ -1,12 +0,0 @@
// reachable from http://localhost:3000/two
export default () => (
<html>
<head>
<title>two</title>
</head>
<body>
<p>two</p>
</body>
</html>
)

View File

@@ -1,20 +0,0 @@
{
"compilerOptions": {
"lib": [
"ESNext"
],
"module": "esnext",
"target": "esnext",
"moduleResolution": "nodenext",
"strict": false,
"downlevelIteration": true,
"skipLibCheck": true,
"jsx": "preserve",
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"types": [
"bun-types" // add Bun global
]
}
}

View File

@@ -1,12 +1,14 @@
{
"dependencies": {
"eslint": "^8.20.0",
"eslint-config-prettier": "^8.5.0",
"express": "^4.18.2",
"mitata": "^0.1.3",
"peechy": "latest",
"prettier": "^2.4.1",
"react": "next",
"react-dom": "next",
"eslint": "^8.20.0",
"eslint-config-prettier": "^8.5.0",
"prettier": "^2.4.1",
"svelte": "^3.52.0",
"typescript": "latest"
},
"private": true,
@@ -15,7 +17,7 @@
"build-fallback": "esbuild --target=esnext --bundle src/fallback.ts --format=iife --platform=browser --minify > src/fallback.out.js",
"postinstall": "bash .scripts/postinstall.sh",
"typecheck": "tsc",
"fmt": "prettier --write './**/*.{ts,tsx,js,jsx}' --config .prettierrc",
"fmt": "prettier --write './**/*.{ts,tsx,js,jsx}'",
"lint": "eslint './**/*.d.ts' --cache",
"lint:fix": "eslint './**/*.d.ts' --cache --fix"
},
@@ -23,7 +25,7 @@
"@types/react": "^18.0.25",
"@typescript-eslint/eslint-plugin": "^5.31.0",
"@typescript-eslint/parser": "^5.31.0",
"bun-webkit": "0.0.1-61ed147fdeaa0787d77bddb332177f23ad96d6fe"
"bun-webkit": "latest"
},
"version": "0.0.0"
}

View File

@@ -1,3 +0,0 @@
.DS_Store
node_modules
tmp

View File

@@ -1,7 +0,0 @@
# bun-ecosystem
A registry to test `npm` packages using Bun. This can be used as a tool to find bugs in Bun by running the test suites of these packages. In the future, we will run these tests to catch regressions between releases.
```sh
bun run test
```
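For illustration, registering a new package means appending an entry to the `packages` array in `src/packages.ts`. The sketch below assumes a hypothetical `ms` package with jest-style tests; the repository and glob patterns are examples only, while `Package` and `github` are the type and helper defined in that file.

```ts
// src/packages.ts (sketch) -- the entry below is a hypothetical example.
export const packages: Package[] = [
  // ...existing entries...
  {
    name: "ms",
    repository: github("vercel/ms"),
    tests: {
      style: "jest",
      include: ["test/**/*.js"],
      // exclude tests known to hang or fail under Bun
      exclude: ["flaky.test.js"],
    },
  },
];
```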

Binary file not shown.

View File

@@ -1,14 +0,0 @@
{
"private": true,
"dependencies": {
"globby": "^13.1.3"
},
"devDependencies": {
"bun-types": "canary",
"prettier": "^2.8.2"
},
"scripts": {
"format": "prettier --write src",
"test": "bun run src/runner.ts"
}
}

View File

@@ -1,69 +0,0 @@
export type Package = {
readonly name: string;
readonly repository: string;
readonly cwd?: string;
readonly tests?: {
readonly style: "jest" | "ava" | "tape" | "custom";
readonly include: string[];
readonly exclude?: string[];
readonly disabled?: boolean;
};
};
export const packages: Package[] = [
{
name: "lodash",
repository: github("lodash/lodash"),
tests: {
style: "jest",
include: ["test/*.js"],
exclude: [
"debounce.test.js", // hangs runner
"size.test.js", // require('vm').runInNewContext()
"merge.test.js", // failing
],
},
},
{
name: "chalk",
repository: github("chalk/chalk"),
tests: {
style: "ava",
include: ["test/*.js"],
},
},
{
name: "request",
repository: github("request/request"),
tests: {
style: "tape",
include: ["tests/*.js"],
},
},
{
name: "commander",
repository: github("tj/commander.js"),
tests: {
style: "jest",
include: ["tests/*.js"],
},
},
{
name: "express",
repository: github("expressjs/express"),
tests: {
style: "jest",
include: ["test/**/*.js"],
exclude: [
"test/res.sendStatus.js", // https://github.com/oven-sh/bun/issues/887
"test/Route.js", // https://github.com/oven-sh/bun/issues/2030
],
// Most tests fail due to lack of "http2"
disabled: true,
},
},
];
function github(repository: string): string {
return `git@github.com:${repository}.git`;
}

View File

@@ -1,92 +0,0 @@
import type { Package } from "./packages";
import { packages } from "./packages";
import { existsSync, copyFileSync } from "node:fs";
import { dirname, join } from "node:path";
import { globby } from "globby";
for (const pkg of packages) {
try {
await loadPackage(pkg, "tmp");
} catch (error) {
console.error(pkg.name, error);
}
}
async function loadPackage(pkg: Package, cwd?: string): Promise<void> {
await gitClone({
cwd,
repository: pkg.repository,
name: pkg.name,
});
const dir = join(cwd ?? "", pkg.name, pkg.cwd ?? "");
await spawn({
cwd: dir,
cmd: ["bun", "install"],
});
if (!pkg.tests || pkg.tests.style !== "jest") {
return;
}
const files = await globby(pkg.tests.include, {
cwd: dir,
ignore: pkg.tests.exclude ?? [crypto.randomUUID()],
onlyFiles: true,
caseSensitiveMatch: false,
});
if (!files.length) {
throw new Error("No tests found");
}
for (const file of files) {
let path = file;
if (!file.includes(".test.")) {
const ext = path.lastIndexOf(".");
path = file.substring(0, ext) + ".test" + file.substring(ext);
copyFileSync(join(dir, file), join(dir, path));
}
await spawn({
cwd: dir,
cmd: ["bun", "wiptest", path],
});
}
}
type GitCloneOptions = {
repository: string;
cwd?: string;
name?: string;
};
async function gitClone(options: GitCloneOptions): Promise<void> {
const name = options.name ?? dirname(options.repository);
const cwd = options.cwd ?? process.cwd();
const path = join(cwd, name);
if (existsSync(path)) {
await spawn({
cwd: path,
cmd: ["git", "pull"],
});
} else {
const url = `${options.repository}`;
await spawn({
cwd,
cmd: ["git", "clone", "--single-branch", "--depth", "1", url, name],
});
}
}
type SpawnOptions = {
cwd: string;
cmd: string[];
};
async function spawn({ cwd, cmd }: SpawnOptions) {
const { exited } = await Bun.spawn({
cwd,
cmd,
stdout: "inherit",
stderr: "inherit",
});
const exitCode = await exited;
if (exitCode !== 0) {
throw new Error(`"${cmd.join(" ")}" exited with ${exitCode}`);
}
}

View File

@@ -1,16 +0,0 @@
{
"compilerOptions": {
"lib": ["ESNext"],
"module": "ESNext",
"target": "ESNext",
"moduleResolution": "node",
"types": ["bun-types"],
"esModuleInterop": true,
"allowJs": true,
"strict": true,
"resolveJsonModule": true
},
"include": [
"src"
]
}

230
packages/bun-error/package-lock.json generated Normal file
View File

@@ -0,0 +1,230 @@
{
"name": "bun-error",
"version": "1.0.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"@types/prop-types": {
"version": "15.7.5",
"resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz",
"integrity": "sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==",
"dev": true
},
"@types/react": {
"version": "17.0.47",
"resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.47.tgz",
"integrity": "sha512-mk0BL8zBinf2ozNr3qPnlu1oyVTYq+4V7WA76RgxUAtf0Em/Wbid38KN6n4abEkvO4xMTBWmnP1FtQzgkEiJoA==",
"dev": true,
"requires": {
"@types/prop-types": "*",
"@types/scheduler": "*",
"csstype": "^3.0.2"
}
},
"@types/scheduler": {
"version": "0.16.2",
"resolved": "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz",
"integrity": "sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==",
"dev": true
},
"csstype": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.0.tgz",
"integrity": "sha512-uX1KG+x9h5hIJsaKR9xHUeUraxf8IODOwq9JLNPq6BwB04a/xgpq3rcx47l5BZu5zBPlgD342tdke3Hom/nJRA==",
"dev": true
},
"esbuild": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.48.tgz",
"integrity": "sha512-w6N1Yn5MtqK2U1/WZTX9ZqUVb8IOLZkZ5AdHkT6x3cHDMVsYWC7WPdiLmx19w3i4Rwzy5LqsEMtVihG3e4rFzA==",
"requires": {
"esbuild-android-64": "0.14.48",
"esbuild-android-arm64": "0.14.48",
"esbuild-darwin-64": "0.14.48",
"esbuild-darwin-arm64": "0.14.48",
"esbuild-freebsd-64": "0.14.48",
"esbuild-freebsd-arm64": "0.14.48",
"esbuild-linux-32": "0.14.48",
"esbuild-linux-64": "0.14.48",
"esbuild-linux-arm": "0.14.48",
"esbuild-linux-arm64": "0.14.48",
"esbuild-linux-mips64le": "0.14.48",
"esbuild-linux-ppc64le": "0.14.48",
"esbuild-linux-riscv64": "0.14.48",
"esbuild-linux-s390x": "0.14.48",
"esbuild-netbsd-64": "0.14.48",
"esbuild-openbsd-64": "0.14.48",
"esbuild-sunos-64": "0.14.48",
"esbuild-windows-32": "0.14.48",
"esbuild-windows-64": "0.14.48",
"esbuild-windows-arm64": "0.14.48"
}
},
"esbuild-android-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-android-64/-/esbuild-android-64-0.14.48.tgz",
"integrity": "sha512-3aMjboap/kqwCUpGWIjsk20TtxVoKck8/4Tu19rubh7t5Ra0Yrpg30Mt1QXXlipOazrEceGeWurXKeFJgkPOUg==",
"optional": true
},
"esbuild-android-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-android-arm64/-/esbuild-android-arm64-0.14.48.tgz",
"integrity": "sha512-vptI3K0wGALiDq+EvRuZotZrJqkYkN5282iAfcffjI5lmGG9G1ta/CIVauhY42MBXwEgDJkweiDcDMRLzBZC4g==",
"optional": true
},
"esbuild-darwin-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-darwin-64/-/esbuild-darwin-64-0.14.48.tgz",
"integrity": "sha512-gGQZa4+hab2Va/Zww94YbshLuWteyKGD3+EsVon8EWTWhnHFRm5N9NbALNbwi/7hQ/hM1Zm4FuHg+k6BLsl5UA==",
"optional": true
},
"esbuild-darwin-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-darwin-arm64/-/esbuild-darwin-arm64-0.14.48.tgz",
"integrity": "sha512-bFjnNEXjhZT+IZ8RvRGNJthLWNHV5JkCtuOFOnjvo5pC0sk2/QVk0Qc06g2PV3J0TcU6kaPC3RN9yy9w2PSLEA==",
"optional": true
},
"esbuild-freebsd-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-freebsd-64/-/esbuild-freebsd-64-0.14.48.tgz",
"integrity": "sha512-1NOlwRxmOsnPcWOGTB10JKAkYSb2nue0oM1AfHWunW/mv3wERfJmnYlGzL3UAOIUXZqW8GeA2mv+QGwq7DToqA==",
"optional": true
},
"esbuild-freebsd-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-freebsd-arm64/-/esbuild-freebsd-arm64-0.14.48.tgz",
"integrity": "sha512-gXqKdO8wabVcYtluAbikDH2jhXp+Klq5oCD5qbVyUG6tFiGhrC9oczKq3vIrrtwcxDQqK6+HDYK8Zrd4bCA9Gw==",
"optional": true
},
"esbuild-linux-32": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-32/-/esbuild-linux-32-0.14.48.tgz",
"integrity": "sha512-ghGyDfS289z/LReZQUuuKq9KlTiTspxL8SITBFQFAFRA/IkIvDpnZnCAKTCjGXAmUqroMQfKJXMxyjJA69c/nQ==",
"optional": true
},
"esbuild-linux-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-64/-/esbuild-linux-64-0.14.48.tgz",
"integrity": "sha512-vni3p/gppLMVZLghI7oMqbOZdGmLbbKR23XFARKnszCIBpEMEDxOMNIKPmMItQrmH/iJrL1z8Jt2nynY0bE1ug==",
"optional": true
},
"esbuild-linux-arm": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-arm/-/esbuild-linux-arm-0.14.48.tgz",
"integrity": "sha512-+VfSV7Akh1XUiDNXgqgY1cUP1i2vjI+BmlyXRfVz5AfV3jbpde8JTs5Q9sYgaoq5cWfuKfoZB/QkGOI+QcL1Tw==",
"optional": true
},
"esbuild-linux-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-arm64/-/esbuild-linux-arm64-0.14.48.tgz",
"integrity": "sha512-3CFsOlpoxlKPRevEHq8aAntgYGYkE1N9yRYAcPyng/p4Wyx0tPR5SBYsxLKcgPB9mR8chHEhtWYz6EZ+H199Zw==",
"optional": true
},
"esbuild-linux-mips64le": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-mips64le/-/esbuild-linux-mips64le-0.14.48.tgz",
"integrity": "sha512-cs0uOiRlPp6ymknDnjajCgvDMSsLw5mST2UXh+ZIrXTj2Ifyf2aAP3Iw4DiqgnyYLV2O/v/yWBJx+WfmKEpNLA==",
"optional": true
},
"esbuild-linux-ppc64le": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-ppc64le/-/esbuild-linux-ppc64le-0.14.48.tgz",
"integrity": "sha512-+2F0vJMkuI0Wie/wcSPDCqXvSFEELH7Jubxb7mpWrA/4NpT+/byjxDz0gG6R1WJoeDefcrMfpBx4GFNN1JQorQ==",
"optional": true
},
"esbuild-linux-riscv64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-riscv64/-/esbuild-linux-riscv64-0.14.48.tgz",
"integrity": "sha512-BmaK/GfEE+5F2/QDrIXteFGKnVHGxlnK9MjdVKMTfvtmudjY3k2t8NtlY4qemKSizc+QwyombGWTBDc76rxePA==",
"optional": true
},
"esbuild-linux-s390x": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-linux-s390x/-/esbuild-linux-s390x-0.14.48.tgz",
"integrity": "sha512-tndw/0B9jiCL+KWKo0TSMaUm5UWBLsfCKVdbfMlb3d5LeV9WbijZ8Ordia8SAYv38VSJWOEt6eDCdOx8LqkC4g==",
"optional": true
},
"esbuild-netbsd-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-netbsd-64/-/esbuild-netbsd-64-0.14.48.tgz",
"integrity": "sha512-V9hgXfwf/T901Lr1wkOfoevtyNkrxmMcRHyticybBUHookznipMOHoF41Al68QBsqBxnITCEpjjd4yAos7z9Tw==",
"optional": true
},
"esbuild-openbsd-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-openbsd-64/-/esbuild-openbsd-64-0.14.48.tgz",
"integrity": "sha512-+IHf4JcbnnBl4T52egorXMatil/za0awqzg2Vy6FBgPcBpisDWT2sVz/tNdrK9kAqj+GZG/jZdrOkj7wsrNTKA==",
"optional": true
},
"esbuild-sunos-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-sunos-64/-/esbuild-sunos-64-0.14.48.tgz",
"integrity": "sha512-77m8bsr5wOpOWbGi9KSqDphcq6dFeJyun8TA+12JW/GAjyfTwVtOnN8DOt6DSPUfEV+ltVMNqtXUeTeMAxl5KA==",
"optional": true
},
"esbuild-windows-32": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-windows-32/-/esbuild-windows-32-0.14.48.tgz",
"integrity": "sha512-EPgRuTPP8vK9maxpTGDe5lSoIBHGKO/AuxDncg5O3NkrPeLNdvvK8oywB0zGaAZXxYWfNNSHskvvDgmfVTguhg==",
"optional": true
},
"esbuild-windows-64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-windows-64/-/esbuild-windows-64-0.14.48.tgz",
"integrity": "sha512-YmpXjdT1q0b8ictSdGwH3M8VCoqPpK1/UArze3X199w6u8hUx3V8BhAi1WjbsfDYRBanVVtduAhh2sirImtAvA==",
"optional": true
},
"esbuild-windows-arm64": {
"version": "0.14.48",
"resolved": "https://registry.npmjs.org/esbuild-windows-arm64/-/esbuild-windows-arm64-0.14.48.tgz",
"integrity": "sha512-HHaOMCsCXp0rz5BT2crTka6MPWVno121NKApsGs/OIW5QC0ggC69YMGs1aJct9/9FSUF4A1xNE/cLvgB5svR4g==",
"optional": true
},
"js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
},
"loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
"integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
"requires": {
"js-tokens": "^3.0.0 || ^4.0.0"
}
},
"object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
"integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM="
},
"react": {
"version": "17.0.2",
"resolved": "https://registry.npmjs.org/react/-/react-17.0.2.tgz",
"integrity": "sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==",
"requires": {
"loose-envify": "^1.1.0",
"object-assign": "^4.1.1"
}
},
"react-dom": {
"version": "17.0.2",
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz",
"integrity": "sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==",
"requires": {
"loose-envify": "^1.1.0",
"object-assign": "^4.1.1",
"scheduler": "^0.20.2"
}
},
"scheduler": {
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz",
"integrity": "sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==",
"requires": {
"loose-envify": "^1.1.0",
"object-assign": "^4.1.1"
}
}
}
}

View File

@@ -1,6 +0,0 @@
.DS_Store
.env
node_modules
/npm/**/bin
/npm/**/*.js
/npm/**/.npmrc

View File

@@ -1,2 +0,0 @@
@oven:registry=https://registry.npmjs.org/
//registry.npmjs.org/:_authToken=${NPM_TOKEN}

View File

@@ -1,15 +0,0 @@
# bun-release
Scripts that release Bun to npm, Dockerhub, Homebrew, etc.
### Running
```sh
bun run npm # build assets for the latest release
bun run npm -- <release> # build assets for the provided release
bun run npm -- <release> [dry-run|publish] # build and publish assets to npm
```
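For instance, a dry run against a specific release tag (the tag name here is illustrative) builds every platform package and passes `--dry-run` to `npm publish`, so nothing is uploaded:

```sh
# Build the npm packages for the bun-v0.5.3 release and verify them without publishing.
bun run npm -- bun-v0.5.3 dry-run
```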
### Credits
- [esbuild](https://github.com/evanw/esbuild), for its npm scripts, on which this was largely based.

Binary file not shown.

View File

@@ -1,3 +0,0 @@
# Bun
This is the macOS arm64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-darwin-aarch64",
"version": "0.5.3",
"description": "This is the macOS arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"darwin"
],
"cpu": [
"arm64"
]
}

View File

@@ -1,5 +0,0 @@
# Bun
This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh
_Note: "Baseline" builds are for machines that do not support [AVX2](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions) instructions._

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-darwin-x64-baseline",
"version": "0.5.3",
"description": "This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"darwin"
],
"cpu": [
"x64"
]
}

View File

@@ -1,3 +0,0 @@
# Bun
This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-darwin-x64",
"version": "0.5.3",
"description": "This is the macOS x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"darwin"
],
"cpu": [
"x64"
]
}

View File

@@ -1,3 +0,0 @@
# Bun
This is the Linux arm64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-linux-aarch64",
"version": "0.5.3",
"description": "This is the Linux arm64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"linux"
],
"cpu": [
"arm64"
]
}

View File

@@ -1,5 +0,0 @@
# Bun
This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh
_Note: "Baseline" builds are for machines that do not support [AVX2](https://en.wikipedia.org/wiki/Advanced_Vector_Extensions) instructions._

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-linux-x64-baseline",
"version": "0.5.3",
"description": "This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"linux"
],
"cpu": [
"x64"
]
}

View File

@@ -1,3 +0,0 @@
# Bun
This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime. https://bun.sh

View File

@@ -1,16 +0,0 @@
{
"name": "@oven/bun-linux-x64",
"version": "0.5.3",
"description": "This is the Linux x64 binary for Bun, a fast all-in-one JavaScript runtime.",
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"repository": "https://github.com/oven-sh/bun",
"preferUnplugged": true,
"os": [
"linux"
],
"cpu": [
"x64"
]
}

View File

@@ -1,31 +0,0 @@
# Bun
Bun is a fast all-in-one JavaScript runtime. https://bun.sh
### Install
```sh
npm install -g bun
```
### Upgrade
```sh
bun upgrade
```
### Supported Platforms
- [macOS, arm64 (Apple Silicon)](https://www.npmjs.com/package/@oven/bun-darwin-aarch64)
- [macOS, x64](https://www.npmjs.com/package/@oven/bun-darwin-x64)
- [macOS, x64 (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-darwin-x64-baseline)
- [Linux, arm64](https://www.npmjs.com/package/@oven/bun-linux-aarch64)
- [Linux, x64](https://www.npmjs.com/package/@oven/bun-linux-x64)
- [Linux, x64 (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-linux-x64-baseline)
- [Windows (using Windows Subsystem for Linux, aka. "WSL")](https://relatablecode.com/how-to-set-up-bun-on-a-windows-machine)
### Future Platforms
- [Windows](https://github.com/oven-sh/bun/issues/43)
- Unix-like variants such as FreeBSD, OpenBSD, etc.
- Android and iOS

View File

@@ -1,42 +0,0 @@
{
"name": "bun",
"version": "0.5.3",
"description": "Bun is a fast all-in-one JavaScript runtime.",
"keywords": [
"bun",
"bun.js",
"node",
"node.js",
"runtime",
"bundler",
"transpiler",
"typescript"
],
"homepage": "https://bun.sh",
"bugs": "https://github.com/oven-sh/issues",
"license": "MIT",
"bin": {
"bun": "bin/bun",
"bunx": "bin/bun"
},
"repository": "https://github.com/oven-sh/bun",
"scripts": {
"postinstall": "node install.js"
},
"optionalDependencies": {
"@oven/bun-darwin-aarch64": "0.5.3",
"@oven/bun-darwin-x64": "0.5.3",
"@oven/bun-darwin-x64-baseline": "0.5.3",
"@oven/bun-linux-aarch64": "0.5.3",
"@oven/bun-linux-x64": "0.5.3",
"@oven/bun-linux-x64-baseline": "0.5.3"
},
"os": [
"darwin",
"linux"
],
"cpu": [
"arm64",
"x64"
]
}

View File

@@ -1,19 +0,0 @@
{
"private": true,
"dependencies": {
"esbuild": "^0.17.3",
"jszip": "^3.10.1",
"octokit": "^2.0.14"
},
"devDependencies": {
"@octokit/types": "^8.1.1",
"bun-types": "^0.4.0",
"prettier": "^2.8.2"
},
"scripts": {
"format": "prettier --write src scripts",
"get-version": "bun scripts/get-version.ts",
"upload-npm": "bun scripts/upload-npm.ts",
"upload-assets": "bun scripts/upload-assets.ts"
}
}

View File

@@ -1,5 +0,0 @@
import { log } from "../src/console";
import { getSemver } from "../src/github";
log(await getSemver(process.argv[2]));
process.exit(0); // HACK

View File

@@ -1,13 +0,0 @@
import { importBun } from "../src/npm/install";
import { execFileSync } from "child_process";
importBun()
.then(bun => {
return execFileSync(bun, process.argv.slice(2), {
stdio: "inherit",
});
})
.catch(error => {
console.error(error);
process.exit(1);
});

View File

@@ -1,10 +0,0 @@
import { importBun, optimizeBun } from "../src/npm/install";
importBun()
.then(path => {
optimizeBun(path);
})
.catch(error => {
console.error(error);
process.exit(1);
});

View File

@@ -1,92 +0,0 @@
import { getRelease, uploadAsset } from "../src/github";
import { fetch } from "../src/fetch";
import { spawn } from "../src/spawn";
import { confirm, exit, log, stdin, warn } from "../src/console";
import { hash, join, rm, tmp, write, basename, blob } from "../src/fs";
const [tag, ...paths] = process.argv.slice(2);
if (!tag) {
exit("Invalid arguments: [tag] [...assets]");
}
const { tag_name, assets } = await getRelease(tag);
log("Release:", tag_name, "\n");
log("Existing assets:\n", ...assets.map(({ name }) => `- ${name}\n`));
log("Updating assets:\n", ...paths.map(path => `+ ${basename(path)}\n`));
await confirm();
log("Hashing assets...\n");
const existing: Map<string, string> = new Map();
for (const { name, browser_download_url } of assets) {
if (name.startsWith("SHASUMS256.txt")) {
continue;
}
const response = await fetch(browser_download_url);
const buffer = Buffer.from(await response.arrayBuffer());
existing.set(name, await hash(buffer));
}
const updated: Map<string, string> = new Map();
for (const path of paths) {
const name = basename(path);
updated.set(name, await hash(path));
}
log(
"Unchanged hashes:\n",
...Array.from(existing.entries())
.filter(([name]) => !updated.has(name))
.map(([name, sha256]) => ` - ${sha256} => ${name}\n`),
);
log("Changed hashes:\n", ...Array.from(updated.entries()).map(([name, sha256]) => ` + ${sha256} => ${name}\n`));
await confirm();
log("Signing assets...\n");
const cwd = tmp();
const path = join(cwd, "SHASUMS256.txt");
const signedPath = `${path}.asc`;
write(
path,
[...Array.from(updated.entries()), ...Array.from(existing.entries()).filter(([name]) => !updated.has(name))]
.sort(([a], [b]) => a.localeCompare(b))
.map(([name, sha256]) => `${sha256} ${name}`)
.join("\n"),
);
const { stdout: keys } = spawn("gpg", ["--list-secret-keys", "--keyid-format", "long"]);
const verifiedKeys = [
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59", // robobun@oven.sh
];
if (!verifiedKeys.find(key => keys.includes(key))) {
warn("Signature is probably wrong, key not found: robobun@oven.sh");
}
const passphrase = await stdin("Passphrase:");
log();
const { exitCode, stdout, stderr } = spawn(
"gpg",
["--pinentry-mode", "loopback", "--passphrase-fd", "0", "--clearsign", "--output", signedPath, path],
{
// @ts-ignore
input: passphrase,
stdout: "inherit",
stderr: "inherit",
},
);
if (exitCode !== 0) {
exit(stdout || stderr);
}
const uploads = [...paths, path, signedPath];
log("Uploading assets:\n", ...uploads.map(path => ` + ${basename(path)}\n`));
await confirm();
for (const path of uploads) {
const name = basename(path);
await uploadAsset(tag_name, name, blob(path));
}
try {
rm(cwd);
} catch {
warn("Failed to cleanup:", cwd, "\n");
}
log("Done");
process.exit(0); // FIXME

View File

@@ -1,164 +0,0 @@
import { join, copy, exists, chmod, write, writeJson } from "../src/fs";
import { fetch } from "../src/fetch";
import { spawn } from "../src/spawn";
import type { Platform } from "../src/platform";
import { platforms } from "../src/platform";
import { getSemver } from "../src/github";
import { getRelease } from "../src/github";
import type { BuildOptions } from "esbuild";
import { buildSync, formatMessagesSync } from "esbuild";
import type { JSZipObject } from "jszip";
import { loadAsync } from "jszip";
import { debug, log, error } from "../src/console";
const module = "bun";
const owner = "@oven";
let version: string;
const [tag, action] = process.argv.slice(2);
await build(tag);
if (action === "publish") {
await publish();
} else if (action === "dry-run") {
await publish(true);
} else if (action) {
throw new Error(`Unknown action: ${action}`);
}
process.exit(0); // HACK
async function build(tag?: string): Promise<void> {
const release = await getRelease(tag);
version = await getSemver(release.tag_name);
await buildRootModule();
for (const platform of platforms) {
await buildModule(release, platform);
}
}
async function publish(dryRun?: boolean): Promise<void> {
const modules = platforms.map(({ bin }) => `${owner}/${bin}`);
modules.push(module);
for (const module of modules) {
publishModule(module, dryRun);
}
}
async function buildRootModule() {
log("Building:", `${module}@${version}`);
const cwd = join("npm", module);
const define = {
version: `"${version}"`,
module: `"${module}"`,
owner: `"${owner}"`,
};
bundle(join("scripts", "npm-postinstall.ts"), join(cwd, "install.js"), {
define,
});
bundle(join("scripts", "npm-exec.ts"), join(cwd, "bin", "bun"), {
define,
banner: {
js: "#!/usr/bin/env node",
},
});
const os = [...new Set(platforms.map(({ os }) => os))];
const cpu = [...new Set(platforms.map(({ arch }) => arch))];
writeJson(join(cwd, "package.json"), {
name: module,
version: version,
scripts: {
postinstall: "node install.js",
},
optionalDependencies: Object.fromEntries(platforms.map(({ bin }) => [`${owner}/${bin}`, version])),
bin: {
bun: "bin/bun",
bunx: "bin/bun",
},
os,
cpu,
});
if (exists(".npmrc")) {
copy(".npmrc", join(cwd, ".npmrc"));
}
}
async function buildModule(
release: Awaited<ReturnType<typeof getRelease>>,
{ bin, exe, os, arch }: Platform,
): Promise<void> {
const module = `${owner}/${bin}`;
log("Building:", `${module}@${version}`);
const asset = release.assets.find(({ name }) => name === `${bin}.zip`);
if (!asset) {
error(`No asset found: ${bin}`);
return;
}
const bun = await extractFromZip(asset.browser_download_url, `${bin}/bun`);
const cwd = join("npm", module);
write(join(cwd, exe), await bun.async("arraybuffer"));
chmod(join(cwd, exe), 0o755);
writeJson(join(cwd, "package.json"), {
name: module,
version: version,
preferUnplugged: true,
os: [os],
cpu: [arch],
});
if (exists(".npmrc")) {
copy(".npmrc", join(cwd, ".npmrc"));
}
}
function publishModule(name: string, dryRun?: boolean): void {
log(dryRun ? "Dry-run Publishing:" : "Publishing:", `${name}@${version}`);
const { exitCode, stdout, stderr } = spawn(
"npm",
[
"publish",
"--access",
"public",
"--tag",
version.includes("canary") ? "canary" : "latest",
...(dryRun ? ["--dry-run"] : []),
],
{
cwd: join("npm", name),
},
);
if (exitCode !== 0) {
error(stderr || stdout);
}
}
async function extractFromZip(url: string, filename: string): Promise<JSZipObject> {
const response = await fetch(url);
const buffer = await response.arrayBuffer();
const zip = await loadAsync(buffer);
for (const [name, file] of Object.entries(zip.files)) {
if (!file.dir && name.startsWith(filename)) {
return file;
}
}
debug("Found files:", Object.keys(zip.files));
throw new Error(`File not found: ${filename}`);
}
function bundle(src: string, dst: string, options: BuildOptions = {}): void {
const { errors } = buildSync({
bundle: true,
treeShaking: true,
keepNames: true,
minifySyntax: true,
pure: ["console.debug"],
platform: "node",
target: "es6",
format: "cjs",
entryPoints: [src],
outfile: dst,
...options,
});
if (errors?.length) {
const messages = formatMessagesSync(errors, { kind: "error" });
throw new Error(messages.join("\n"));
}
}

View File

@@ -1,75 +0,0 @@
import { isatty } from "tty";
import { createInterface } from "readline";
export const isAction = !!process.env["GITHUB_ACTION"];
export const isDebug =
process.env["DEBUG"] === "1" || process.env["LOG_LEVEL"] === "debug" || process.env["RUNNER_DEBUG"] === "1";
export function debug(...message: any[]): void {
if (isAction) {
console.debug("::debug::", ...message);
} else if (isDebug) {
console.debug(...message);
}
}
export function log(...message: any[]): void {
console.log(...message);
}
export function warn(...message: any[]): void {
if (isAction) {
console.warn("::warning::", ...message);
} else {
console.warn(...message);
}
}
export function error(...message: any[]): void {
if (isAction) {
console.error("::error::", ...message);
} else {
console.error(...message);
}
}
export function exit(...message: any[]): never {
error(...message);
process.exit(1);
}
export function isTty(): boolean {
return isatty(process.stdout.fd);
}
export async function stdin(question: string): Promise<string> {
if (isTty()) {
return prompt(question) || "";
}
const reader = createInterface({
input: process.stdin,
terminal: false,
});
let buffer = "";
reader.on("line", line => {
buffer += line;
});
return new Promise(resolve => {
reader.once("close", () => resolve(buffer));
});
}
export async function confirm(message?: string): Promise<void> {
if (!isTty()) {
return;
}
const question = message ?? "Confirm?";
switch (prompt(`${question} [y/n]`)) {
case "y":
case "Y":
log();
return;
}
exit();
}

View File

@@ -1,70 +0,0 @@
import { debug, isDebug } from "./console";
export const fetch = "fetch" in globalThis ? webFetch : nodeFetch;
type Options = RequestInit & { assert?: boolean };
async function webFetch(url: string, options: Options = {}): Promise<Response> {
debug("fetch request", url, options);
const response = await globalThis.fetch(url, options, { verbose: isDebug });
debug("fetch response", response);
if (options?.assert !== false && !isOk(response.status)) {
try {
debug(await response.text());
} catch {}
throw new Error(`${response.status}: ${url}`);
}
return response;
}
async function nodeFetch(url: string, options: Options = {}): Promise<Response> {
const { get } = await import("node:http");
return new Promise((resolve, reject) => {
get(url, response => {
debug("http.get", url, response.statusCode);
const status = response.statusCode ?? 501;
if (response.headers.location && isRedirect(status)) {
return nodeFetch(response.headers.location, options).then(resolve, reject);
}
if (options?.assert !== false && !isOk(status)) {
return reject(new Error(`${status}: ${url}`));
}
const body: Buffer[] = [];
response.on("data", chunk => {
body.push(chunk);
});
response.on("end", () => {
resolve({
ok: isOk(status),
status,
async arrayBuffer() {
return Buffer.concat(body).buffer as ArrayBuffer;
},
async text() {
return Buffer.concat(body).toString("utf-8");
},
async json() {
const text = Buffer.concat(body).toString("utf-8");
return JSON.parse(text);
},
} as Response);
});
}).on("error", reject);
});
}
function isOk(status: number): boolean {
return status >= 200 && status <= 204;
}
function isRedirect(status: number): boolean {
switch (status) {
case 301: // Moved Permanently
case 308: // Permanent Redirect
case 302: // Found
case 307: // Temporary Redirect
case 303: // See Other
return true;
}
return false;
}

View File

@@ -1,159 +0,0 @@
import path from "path";
import fs from "fs";
import os from "os";
import crypto from "crypto";
import { debug } from "./console";
export function join(...paths: (string | string[])[]): string {
return path.join(...paths.flat(2));
}
export function basename(...paths: (string | string[])[]): string {
return path.basename(join(...paths));
}
export function tmp(): string {
const tmpdir = process.env["RUNNER_TEMP"] ?? os.tmpdir();
const dir = fs.mkdtempSync(join(tmpdir, "bun-"));
debug("tmp", dir);
return dir;
}
export function rm(path: string): void {
debug("rm", path);
try {
fs.rmSync(path, { recursive: true });
return;
} catch (error) {
debug("fs.rmSync failed", error);
// Did not exist before Node.js v14.
// Attempt again with older, slower implementation.
}
let stats: fs.Stats;
try {
stats = fs.lstatSync(path);
} catch (error) {
debug("fs.lstatSync failed", error);
// The file was likely deleted, so return early.
return;
}
if (!stats.isDirectory()) {
fs.unlinkSync(path);
return;
}
try {
fs.rmdirSync(path, { recursive: true });
return;
} catch (error) {
debug("fs.rmdirSync failed", error);
// Recursive flag did not exist before Node.js X.
// Attempt again with older, slower implementation.
}
for (const filename of fs.readdirSync(path)) {
rm(join(path, filename));
}
fs.rmdirSync(path);
}
export function rename(path: string, newPath: string): void {
debug("rename", path, newPath);
try {
fs.renameSync(path, newPath);
return;
} catch (error) {
debug("fs.renameSync failed", error);
// If there is an error, delete the new path and try again.
}
try {
rm(newPath);
} catch (error) {
debug("rm failed", error);
// The path could have been deleted already.
}
fs.renameSync(path, newPath);
}
export function write(dst: string, content: string | ArrayBuffer | ArrayBufferView): void {
debug("write", dst);
try {
fs.writeFileSync(dst, content);
return;
} catch (error) {
debug("fs.writeFileSync failed", error);
// If there is an error, ensure the parent directory
// exists and try again.
try {
fs.mkdirSync(path.dirname(dst), { recursive: true });
} catch (error) {
debug("fs.mkdirSync failed", error);
// The directory could have been created already.
}
fs.writeFileSync(dst, content);
}
}
export function writeJson(path: string, json: object, force?: boolean): void {
let value = json;
if (!force && exists(path)) {
try {
const existing = JSON.parse(read(path));
value = {
...existing,
...json,
};
} catch {
value = json;
}
}
write(path, `${JSON.stringify(value, undefined, 2)}\n`);
}
export function read(path: string): string {
debug("read", path);
return fs.readFileSync(path, "utf-8");
}
export function blob(path: string): Blob {
debug("blob", path);
if ("Bun" in globalThis) {
return Bun.file(path);
}
const buffer = fs.readFileSync(path);
return new Blob([buffer], {
type: path.endsWith(".zip") ? "application/zip" : path.endsWith(".txt") ? "text/plain" : "application/octet-stream",
});
}
export function hash(content: string | crypto.BinaryLike): string {
debug("hash", content);
return crypto
.createHash("sha256")
.update(typeof content === "string" ? fs.readFileSync(content) : content)
.digest("hex");
}
export function chmod(path: string, mode: fs.Mode): void {
debug("chmod", path, mode);
fs.chmodSync(path, mode);
}
export function copy(path: string, newPath: string): void {
debug("copy", path, newPath);
try {
fs.copyFileSync(path, newPath);
return;
} catch (error) {
debug("fs.copyFileSync failed", error);
}
write(newPath, read(path));
}
export function exists(path: string): boolean {
debug("exists", path);
try {
return fs.existsSync(path);
} catch (error) {
debug("fs.existsSync failed", error);
}
return false;
}

View File

@@ -1,120 +0,0 @@
import type { Endpoints, RequestParameters, Route } from "@octokit/types";
import { Octokit } from "octokit";
import { fetch } from "./fetch";
import { debug, log, warn, error } from "./console";
const [owner, repo] = process.env["GITHUB_REPOSITORY"]?.split("/") ?? ["oven-sh", "bun"];
const octokit = new Octokit({
auth: process.env["GITHUB_TOKEN"],
request: {
fetch,
},
log: {
debug,
info: log,
warn,
error,
},
});
export async function github<R extends Route>(
url: R | keyof Endpoints,
options?: Omit<
R extends keyof Endpoints ? Endpoints[R]["parameters"] & RequestParameters : RequestParameters,
"owner" | "repo"
>,
): Promise<R extends keyof Endpoints ? Endpoints[R]["response"]["data"] : unknown> {
// @ts-ignore
const { data } = await octokit.request(url, {
owner,
repo,
...options,
});
return data;
}
export async function getRelease(tag?: string) {
if (!tag) {
return github("GET /repos/{owner}/{repo}/releases/latest");
}
return github("GET /repos/{owner}/{repo}/releases/tags/{tag}", {
tag: formatTag(tag),
});
}
export async function uploadAsset(tag: string, name: string, blob: Blob) {
const release = await getRelease(tag);
const asset = release.assets.find(asset => asset.name === name);
// Github requires that existing assets are deleted before uploading
// a new asset, but does not provide a rename or re-upload API?!?
if (asset) {
await github("DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}", {
asset_id: asset.id,
});
}
return github("POST {origin}/repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", {
baseUrl: "https://uploads.github.com",
release_id: release.id,
name,
headers: {
"content-type": blob.type,
"content-length": blob.size,
},
data: Buffer.from(await blob.arrayBuffer()),
});
}
export async function downloadAsset(tag: string, name: string): Promise<Blob> {
const release = await getRelease(tag);
const asset = release.assets.find(asset => asset.name === name);
if (!asset) {
throw new Error(`Asset not found: ${name}`);
}
const response = await fetch(asset.browser_download_url);
return response.blob();
}
export async function getSha(tag: string, format?: "short" | "long") {
const ref = formatTag(tag);
const {
object: { sha },
} = await github("GET /repos/{owner}/{repo}/git/ref/{ref}", {
ref: ref === "canary" ? "heads/main" : `tags/${ref}`,
});
return format === "short" ? sha.substring(0, 7) : sha;
}
export async function getBuild(): Promise<number> {
const date = new Date().toISOString().split("T")[0].replace(/-/g, "");
const response = await fetch("https://registry.npmjs.org/-/package/bun/dist-tags");
const { canary }: { canary: string } = await response.json();
if (!canary.includes(date)) {
return 1;
}
const match = /canary.[0-9]{8}\.([0-9]+)+?/.exec(canary);
return match ? 1 + parseInt(match[1]) : 1;
}
export async function getSemver(tag?: string, build?: number): Promise<string> {
const { tag_name } = await getRelease(tag);
if (tag_name !== "canary") {
return tag_name.replace("bun-v", "");
}
if (build === undefined) {
build = await getBuild();
}
const sha = await getSha(tag_name, "short");
const date = new Date().toISOString().split("T")[0].replace(/-/g, "");
return `${Bun.version}-canary.${date}.${build}+${sha}`;
}
export function formatTag(tag: string): string {
if (tag === "canary" || tag.startsWith("bun-v")) {
return tag;
}
if (tag.startsWith("v")) {
return tag.slice(1);
}
return `bun-v${tag}`;
}

View File

@@ -1,144 +0,0 @@
import { fetch } from "../fetch";
import { spawn } from "../spawn";
import { chmod, join, rename, rm, tmp, write } from "../fs";
import { unzipSync } from "zlib";
import type { Platform } from "../platform";
import { os, arch, supportedPlatforms } from "../platform";
import { debug, error } from "../console";
declare const version: string;
declare const module: string;
declare const owner: string;
export async function importBun(): Promise<string> {
if (!supportedPlatforms.length) {
throw new Error(`Unsupported platform: ${os} ${arch}`);
}
for (const platform of supportedPlatforms) {
try {
return await requireBun(platform);
} catch (error) {
debug("requireBun failed", error);
}
}
throw new Error(`Failed to install package "${module}"`);
}
async function requireBun(platform: Platform): Promise<string> {
const module = `${owner}/${platform.bin}`;
function resolveBun() {
const exe = require.resolve(join(module, platform.exe));
const { exitCode, stderr, stdout } = spawn(exe, ["--version"]);
if (exitCode === 0) {
return exe;
}
throw new Error(stderr || stdout);
}
try {
return resolveBun();
} catch (cause) {
debug("resolveBun failed", cause);
error(
`Failed to find package "${module}".`,
`You may have used the "--no-optional" flag when running "npm install".`,
);
}
const cwd = join("node_modules", module);
try {
installBun(platform, cwd);
} catch (cause) {
debug("installBun failed", cause);
error(`Failed to install package "${module}" using "npm install".`, cause);
try {
await downloadBun(platform, cwd);
} catch (cause) {
debug("downloadBun failed", cause);
error(`Failed to download package "${module}" from "registry.npmjs.org".`, cause);
}
}
return resolveBun();
}
function installBun(platform: Platform, dst: string): void {
const module = `${owner}/${platform.bin}`;
const cwd = tmp();
try {
write(join(cwd, "package.json"), "{}");
const { exitCode } = spawn(
"npm",
["install", "--loglevel=error", "--prefer-offline", "--no-audit", "--progress=false", `${module}@${version}`],
{
cwd,
stdio: "pipe",
env: {
...process.env,
npm_config_global: undefined,
},
},
);
if (exitCode === 0) {
rename(join(cwd, "node_modules", module), dst);
}
} finally {
try {
rm(cwd);
} catch (error) {
debug("rm failed", error);
// There is nothing to do if the directory cannot be cleaned up.
}
}
}
async function downloadBun(platform: Platform, dst: string): Promise<void> {
const response = await fetch(`https://registry.npmjs.org/${owner}/${platform.bin}/-/${platform.bin}-${version}.tgz`);
const tgz = await response.arrayBuffer();
let buffer: Buffer;
try {
buffer = unzipSync(tgz);
} catch (cause) {
throw new Error("Invalid gzip data", { cause });
}
function str(i: number, n: number): string {
return String.fromCharCode(...buffer.subarray(i, i + n)).replace(/\0.*$/, "");
}
let offset = 0;
while (offset < buffer.length) {
const name = str(offset, 100).replace("package/", "");
const size = parseInt(str(offset + 124, 12), 8);
offset += 512;
if (!isNaN(size)) {
write(join(dst, name), buffer.subarray(offset, offset + size));
if (name === platform.exe) {
try {
chmod(join(dst, name), 0o755);
} catch (error) {
debug("chmod failed", error);
}
}
offset += (size + 511) & ~511;
}
}
}
export function optimizeBun(path: string): void {
if (os === "win32") {
throw new Error(
"You must use Windows Subsystem for Linux, aka. WSL, to run bun. Learn more: https://learn.microsoft.com/en-us/windows/wsl/install",
);
}
const { npm_config_user_agent } = process.env;
if (npm_config_user_agent && /\byarn\//.test(npm_config_user_agent)) {
throw new Error(
"Yarn does not support bun, because it does not allow linking to binaries. To use bun, install using the following command: curl -fsSL https://bun.sh/install | bash",
);
}
try {
rename(path, join(__dirname, "bin", "bun"));
return;
} catch (error) {
debug("optimizeBun failed", error);
}
throw new Error(
"Your package manager doesn't seem to support bun. To use bun, install using the following command: curl -fsSL https://bun.sh/install | bash",
);
}

View File

@@ -1,91 +0,0 @@
import { spawn } from "./spawn";
import { read } from "./fs";
import { debug } from "./console";
export const os = process.platform;
export const arch = os === "darwin" && process.arch === "x64" && isRosetta2() ? "arm64" : process.arch;
export const avx2 = (arch === "x64" && os === "linux" && isLinuxAVX2()) || (os === "darwin" && isDarwinAVX2());
export type Platform = {
os: string;
arch: string;
avx2?: boolean;
bin: string;
exe: string;
};
export const platforms: Platform[] = [
{
os: "darwin",
arch: "arm64",
bin: "bun-darwin-aarch64",
exe: "bin/bun",
},
{
os: "darwin",
arch: "x64",
avx2: true,
bin: "bun-darwin-x64",
exe: "bin/bun",
},
{
os: "darwin",
arch: "x64",
bin: "bun-darwin-x64-baseline",
exe: "bin/bun",
},
{
os: "linux",
arch: "arm64",
bin: "bun-linux-aarch64",
exe: "bin/bun",
},
{
os: "linux",
arch: "x64",
avx2: true,
bin: "bun-linux-x64",
exe: "bin/bun",
},
{
os: "linux",
arch: "x64",
bin: "bun-linux-x64-baseline",
exe: "bin/bun",
},
];
export const supportedPlatforms: Platform[] = platforms
.filter(platform => platform.os === os && platform.arch === arch && (!platform.avx2 || avx2))
.sort((a, b) => (a.avx2 === b.avx2 ? 0 : a.avx2 ? -1 : 1));
function isLinuxAVX2(): boolean {
try {
return read("/proc/cpuinfo").includes("avx2");
} catch (error) {
debug("isLinuxAVX2 failed", error);
return false;
}
}
function isDarwinAVX2(): boolean {
try {
const { exitCode, stdout } = spawn("sysctl", ["-n", "machdep.cpu"]);
return exitCode === 0 && stdout.includes("AVX2");
} catch (error) {
debug("isDarwinAVX2 failed", error);
return false;
}
}
function isRosetta2(): boolean {
try {
const { exitCode, stdout } = spawn("sysctl", ["-n", "sysctl.proc_translated"]);
return exitCode === 0 && stdout.includes("1");
} catch (error) {
debug("isRosetta2 failed", error);
return false;
}
}

View File

@@ -1,24 +0,0 @@
import child_process from "child_process";
import { debug } from "./console";
export function spawn(
cmd: string,
args: string[],
options: child_process.SpawnOptions = {},
): {
exitCode: number;
stdout: string;
stderr: string;
} {
debug("spawn", [cmd, ...args].join(" "));
const { status, stdout, stderr } = child_process.spawnSync(cmd, args, {
stdio: "pipe",
encoding: "utf-8",
...options,
});
return {
exitCode: status ?? 1,
stdout,
stderr,
};
}

View File

@@ -1,17 +0,0 @@
{
"compilerOptions": {
"lib": ["ESNext"],
"module": "ESNext",
"target": "ESNext",
"moduleResolution": "node",
"types": ["bun-types"],
"esModuleInterop": true,
"allowJs": true,
"strict": true,
"resolveJsonModule": true
},
"include": [
"src",
"scripts"
]
}

View File

@@ -1,3 +0,0 @@
.DS_Store
.env
node_modules

View File

@@ -1,3 +0,0 @@
# bun-test
Scripts to run Bun's tests using `bun wiptest`.
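A typical invocation from this directory (a sketch; the `test` script maps to `src/runner.ts` in `package.json`):

```sh
bun install     # install @actions/core and the other dev dependencies
bun run test    # runs src/runner.ts, which spawns `bun wiptest` for each test file
```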

Binary file not shown.

View File

@@ -1,13 +0,0 @@
{
"private": true,
"dependencies": {
"@actions/core": "^1.10.0"
},
"devDependencies": {
"bun-types": "canary",
"prettier": "^2.8.2"
},
"scripts": {
"test": "bun run src/runner.ts"
}
}

View File

@@ -1,86 +0,0 @@
import { spawn } from "bun";
import { readdirSync } from "node:fs";
import { resolve } from "node:path";
import * as action from "@actions/core";
const cwd = resolve("../..");
const isAction = !!process.env["GITHUB_ACTION"];
const errorPattern = /error: ([\S\s]*?)(?=\n.*?at (\/.*):(\d+):(\d+))/mgi;
function* findTests(dir: string, query?: string): Generator<string> {
for (const entry of readdirSync(resolve(dir), { encoding: "utf-8", withFileTypes: true })) {
const path = resolve(dir, entry.name);
if (entry.isDirectory()) {
yield* findTests(path, query);
} else if (entry.isFile() && entry.name.includes(".test.")) {
yield path;
}
}
}
async function runTest(path: string): Promise<void> {
const name = path.replace(cwd, "").slice(1);
const runner = await spawn({
cwd,
cmd: ["bun", "wiptest", path],
stdout: "pipe",
stderr: "pipe",
});
const exitCode = await Promise.race([
new Promise((resolve) => {
setTimeout(() => {
runner.kill();
resolve(124); // Timed Out
}, 60_000);
}),
runner.exited,
]);
if (isAction) {
const prefix = exitCode === 0
? "PASS"
: `FAIL (exit code ${exitCode})`;
action.startGroup(`${prefix} - ${name}`);
}
for (const stdout of [runner.stdout, runner.stderr]) {
if (!stdout) {
continue;
}
const reader = stdout.getReader();
while (true) {
const { value, done } = await reader.read();
if (value) {
console.write(value);
if (isAction) {
findErrors(value);
}
}
if (done) {
break;
}
}
}
if (isAction) {
action.endGroup();
}
}
let failed = false;
function findErrors(data: Uint8Array): void {
const text = new TextDecoder().decode(data);
for (const [message, _, path, line, col] of text.matchAll(errorPattern)) {
failed = true;
action.error(message, {
file: path.replace(cwd, "").slice(1),
startLine: parseInt(line),
startColumn: parseInt(col),
});
}
}
const tests = [];
for (const path of findTests(resolve(cwd, "test/bun.js"))) {
tests.push(runTest(path).catch(console.error));
}
await Promise.allSettled(tests);
process.exit(failed ? 1 : 0);

View File

@@ -1,16 +0,0 @@
{
"compilerOptions": {
"lib": ["ESNext"],
"module": "ESNext",
"target": "ESNext",
"moduleResolution": "node",
"types": ["bun-types"],
"esModuleInterop": true,
"allowJs": true,
"strict": true,
"resolveJsonModule": true
},
"include": [
"src"
]
}

View File

@@ -1,6 +0,0 @@
{
"arrowParens": "avoid",
"printWidth": 80,
"trailingComma": "all",
"useTabs": false
}

View File

@@ -22,10 +22,10 @@ Add this to your `tsconfig.json` or `jsconfig.json`:
```jsonc
{
"compilerOptions": {
"lib": ["ESNext"],
"module": "ESNext",
"target": "ESNext",
"moduleResolution": "Node",
"lib": ["esnext"],
"module": "esnext",
"target": "esnext",
"moduleResolution": "node",
// "bun-types" is the important part
"types": ["bun-types"]
}

View File

@@ -767,7 +767,7 @@ declare module "bun" {
* const query = UserQuery;
* ```
*/
macro?: MacroMap;
macros?: MacroMap;
autoImportJSX?: boolean;
allowBunRuntime?: boolean;
@@ -1785,7 +1785,7 @@ declare module "bun" {
*
*/
// tslint:disable-next-line:unified-signatures
export function file(path: string | URL, options?: BlobPropertyBag): FileBlob;
export function file(path: string, options?: BlobPropertyBag): FileBlob;
/**
* `Blob` that leverages the fastest system calls available to operate on files.
@@ -2151,40 +2151,6 @@ declare module "bun" {
static readonly algorithms: SupportedCryptoAlgorithms[];
}
/**
* Resolve a `Promise` after milliseconds. This is like
* {@link setTimeout} except it returns a `Promise`.
*
* @param ms milliseconds to delay resolving the promise. This is a minimum
* number. It may take longer. If a {@link Date} is passed, it will sleep until the
* {@link Date} is reached.
*
* @example
* ## Sleep for 1 second
* ```ts
* import { sleep } from "bun";
*
* await sleep(1000);
* ```
* ## Sleep for 10 milliseconds
* ```ts
* await Bun.sleep(10);
* ```
* ## Sleep until `Date`
*
* ```ts
* const target = new Date();
* target.setSeconds(target.getSeconds() + 1);
* await Bun.sleep(target);
* ```
* Internally, `Bun.sleep` is the equivalent of
* ```ts
* await new Promise((resolve) => setTimeout(resolve, ms));
* ```
* As always, you can use `Bun.sleep` or the imported `sleep` function interchangeably.
*/
export function sleep(ms: number | Date): Promise<void>;
/**
* Sleep the thread for a given number of milliseconds
*
@@ -2795,26 +2761,11 @@ declare module "bun" {
interface TCPSocket extends Socket {}
interface TLSSocket extends Socket {}
type BinaryTypeList = {
arraybuffer: ArrayBuffer;
buffer: Buffer;
uint8array: Uint8Array;
// TODO: DataView
// dataview: DataView;
};
type BinaryType = keyof BinaryTypeList;
interface SocketHandler<
Data = unknown,
DataBinaryType extends BinaryType = "buffer",
> {
interface SocketHandler<Data = unknown> {
open(socket: Socket<Data>): void | Promise<void>;
close?(socket: Socket<Data>): void | Promise<void>;
error?(socket: Socket<Data>, error: Error): void | Promise<void>;
data?(
socket: Socket<Data>,
data: BinaryTypeList[DataBinaryType],
): void | Promise<void>;
data?(socket: Socket<Data>, data: BufferSource): void | Promise<void>;
drain?(socket: Socket<Data>): void | Promise<void>;
/**
@@ -2837,23 +2788,6 @@ declare module "bun" {
* to the promise rejection queue.
*/
connectError?(socket: Socket<Data>, error: Error): void | Promise<void>;
/**
* Choose what `ArrayBufferView` is returned in the {@link SocketHandler.data} callback.
*
* @default "buffer"
*
* @remarks
* This lets you select the desired binary type for the `data` callback.
* It's a small performance optimization to let you avoid creating extra
* ArrayBufferView objects when possible.
*
* Bun originally defaulted to `Uint8Array`, but when dealing with network
* data it's more useful to be able to read the bytes directly, which
* `Buffer` allows.
*
*/
binaryType?: BinaryType;
}
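A handler object of this shape is what `Bun.listen` consumes. A rough usage sketch under that assumption (hostname, port, and the echo behavior are illustrative):

```ts
// Minimal echo server wired to the SocketHandler shape declared above.
const server = Bun.listen({
  hostname: "127.0.0.1", // illustrative
  port: 4000, // illustrative
  socket: {
    open(socket) {
      socket.write("hello\n");
    },
    data(socket, data) {
      // `data` arrives as a BufferSource per the handler declaration
      socket.write(data);
    },
    error(socket, error) {
      console.error("socket error:", error);
    },
  },
});
```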
interface SocketOptions<Data = unknown> {
@@ -3149,7 +3083,6 @@ declare module "bun" {
* ```
*/
readonly params: Record<string, string>;
readonly filePath: string;
readonly pathname: string;
readonly query: Record<string, string>;
readonly name: string;

Binary file not shown.

View File

@@ -1,148 +0,0 @@
/**
* The `console` module provides a simple debugging console that is similar to the
* JavaScript console mechanism provided by web browsers.
*
* The module exports two specific components:
*
* A `Console` class with methods such as `console.log()`, `console.error()` and `console.warn()` that can be used to write to any Node.js stream.
* A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling `require('console')`.
*
* _**Warning**_: The global console object's methods are neither consistently
* synchronous like the browser APIs they resemble, nor are they consistently
* asynchronous like all other Node.js streams. See the `note on process I/O` for
* more information.
*
* Example using the global `console`:
*
* ```js
* console.log('hello world');
* // Prints: hello world, to stdout
* console.log('hello %s', 'world');
* // Prints: hello world, to stdout
* console.error(new Error('Whoops, something bad happened'));
* // Prints error message and stack trace to stderr:
* // Error: Whoops, something bad happened
* // at [eval]:5:15
* // at Script.runInThisContext (node:vm:132:18)
* // at Object.runInThisContext (node:vm:309:38)
* // at node:internal/process/execution:77:19
* // at [eval]-wrapper:6:22
* // at evalScript (node:internal/process/execution:76:60)
* // at node:internal/main/eval_string:23:3
*
* const name = 'Will Robinson';
* console.warn(`Danger ${name}! Danger!`);
* // Prints: Danger Will Robinson! Danger!, to stderr
* ```
*
* Example using the `Console` class:
*
* ```js
* const out = getStreamSomehow();
* const err = getStreamSomehow();
* const myConsole = new console.Console(out, err);
*
* myConsole.log('hello world');
* // Prints: hello world, to out
* myConsole.log('hello %s', 'world');
* // Prints: hello world, to out
* myConsole.error(new Error('Whoops, something bad happened'));
* // Prints: [Error: Whoops, something bad happened], to err
*
* const name = 'Will Robinson';
* myConsole.warn(`Danger ${name}! Danger!`);
* // Prints: Danger Will Robinson! Danger!, to err
* ```
* @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/console.js)
*/
declare module "console" {
import console = require("node:console");
export = console;
}
declare module "node:console" {
// import { InspectOptions } from "node:util";
// global {
/**
* The `console` module provides a simple debugging console that is similar to the
* JavaScript console mechanism provided by web browsers.
*
* The module exports two specific components:
*
* A `Console` class with methods such as `console.log()`, `console.error()` and `console.warn()` that can be used to write to any Node.js stream.
* A global `console` instance configured to write to `process.stdout` and `process.stderr`. The global `console` can be used without calling `require('console')`.
*
* _**Warning**_: The global console object's methods are neither consistently
* synchronous like the browser APIs they resemble, nor are they consistently
* asynchronous like all other Node.js streams. See the `note on process I/O` for
* more information.
*
* Example using the global `console`:
*
* ```js
* console.log('hello world');
* // Prints: hello world, to stdout
* console.log('hello %s', 'world');
* // Prints: hello world, to stdout
* console.error(new Error('Whoops, something bad happened'));
* // Prints error message and stack trace to stderr:
* // Error: Whoops, something bad happened
* // at [eval]:5:15
* // at Script.runInThisContext (node:vm:132:18)
* // at Object.runInThisContext (node:vm:309:38)
* // at node:internal/process/execution:77:19
* // at [eval]-wrapper:6:22
* // at evalScript (node:internal/process/execution:76:60)
* // at node:internal/main/eval_string:23:3
*
* const name = 'Will Robinson';
* console.warn(`Danger ${name}! Danger!`);
* // Prints: Danger Will Robinson! Danger!, to stderr
* ```
*
* Example using the `Console` class:
*
* ```js
* const out = getStreamSomehow();
* const err = getStreamSomehow();
* const myConsole = new console.Console(out, err);
*
* myConsole.log('hello world');
* // Prints: hello world, to out
* myConsole.log('hello %s', 'world');
* // Prints: hello world, to out
* myConsole.error(new Error('Whoops, something bad happened'));
* // Prints: [Error: Whoops, something bad happened], to err
*
* const name = 'Will Robinson';
* myConsole.warn(`Danger ${name}! Danger!`);
* // Prints: Danger Will Robinson! Danger!, to err
* ```
* @see [source](https://github.com/nodejs/node/blob/v16.4.2/lib/console.js)
*/
// import {Writable} from "node:stream";
// namespace console {
// interface ConsoleConstructorOptions {
// stdout: Writable;
// stderr?: Writable | undefined;
// ignoreErrors?: boolean | undefined;
// colorMode?: boolean | "auto" | undefined;
// inspectOptions?: InspectOptions | undefined;
// /**
// * Set group indentation
// * @default 2
// */
// groupIndentation?: number | undefined;
// }
// interface ConsoleConstructor {
// prototype: Console;
// new (stdout: Writable, stderr?: Writable, ignoreErrors?: boolean): Console;
// new (options: ConsoleConstructorOptions): Console;
// }
// }
// }
// const console: Console;
export = console;
}

View File

@@ -318,112 +318,127 @@ declare module "dns" {
hostname: string,
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "A",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "AAAA",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "ANY",
callback: (err: ErrnoException | null, addresses: AnyRecord[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "CNAME",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "MX",
callback: (err: ErrnoException | null, addresses: MxRecord[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "NAPTR",
callback: (err: ErrnoException | null, addresses: NaptrRecord[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "NS",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "PTR",
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "SOA",
callback: (err: ErrnoException | null, addresses: SoaRecord) => void,
): void;
export function resolve(
hostname: string,
rrtype: "SRV",
callback: (err: ErrnoException | null, addresses: SrvRecord[]) => void,
): void;
export function resolve(
hostname: string,
rrtype: "TXT",
callback: (err: ErrnoException | null, addresses: string[][]) => void,
): void;
export function resolve(
hostname: string,
rrtype: string,
callback: (
err: ErrnoException | null,
addresses:
| string[]
| MxRecord[]
| NaptrRecord[]
| SoaRecord
| SrvRecord[]
| string[][]
| AnyRecord[],
) => void,
): void;
export namespace resolve {
function __promisify__(
hostname: string,
rrtype?: "A" | "AAAA" | "CNAME" | "NS" | "PTR",
): Promise<string[]>;
function __promisify__(
hostname: string,
rrtype: "ANY",
): Promise<AnyRecord[]>;
function __promisify__(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
function __promisify__(
hostname: string,
rrtype: "NAPTR",
): Promise<NaptrRecord[]>;
function __promisify__(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
function __promisify__(
hostname: string,
rrtype: "SRV",
): Promise<SrvRecord[]>;
function __promisify__(
hostname: string,
rrtype: "TXT",
): Promise<string[][]>;
function __promisify__(
hostname: string,
rrtype: string,
): Promise<
| string[]
| MxRecord[]
| NaptrRecord[]
| SoaRecord
| SrvRecord[]
| string[][]
| AnyRecord[]
>;
}
// export function resolve(
// hostname: string,
// rrtype: "A",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "AAAA",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "ANY",
// callback: (
// err: ErrnoException | null,
// addresses: AnyRecord[],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "CNAME",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "MX",
// callback: (
// err: ErrnoException | null,
// addresses: MxRecord[],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "NAPTR",
// callback: (
// err: ErrnoException | null,
// addresses: NaptrRecord[],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "NS",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "PTR",
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "SOA",
// callback: (err: ErrnoException | null, addresses: SoaRecord) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "SRV",
// callback: (
// err: ErrnoException | null,
// addresses: SrvRecord[],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: "TXT",
// callback: (
// err: ErrnoException | null,
// addresses: string[][],
// ) => void,
// ): void;
// export function resolve(
// hostname: string,
// rrtype: string,
// callback: (
// err: ErrnoException | null,
// addresses:
// | string[]
// | MxRecord[]
// | NaptrRecord[]
// | SoaRecord
// | SrvRecord[]
// | string[][]
// | AnyRecord[],
// ) => void,
// ): void;
// export namespace resolve {
// function __promisify__(
// hostname: string,
// rrtype?: "A" | "AAAA" | "CNAME" | "NS" | "PTR",
// ): Promise<string[]>;
// function __promisify__(
// hostname: string,
// rrtype: "ANY",
// ): Promise<AnyRecord[]>;
// function __promisify__(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
// function __promisify__(
// hostname: string,
// rrtype: "NAPTR",
// ): Promise<NaptrRecord[]>;
// function __promisify__(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
// function __promisify__(
// hostname: string,
// rrtype: "SRV",
// ): Promise<SrvRecord[]>;
// function __promisify__(
// hostname: string,
// rrtype: "TXT",
// ): Promise<string[][]>;
// function __promisify__(
// hostname: string,
// rrtype: string,
// ): Promise<
// | string[]
// | MxRecord[]
// | NaptrRecord[]
// | SoaRecord
// | SrvRecord[]
// | string[][]
// | AnyRecord[]
// >;
// }
/**
* Uses the DNS protocol to resolve IPv4 addresses (`A` records) for the `hostname`. The `addresses` argument passed to the `callback` function
* will contain an array of IPv4 addresses (e.g. `['74.125.79.104', '74.125.79.105', '74.125.79.106']`).
@@ -497,38 +512,38 @@ declare module "dns" {
* will contain an array of canonical name records available for the `hostname` (e.g. `['bar.example.com']`).
* @since v0.3.2
*/
export function resolveCname(
hostname: string,
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export namespace resolveCname {
function __promisify__(hostname: string): Promise<string[]>;
}
// export function resolveCname(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export namespace resolveCname {
// function __promisify__(hostname: string): Promise<string[]>;
// }
/**
* Uses the DNS protocol to resolve `CAA` records for the `hostname`. The `addresses` argument passed to the `callback` function
* will contain an array of certification authority authorization records
* available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'}, {critical: 128, issue: 'pki.example.com'}]`).
* @since v15.0.0, v14.17.0
*/
export function resolveCaa(
hostname: string,
callback: (err: ErrnoException | null, records: CaaRecord[]) => void,
): void;
export namespace resolveCaa {
function __promisify__(hostname: string): Promise<CaaRecord[]>;
}
// export function resolveCaa(
// hostname: string,
// callback: (err: ErrnoException | null, records: CaaRecord[]) => void,
// ): void;
// export namespace resolveCaa {
// function __promisify__(hostname: string): Promise<CaaRecord[]>;
// }
/**
* Uses the DNS protocol to resolve mail exchange records (`MX` records) for the `hostname`. The `addresses` argument passed to the `callback` function will
* contain an array of objects containing both a `priority` and `exchange` property (e.g. `[{priority: 10, exchange: 'mx.example.com'}, ...]`).
* @since v0.1.27
*/
export function resolveMx(
hostname: string,
callback: (err: ErrnoException | null, addresses: MxRecord[]) => void,
): void;
export namespace resolveMx {
function __promisify__(hostname: string): Promise<MxRecord[]>;
}
// export function resolveMx(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: MxRecord[]) => void,
// ): void;
// export namespace resolveMx {
// function __promisify__(hostname: string): Promise<MxRecord[]>;
// }
/**
* Uses the DNS protocol to resolve regular expression based records (`NAPTR` records) for the `hostname`. The `addresses` argument passed to the `callback` function will contain an array of
* objects with the following properties:
@@ -552,37 +567,37 @@ declare module "dns" {
* ```
* @since v0.9.12
*/
export function resolveNaptr(
hostname: string,
callback: (err: ErrnoException | null, addresses: NaptrRecord[]) => void,
): void;
export namespace resolveNaptr {
function __promisify__(hostname: string): Promise<NaptrRecord[]>;
}
// export function resolveNaptr(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: NaptrRecord[]) => void,
// ): void;
// export namespace resolveNaptr {
// function __promisify__(hostname: string): Promise<NaptrRecord[]>;
// }
/**
* Uses the DNS protocol to resolve name server records (`NS` records) for the `hostname`. The `addresses` argument passed to the `callback` function will
* contain an array of name server records available for `hostname` (e.g. `['ns1.example.com', 'ns2.example.com']`).
* @since v0.1.90
*/
export function resolveNs(
hostname: string,
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export namespace resolveNs {
function __promisify__(hostname: string): Promise<string[]>;
}
// export function resolveNs(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export namespace resolveNs {
// function __promisify__(hostname: string): Promise<string[]>;
// }
/**
* Uses the DNS protocol to resolve pointer records (`PTR` records) for the `hostname`. The `addresses` argument passed to the `callback` function will
* be an array of strings containing the reply records.
* @since v6.0.0
*/
export function resolvePtr(
hostname: string,
callback: (err: ErrnoException | null, addresses: string[]) => void,
): void;
export namespace resolvePtr {
function __promisify__(hostname: string): Promise<string[]>;
}
// export function resolvePtr(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: string[]) => void,
// ): void;
// export namespace resolvePtr {
// function __promisify__(hostname: string): Promise<string[]>;
// }
/**
* Uses the DNS protocol to resolve a start of authority record (`SOA` record) for
* the `hostname`. The `address` argument passed to the `callback` function will
@@ -609,13 +624,13 @@ declare module "dns" {
* ```
* @since v0.11.10
*/
export function resolveSoa(
hostname: string,
callback: (err: ErrnoException | null, address: SoaRecord) => void,
): void;
export namespace resolveSoa {
function __promisify__(hostname: string): Promise<SoaRecord>;
}
// export function resolveSoa(
// hostname: string,
// callback: (err: ErrnoException | null, address: SoaRecord) => void,
// ): void;
// export namespace resolveSoa {
// function __promisify__(hostname: string): Promise<SoaRecord>;
// }
/**
* Uses the DNS protocol to resolve service records (`SRV` records) for the `hostname`. The `addresses` argument passed to the `callback` function will
* be an array of objects with the following properties:
@@ -635,13 +650,13 @@ declare module "dns" {
* ```
* @since v0.1.27
*/
export function resolveSrv(
hostname: string,
callback: (err: ErrnoException | null, addresses: SrvRecord[]) => void,
): void;
export namespace resolveSrv {
function __promisify__(hostname: string): Promise<SrvRecord[]>;
}
// export function resolveSrv(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: SrvRecord[]) => void,
// ): void;
// export namespace resolveSrv {
// function __promisify__(hostname: string): Promise<SrvRecord[]>;
// }
/**
* Uses the DNS protocol to resolve text queries (`TXT` records) for the `hostname`. The `records` argument passed to the `callback` function is a
* two-dimensional array of the text records available for `hostname` (e.g. `[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of
@@ -649,13 +664,13 @@ declare module "dns" {
* treated separately.
* @since v0.1.27
*/
export function resolveTxt(
hostname: string,
callback: (err: ErrnoException | null, addresses: string[][]) => void,
): void;
export namespace resolveTxt {
function __promisify__(hostname: string): Promise<string[][]>;
}
// export function resolveTxt(
// hostname: string,
// callback: (err: ErrnoException | null, addresses: string[][]) => void,
// ): void;
// export namespace resolveTxt {
// function __promisify__(hostname: string): Promise<string[][]>;
// }
/**
* Uses the DNS protocol to resolve all records (also known as `ANY` or `*` query).
* The `ret` argument passed to the `callback` function will be an array containing
@@ -846,14 +861,14 @@ declare module "dns" {
resolve4: typeof resolve4;
resolve6: typeof resolve6;
// resolveAny: typeof resolveAny;
resolveCname: typeof resolveCname;
resolveMx: typeof resolveMx;
resolveNaptr: typeof resolveNaptr;
resolveNs: typeof resolveNs;
resolvePtr: typeof resolvePtr;
resolveSoa: typeof resolveSoa;
resolveSrv: typeof resolveSrv;
resolveTxt: typeof resolveTxt;
// resolveCname: typeof resolveCname;
// resolveMx: typeof resolveMx;
// resolveNaptr: typeof resolveNaptr;
// resolveNs: typeof resolveNs;
// resolvePtr: typeof resolvePtr;
// resolveSoa: typeof resolveSoa;
// resolveSrv: typeof resolveSrv;
// resolveTxt: typeof resolveTxt;
// reverse: typeof reverse;
/**
* The resolver instance will send its requests from the specified IP address.

View File

@@ -10,12 +10,12 @@ declare module "dns/promises" {
LookupOneOptions,
LookupAllOptions,
LookupOptions,
AnyRecord,
CaaRecord,
MxRecord,
NaptrRecord,
SoaRecord,
SrvRecord,
// AnyRecord,
// CaaRecord,
// MxRecord,
// NaptrRecord,
// SoaRecord,
// SrvRecord,
ResolveWithTtlOptions,
RecordWithTtl,
ResolveOptions,
@@ -134,30 +134,30 @@ declare module "dns/promises" {
* @param [rrtype='A'] Resource record type.
*/
function resolve(hostname: string): Promise<string[]>;
function resolve(hostname: string, rrtype: "A"): Promise<string[]>;
function resolve(hostname: string, rrtype: "AAAA"): Promise<string[]>;
function resolve(hostname: string, rrtype: "ANY"): Promise<AnyRecord[]>;
function resolve(hostname: string, rrtype: "CAA"): Promise<CaaRecord[]>;
function resolve(hostname: string, rrtype: "CNAME"): Promise<string[]>;
function resolve(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
function resolve(hostname: string, rrtype: "NAPTR"): Promise<NaptrRecord[]>;
function resolve(hostname: string, rrtype: "NS"): Promise<string[]>;
function resolve(hostname: string, rrtype: "PTR"): Promise<string[]>;
function resolve(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
function resolveSrv(hostname: string): Promise<SrvRecord[]>;
function resolve(hostname: string, rrtype: "TXT"): Promise<string[][]>;
function resolve(
hostname: string,
rrtype: string,
): Promise<
| string[]
| MxRecord[]
| NaptrRecord[]
| SoaRecord
| SrvRecord[]
| string[][]
| AnyRecord[]
>;
// function resolve(hostname: string, rrtype: "A"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "AAAA"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "ANY"): Promise<AnyRecord[]>;
// function resolve(hostname: string, rrtype: "CAA"): Promise<CaaRecord[]>;
// function resolve(hostname: string, rrtype: "CNAME"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "MX"): Promise<MxRecord[]>;
// function resolve(hostname: string, rrtype: "NAPTR"): Promise<NaptrRecord[]>;
// function resolve(hostname: string, rrtype: "NS"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "PTR"): Promise<string[]>;
// function resolve(hostname: string, rrtype: "SOA"): Promise<SoaRecord>;
// function resolve(hostname: string, rrtype: "SRV"): Promise<SrvRecord[]>;
// function resolve(hostname: string, rrtype: "TXT"): Promise<string[][]>;
// function resolve(
// hostname: string,
// rrtype: string,
// ): Promise<
// | string[]
// | MxRecord[]
// | NaptrRecord[]
// | SoaRecord
// | SrvRecord[]
// | string[][]
// | AnyRecord[]
// >;
/**
* Uses the DNS protocol to resolve IPv4 addresses (`A` records) for the `hostname`. On success, the `Promise` is resolved with an array of IPv4
* addresses (e.g. `['74.125.79.104', '74.125.79.105', '74.125.79.106']`).
@@ -223,20 +223,20 @@ declare module "dns/promises" {
* certification authority authorization records available for the `hostname` (e.g. `[{critical: 0, iodef: 'mailto:pki@example.com'}, {critical: 128, issue: 'pki.example.com'}]`).
* @since v15.0.0, v14.17.0
*/
function resolveCaa(hostname: string): Promise<CaaRecord[]>;
// function resolveCaa(hostname: string): Promise<CaaRecord[]>;
/**
* Uses the DNS protocol to resolve `CNAME` records for the `hostname`. On success,
* the `Promise` is resolved with an array of canonical name records available for
* the `hostname` (e.g. `['bar.example.com']`).
* @since v10.6.0
*/
function resolveCname(hostname: string): Promise<string[]>;
// function resolveCname(hostname: string): Promise<string[]>;
/**
* Uses the DNS protocol to resolve mail exchange records (`MX` records) for the `hostname`. On success, the `Promise` is resolved with an array of objects
* containing both a `priority` and `exchange` property (e.g. `[{priority: 10, exchange: 'mx.example.com'}, ...]`).
* @since v10.6.0
*/
function resolveMx(hostname: string): Promise<MxRecord[]>;
// function resolveMx(hostname: string): Promise<MxRecord[]>;
/**
* Uses the DNS protocol to resolve regular expression based records (`NAPTR` records) for the `hostname`. On success, the `Promise` is resolved with an array
* of objects with the following properties:
@@ -260,19 +260,19 @@ declare module "dns/promises" {
* ```
* @since v10.6.0
*/
function resolveNaptr(hostname: string): Promise<NaptrRecord[]>;
// function resolveNaptr(hostname: string): Promise<NaptrRecord[]>;
/**
* Uses the DNS protocol to resolve name server records (`NS` records) for the `hostname`. On success, the `Promise` is resolved with an array of name server
* records available for `hostname` (e.g. `['ns1.example.com', 'ns2.example.com']`).
* @since v10.6.0
*/
function resolveNs(hostname: string): Promise<string[]>;
// function resolveNs(hostname: string): Promise<string[]>;
/**
* Uses the DNS protocol to resolve pointer records (`PTR` records) for the `hostname`. On success, the `Promise` is resolved with an array of strings
* containing the reply records.
* @since v10.6.0
*/
function resolvePtr(hostname: string): Promise<string[]>;
// function resolvePtr(hostname: string): Promise<string[]>;
/**
* Uses the DNS protocol to resolve a start of authority record (`SOA` record) for
* the `hostname`. On success, the `Promise` is resolved with an object with the
@@ -299,7 +299,7 @@ declare module "dns/promises" {
* ```
* @since v10.6.0
*/
function resolveSoa(hostname: string): Promise<SoaRecord>;
// function resolveSoa(hostname: string): Promise<SoaRecord>;
/**
* Uses the DNS protocol to resolve service records (`SRV` records) for the `hostname`. On success, the `Promise` is resolved with an array of objects with
* the following properties:
@@ -319,7 +319,7 @@ declare module "dns/promises" {
* ```
* @since v10.6.0
*/
function resolveSrv(hostname: string): Promise<SrvRecord[]>;
// function resolveSrv(hostname: string): Promise<SrvRecord[]>;
/**
* Uses the DNS protocol to resolve text queries (`TXT` records) for the `hostname`. On success, the `Promise` is resolved with a two-dimensional array
* of the text records available for `hostname` (e.g. `[ ['v=spf1 ip4:0.0.0.0 ', '~all' ] ]`). Each sub-array contains TXT chunks of
@@ -327,7 +327,7 @@ declare module "dns/promises" {
* treated separately.
* @since v10.6.0
*/
function resolveTxt(hostname: string): Promise<string[][]>;
// function resolveTxt(hostname: string): Promise<string[][]>;
/**
* Performs a reverse DNS query that resolves an IPv4 or IPv6 address to an
* array of host names.
@@ -384,14 +384,14 @@ declare module "dns/promises" {
resolve4: typeof resolve4;
resolve6: typeof resolve6;
// resolveAny: typeof resolveAny;
resolveCname: typeof resolveCname;
resolveMx: typeof resolveMx;
resolveNaptr: typeof resolveNaptr;
resolveNs: typeof resolveNs;
resolvePtr: typeof resolvePtr;
resolveSoa: typeof resolveSoa;
resolveSrv: typeof resolveSrv;
resolveTxt: typeof resolveTxt;
// resolveCname: typeof resolveCname;
// resolveMx: typeof resolveMx;
// resolveNaptr: typeof resolveNaptr;
// resolveNs: typeof resolveNs;
// resolvePtr: typeof resolvePtr;
// resolveSoa: typeof resolveSoa;
// resolveSrv: typeof resolveSrv;
// resolveTxt: typeof resolveTxt;
// reverse: typeof reverse;
// setLocalAddress(ipv4?: string, ipv6?: string): void;
// setServers: typeof setServers;

View File

@@ -69,7 +69,7 @@ interface ArrayConstructor {
asyncItems: AsyncIterable<T> | Iterable<T> | ArrayLike<T>,
mapfn?: (value: any, index: number) => any,
thisArg?: any,
): Promise<Array<T>>;
): Array<T>;
}
interface Console {
@@ -344,7 +344,7 @@ interface Process {
arch: Architecture;
platform: Platform;
argv: string[];
execArgv: string[];
// execArgv: string[];
env: Bun.Env;
/** Whether you are using Bun */
@@ -414,7 +414,6 @@ interface BlobInterface {
text(): Promise<string>;
arrayBuffer(): Promise<ArrayBuffer>;
json<TJSONReturnType = unknown>(): Promise<TJSONReturnType>;
formData(): Promise<FormData>;
}
type BlobPart = string | Blob | BufferSource | ArrayBuffer;
@@ -502,51 +501,6 @@ type ResponseType =
| "opaque"
| "opaqueredirect";
type FormDataEntryValue = Blob | string;
/** Provides a way to easily construct a set of key/value pairs representing
* form fields and their values, which can then be easily sent using the
* XMLHttpRequest.send() method. It uses the same format a form would use if the
* encoding type were set to "multipart/form-data".
*/
interface FormData {
/**
* Appends a new value onto an existing key inside a FormData object, or adds
* the key if it does not already exist.
*
* @param name The name of the field whose data is contained in value.
* @param value The field's value.
* @param fileName The filename reported to the server.
*
* ## Upload a file
* ```ts
* const formData = new FormData();
* formData.append("username", "abc123");
* formData.append("avatar", Bun.file("avatar.png"), "avatar.png");
* await fetch("https://example.com", { method: "POST", body: formData });
* ```
*/
append(name: string, value: string | Blob, fileName?: string): void;
delete(name: string): void;
get(name: string): FormDataEntryValue | null;
getAll(name: string): FormDataEntryValue[];
has(name: string): boolean;
set(name: string, value: string | Blob, fileName?: string): void;
keys(): IterableIterator<string>;
values(): IterableIterator<string>;
entries(): IterableIterator<[string, FormDataEntryValue]>;
[Symbol.iterator](): IterableIterator<[string, FormDataEntryValue]>;
forEach(
callback: (value: FormDataEntryValue, key: string, parent: this) => void,
thisArg?: any,
): void;
}
declare var FormData: {
prototype: FormData;
new (): FormData;
};
declare class Blob implements BlobInterface {
/**
* Create a new [Blob](https://developer.mozilla.org/en-US/docs/Web/API/Blob)
@@ -591,20 +545,6 @@ declare class Blob implements BlobInterface {
*/
json<TJSONReturnType = unknown>(): Promise<TJSONReturnType>;
/**
* Read the data from the blob as a {@link FormData} object.
*
* This first decodes the data from UTF-8, then parses it as a
* `multipart/form-data` body or an `application/x-www-form-urlencoded` body.
*
* The `type` property of the blob is used to determine the format of the
* body.
*
* This is a non-standard addition to the `Blob` API, to make it conform more
* closely to the `BodyMixin` API.
*/
formData(): Promise<FormData>;
type: string;
size: number;
}
@@ -636,7 +576,7 @@ interface ResponseInit {
*/
declare class Response implements BlobInterface {
constructor(
body?: ReadableStream | BlobPart | BlobPart[] | null | FormData,
body?: ReadableStream | BlobPart | BlobPart[] | null,
options?: ResponseInit,
);
@@ -751,18 +691,6 @@ declare class Response implements BlobInterface {
*/
blob(): Promise<Blob>;
/**
* Read the data from the Response as a {@link FormData} object.
*
* This first decodes the data from UTF-8, then parses it as a
* `multipart/form-data` body or an `application/x-www-form-urlencoded` body.
*
* If no `Content-Type` header is present, the promise will be rejected.
*
* @returns Promise<FormData> - The body of the response as a {@link FormData}.
*/
formData(): Promise<FormData>;
readonly ok: boolean;
readonly redirected: boolean;
/**
@@ -824,10 +752,10 @@ type ReferrerPolicy =
| "strict-origin"
| "strict-origin-when-cross-origin"
| "unsafe-url";
type RequestInfo = Request | string | RequestInit;
type RequestInfo = Request | string;
type BodyInit = ReadableStream | XMLHttpRequestBodyInit;
type XMLHttpRequestBodyInit = Blob | BufferSource | string | FormData;
type XMLHttpRequestBodyInit = Blob | BufferSource | string;
type ReadableStreamController<T> = ReadableStreamDefaultController<T>;
type ReadableStreamDefaultReadResult<T> =
| ReadableStreamDefaultReadValueResult<T>
@@ -906,21 +834,6 @@ interface RequestInit {
timeout?: boolean;
}
interface FetchRequestInit extends RequestInit {
/**
* Log the raw HTTP request & response to stdout. This API may be
* removed in a future version of Bun without notice.
* This is a custom property that is not part of the Fetch API specification.
* It exists mostly as a debugging tool
*/
verbose?: boolean;
/**
* Override http_proxy or HTTPS_PROXY
* This is a custom property that is not part of the Fetch API specification.
*/
proxy?: string;
}
/**
* [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) represents an HTTP request.
*
@@ -1063,19 +976,9 @@ declare class Request implements BlobInterface {
/** Copy the Request object into a new Request, including the body */
clone(): Request;
/**
* Read the body from the Request as a {@link FormData} object.
*
* This first decodes the data from UTF-8, then parses it as a
* `multipart/form-data` body or an `application/x-www-form-urlencoded` body.
*
* @returns Promise<FormData> - The body of the request as a {@link FormData}.
*/
formData(): Promise<FormData>;
}
declare interface Crypto {
interface Crypto {
readonly subtle: SubtleCrypto;
getRandomValues<T extends BufferSource = BufferSource>(array: T): T;
@@ -1091,10 +994,6 @@ declare interface Crypto {
*/
randomUUID(): string;
}
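Both members declared above live on the global `crypto` object; a tiny usage sketch:

```ts
// Generate a v4 UUID and fill a typed array with cryptographically random bytes.
const id = crypto.randomUUID();
const nonce = crypto.getRandomValues(new Uint8Array(16));
console.log(id, nonce);
```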
declare var Crypto: {
prototype: Crypto;
new (): Crypto;
};
declare var crypto: Crypto;
@@ -1326,11 +1225,6 @@ declare function clearInterval(id?: number): void;
* @param id timer id
*/
declare function clearTimeout(id?: number): void;
/**
* Cancel an immediate function call by its immediate ID.
* @param id immediate id
*/
declare function clearImmediate(id?: number): void;
// declare function createImageBitmap(image: ImageBitmapSource, options?: ImageBitmapOptions): Promise<ImageBitmap>;
// declare function createImageBitmap(image: ImageBitmapSource, sx: number, sy: number, sw: number, sh: number, options?: ImageBitmapOptions): Promise<ImageBitmap>;
/**
@@ -1343,10 +1237,20 @@ declare function clearImmediate(id?: number): void;
*
*
*/
declare function fetch(
url: string | URL,
init?: FetchRequestInit,
url: string,
init?: RequestInit,
/**
* This is a custom property that is not part of the Fetch API specification.
* It exists mostly as a debugging tool
*/
bunOnlyOptions?: {
/**
* Log the raw HTTP request & response to stdout. This API may be
* removed in a future version of Bun without notice.
*/
verbose: boolean;
},
): Promise<Response>;
/**
@@ -1360,7 +1264,21 @@ declare function fetch(
*
*/
// tslint:disable-next-line:unified-signatures
declare function fetch(request: Request, init?: RequestInit): Promise<Response>;
declare function fetch(
request: Request,
init?: RequestInit,
/**
* This is a custom property that is not part of the Fetch API specification.
* It exists mostly as a debugging tool
*/
bunOnlyOptions?: {
/**
* Log the raw HTTP request & response to stdout. This API may be
* removed in a future version of Bun without notice.
*/
verbose: boolean;
},
): Promise<Response>;
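Either overload ultimately resolves to a standard `Response`; a minimal usage sketch (the URL is illustrative):

```ts
// Fetch a page, check the status, and read the body as text.
const response = await fetch("https://example.com");
if (!response.ok) {
  throw new Error(`request failed with status ${response.status}`);
}
const body = await response.text();
console.log(body.length);
```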
declare function queueMicrotask(callback: (...args: any[]) => void): void;
/**
@@ -1709,27 +1627,6 @@ declare var MessageEvent: {
new <T>(type: string, eventInitDict?: MessageEventInit<T>): MessageEvent<T>;
};
interface CustomEventInit<T = any> extends EventInit {
detail?: T;
}
interface CustomEvent<T = any> extends Event {
/** Returns any custom data event was created with. Typically used for synthetic events. */
readonly detail: T;
/** @deprecated */
initCustomEvent(
type: string,
bubbles?: boolean,
cancelable?: boolean,
detail?: T,
): void;
}
declare var CustomEvent: {
prototype: CustomEvent;
new <T>(type: string, eventInitDict?: CustomEventInit<T>): CustomEvent<T>;
};
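As declared above, `CustomEvent` carries arbitrary payload data in `detail`. A short sketch (the event name and payload are illustrative, and this assumes `EventTarget` is available as a global):

```ts
// Dispatch a CustomEvent with a payload and read it back in a listener.
const target = new EventTarget();
target.addEventListener("job:done", (event) => {
  const { detail } = event as CustomEvent<{ id: number }>;
  console.log("finished job", detail.id);
});
target.dispatchEvent(new CustomEvent("job:done", { detail: { id: 42 } }));
```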
/**
* An implementation of the [WebSocket API](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket)
*/
@@ -2390,43 +2287,6 @@ interface ErrnoException extends Error {
syscall?: string | undefined;
}
/** An abnormal event (called an exception) which occurs as a result of calling a method or accessing a property of a web API. */
interface DOMException extends Error {
/** @deprecated */
readonly code: number;
readonly message: string;
readonly name: string;
readonly ABORT_ERR: number;
readonly DATA_CLONE_ERR: number;
readonly DOMSTRING_SIZE_ERR: number;
readonly HIERARCHY_REQUEST_ERR: number;
readonly INDEX_SIZE_ERR: number;
readonly INUSE_ATTRIBUTE_ERR: number;
readonly INVALID_ACCESS_ERR: number;
readonly INVALID_CHARACTER_ERR: number;
readonly INVALID_MODIFICATION_ERR: number;
readonly INVALID_NODE_TYPE_ERR: number;
readonly INVALID_STATE_ERR: number;
readonly NAMESPACE_ERR: number;
readonly NETWORK_ERR: number;
readonly NOT_FOUND_ERR: number;
readonly NOT_SUPPORTED_ERR: number;
readonly NO_DATA_ALLOWED_ERR: number;
readonly NO_MODIFICATION_ALLOWED_ERR: number;
readonly QUOTA_EXCEEDED_ERR: number;
readonly SECURITY_ERR: number;
readonly SYNTAX_ERR: number;
readonly TIMEOUT_ERR: number;
readonly TYPE_MISMATCH_ERR: number;
readonly URL_MISMATCH_ERR: number;
readonly VALIDATION_ERR: number;
readonly WRONG_DOCUMENT_ERR: number;
}
declare var DOMException: {
prototype: DOMException;
new (message?: string, name?: string): DOMException;
};
declare function alert(message?: string): void;
declare function confirm(message?: string): boolean;
declare function prompt(message?: string, _default?: string): string | null;
@@ -2922,154 +2782,3 @@ interface SharedArrayBuffer {
*/
grow(size: number): SharedArrayBuffer;
}
declare namespace WebAssembly {
interface CompileError extends Error {}
var CompileError: {
prototype: CompileError;
new (message?: string): CompileError;
(message?: string): CompileError;
};
interface Global {
value: any;
valueOf(): any;
}
var Global: {
prototype: Global;
new (descriptor: GlobalDescriptor, v?: any): Global;
};
interface Instance {
readonly exports: Exports;
}
var Instance: {
prototype: Instance;
new (module: Module, importObject?: Imports): Instance;
};
interface LinkError extends Error {}
var LinkError: {
prototype: LinkError;
new (message?: string): LinkError;
(message?: string): LinkError;
};
interface Memory {
readonly buffer: ArrayBuffer;
grow(delta: number): number;
}
var Memory: {
prototype: Memory;
new (descriptor: MemoryDescriptor): Memory;
};
interface Module {}
var Module: {
prototype: Module;
new (bytes: BufferSource): Module;
customSections(moduleObject: Module, sectionName: string): ArrayBuffer[];
exports(moduleObject: Module): ModuleExportDescriptor[];
imports(moduleObject: Module): ModuleImportDescriptor[];
};
interface RuntimeError extends Error {}
var RuntimeError: {
prototype: RuntimeError;
new (message?: string): RuntimeError;
(message?: string): RuntimeError;
};
interface Table {
readonly length: number;
get(index: number): any;
grow(delta: number, value?: any): number;
set(index: number, value?: any): void;
}
var Table: {
prototype: Table;
new (descriptor: TableDescriptor, value?: any): Table;
};
interface GlobalDescriptor {
mutable?: boolean;
value: ValueType;
}
interface MemoryDescriptor {
initial: number;
maximum?: number;
shared?: boolean;
}
interface ModuleExportDescriptor {
kind: ImportExportKind;
name: string;
}
interface ModuleImportDescriptor {
kind: ImportExportKind;
module: string;
name: string;
}
interface TableDescriptor {
element: TableKind;
initial: number;
maximum?: number;
}
interface WebAssemblyInstantiatedSource {
instance: Instance;
module: Module;
}
type ImportExportKind = "function" | "global" | "memory" | "table";
type TableKind = "anyfunc" | "externref";
type ValueType =
| "anyfunc"
| "externref"
| "f32"
| "f64"
| "i32"
| "i64"
| "v128";
type ExportValue = Function | Global | Memory | Table;
type Exports = Record<string, ExportValue>;
type ImportValue = ExportValue | number;
type Imports = Record<string, ModuleImports>;
type ModuleImports = Record<string, ImportValue>;
function compile(bytes: BufferSource): Promise<Module>;
// function compileStreaming(source: Response | PromiseLike<Response>): Promise<Module>;
function instantiate(
bytes: BufferSource,
importObject?: Imports,
): Promise<WebAssemblyInstantiatedSource>;
function instantiate(
moduleObject: Module,
importObject?: Imports,
): Promise<Instance>;
// function instantiateStreaming(
// source: Response | PromiseLike<Response>,
// importObject?: Imports,
// ): Promise<WebAssemblyInstantiatedSource>;
function validate(bytes: BufferSource): boolean;
}
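The `instantiate` overloads above accept either raw bytes or an already-compiled `Module`. A rough sketch of the bytes path (the file name and export name are illustrative):

```ts
// Read a .wasm file, instantiate it, and call an exported function.
const bytes = await Bun.file("add.wasm").arrayBuffer();
const { instance } = await WebAssembly.instantiate(bytes);
const add = instance.exports.add as (a: number, b: number) => number;
console.log(add(2, 3));
```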
interface NodeModule {
exports: any;
}
declare var module: NodeModule;
// Same as module.exports
declare var exports: any;
declare var global: typeof globalThis;

View File

@@ -10,7 +10,6 @@
/// <reference path="./bun-test.d.ts" />
/// <reference path="./bun.d.ts" />
/// <reference path="./child_process.d.ts" />
/// <reference path="./console.d.ts" />
/// <reference path="./constants.d.ts" />
/// <reference path="./crypto.d.ts" />
/// <reference path="./dns.d.ts" />
@@ -28,7 +27,6 @@
/// <reference path="./net.d.ts" />
/// <reference path="./os.d.ts" />
/// <reference path="./path.d.ts" />
/// <reference path="./perf_hooks.d.ts" />
/// <reference path="./punycode.d.ts" />
/// <reference path="./querystring.d.ts" />
/// <reference path="./readline.d.ts" />

View File

@@ -1,625 +0,0 @@
/**
* This module provides an implementation of a subset of the W3C [Web Performance APIs](https://w3c.github.io/perf-timing-primer/) as well as additional APIs for
* Node.js-specific performance measurements.
*
* Node.js supports the following [Web Performance APIs](https://w3c.github.io/perf-timing-primer/):
*
* * [High Resolution Time](https://www.w3.org/TR/hr-time-2)
* * [Performance Timeline](https://w3c.github.io/performance-timeline/)
* * [User Timing](https://www.w3.org/TR/user-timing/)
*
* ```js
* const { PerformanceObserver, performance } = require('perf_hooks');
*
* const obs = new PerformanceObserver((items) => {
* console.log(items.getEntries()[0].duration);
* performance.clearMarks();
* });
* obs.observe({ type: 'measure' });
* performance.measure('Start to Now');
*
* performance.mark('A');
* doSomeLongRunningProcess(() => {
* performance.measure('A to Now', 'A');
*
* performance.mark('B');
* performance.measure('A to B', 'A', 'B');
* });
* ```
* @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/perf_hooks.js)
*/
declare module "perf_hooks" {
// import { AsyncResource } from "node:async_hooks";
// type EntryType = "node" | "mark" | "measure" | "gc" | "function" | "http2" | "http";
// interface NodeGCPerformanceDetail {
// /**
// * When `performanceEntry.entryType` is equal to 'gc', `the performance.kind` property identifies
// * the type of garbage collection operation that occurred.
// * See perf_hooks.constants for valid values.
// */
// readonly kind?: number | undefined;
// /**
// * When `performanceEntry.entryType` is equal to 'gc', the `performance.flags`
// * property contains additional information about garbage collection operation.
// * See perf_hooks.constants for valid values.
// */
// readonly flags?: number | undefined;
// }
// /**
// * @since v8.5.0
// */
// class PerformanceEntry {
// protected constructor();
// /**
// * The total number of milliseconds elapsed for this entry. This value will not
// * be meaningful for all Performance Entry types.
// * @since v8.5.0
// */
// readonly duration: number;
// /**
// * The name of the performance entry.
// * @since v8.5.0
// */
// readonly name: string;
// /**
// * The high resolution millisecond timestamp marking the starting time of the
// * Performance Entry.
// * @since v8.5.0
// */
// readonly startTime: number;
// /**
// * The type of the performance entry. It may be one of:
// *
// * * `'node'` (Node.js only)
// * * `'mark'` (available on the Web)
// * * `'measure'` (available on the Web)
// * * `'gc'` (Node.js only)
// * * `'function'` (Node.js only)
// * * `'http2'` (Node.js only)
// * * `'http'` (Node.js only)
// * @since v8.5.0
// */
// readonly entryType: EntryType;
// /**
// * Additional detail specific to the `entryType`.
// * @since v16.0.0
// */
// readonly detail?: NodeGCPerformanceDetail | unknown | undefined; // TODO: Narrow this based on entry type.
// toJSON(): any;
// }
// class PerformanceMark extends PerformanceEntry {
// readonly duration: 0;
// readonly entryType: "mark";
// }
// class PerformanceMeasure extends PerformanceEntry {
// readonly entryType: "measure";
// }
// /**
// * _This property is an extension by Node.js. It is not available in Web browsers._
// *
// * Provides timing details for Node.js itself. The constructor of this class
// * is not exposed to users.
// * @since v8.5.0
// */
// class PerformanceNodeTiming extends PerformanceEntry {
// /**
// * The high resolution millisecond timestamp at which the Node.js process
// * completed bootstrapping. If bootstrapping has not yet finished, the property
// * has the value of -1.
// * @since v8.5.0
// */
// readonly bootstrapComplete: number;
// /**
// * The high resolution millisecond timestamp at which the Node.js environment was
// * initialized.
// * @since v8.5.0
// */
// readonly environment: number;
// /**
// * The high resolution millisecond timestamp of the amount of time the event loop
// * has been idle within the event loop's event provider (e.g. `epoll_wait`). This
// * does not take CPU usage into consideration. If the event loop has not yet
// * started (e.g., in the first tick of the main script), the property has the
// * value of 0.
// * @since v14.10.0, v12.19.0
// */
// readonly idleTime: number;
// /**
// * The high resolution millisecond timestamp at which the Node.js event loop
// * exited. If the event loop has not yet exited, the property has the value of -1\.
// * It can only have a value of not -1 in a handler of the `'exit'` event.
// * @since v8.5.0
// */
// readonly loopExit: number;
// /**
// * The high resolution millisecond timestamp at which the Node.js event loop
// * started. If the event loop has not yet started (e.g., in the first tick of the
// * main script), the property has the value of -1.
// * @since v8.5.0
// */
// readonly loopStart: number;
// /**
// * The high resolution millisecond timestamp at which the V8 platform was
// * initialized.
// * @since v8.5.0
// */
// readonly v8Start: number;
// }
// interface EventLoopUtilization {
// idle: number;
// active: number;
// utilization: number;
// }
// /**
// * @param util1 The result of a previous call to eventLoopUtilization()
// * @param util2 The result of a previous call to eventLoopUtilization() prior to util1
// */
// type EventLoopUtilityFunction = (util1?: EventLoopUtilization, util2?: EventLoopUtilization) => EventLoopUtilization;
// interface MarkOptions {
// /**
// * Additional optional detail to include with the mark.
// */
// detail?: unknown | undefined;
// /**
// * An optional timestamp to be used as the mark time.
// * @default `performance.now()`.
// */
// startTime?: number | undefined;
// }
// interface MeasureOptions {
// /**
// * Additional optional detail to include with the mark.
// */
// detail?: unknown | undefined;
// /**
// * Duration between start and end times.
// */
// duration?: number | undefined;
// /**
// * Timestamp to be used as the end time, or a string identifying a previously recorded mark.
// */
// end?: number | string | undefined;
// /**
// * Timestamp to be used as the start time, or a string identifying a previously recorded mark.
// */
// start?: number | string | undefined;
// }
// interface TimerifyOptions {
// /**
// * A histogram object created using
// * `perf_hooks.createHistogram()` that will record runtime durations in
// * nanoseconds.
// */
// histogram?: RecordableHistogram | undefined;
// }
interface Performance {
/**
* If name is not provided, removes all PerformanceMark objects from the Performance Timeline.
* If name is provided, removes only the named mark.
* @param name
*/
// clearMarks(name?: string): void;
/**
* If name is not provided, removes all PerformanceMeasure objects from the Performance Timeline.
* If name is provided, removes only the named measure.
* @param name
* @since v16.7.0
*/
// clearMeasures(name?: string): void;
/**
* Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime`.
* If you are only interested in performance entries of certain types or that have certain names, see
* `performance.getEntriesByType()` and `performance.getEntriesByName()`.
* @since v16.7.0
*/
// getEntries(): PerformanceEntry[];
/**
* Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime`
* whose `performanceEntry.name` is equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to `type`.
* @param name
* @param type
* @since v16.7.0
*/
// getEntriesByName(name: string, type?: EntryType): PerformanceEntry[];
/**
* Returns a list of `PerformanceEntry` objects in chronological order with respect to `performanceEntry.startTime`
* whose `performanceEntry.entryType` is equal to `type`.
* @param type
* @since v16.7.0
*/
// getEntriesByType(type: EntryType): PerformanceEntry[];
/**
* Creates a new PerformanceMark entry in the Performance Timeline.
* A PerformanceMark is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'mark',
* and whose performanceEntry.duration is always 0.
* Performance marks are used to mark specific significant moments in the Performance Timeline.
* @param name
* @return The PerformanceMark entry that was created
*/
// mark(name?: string, options?: MarkOptions): PerformanceMark;
/**
* Creates a new PerformanceMeasure entry in the Performance Timeline.
* A PerformanceMeasure is a subclass of PerformanceEntry whose performanceEntry.entryType is always 'measure',
* and whose performanceEntry.duration measures the number of milliseconds elapsed since startMark and endMark.
*
* The startMark argument may identify any existing PerformanceMark in the Performance Timeline, or may identify
* any of the timestamp properties provided by the PerformanceNodeTiming class. If the named startMark does not exist,
* then startMark is set to timeOrigin by default.
*
* The endMark argument must identify any existing PerformanceMark in the Performance Timeline or any of the timestamp
* properties provided by the PerformanceNodeTiming class. If the named endMark does not exist, an error will be thrown.
* @param name
* @param startMark
* @param endMark
* @return The PerformanceMeasure entry that was created
*/
// measure(name: string, startMark?: string, endMark?: string): PerformanceMeasure;
// measure(name: string, options: MeasureOptions): PerformanceMeasure;
/**
* An instance of the PerformanceNodeTiming class that provides performance metrics for specific Node.js operational milestones.
*/
// readonly nodeTiming: PerformanceNodeTiming;
/**
* @return the current high resolution millisecond timestamp
*/
now(): number;
/**
* The timeOrigin specifies the high resolution millisecond timestamp from which all performance metric durations are measured.
*/
readonly timeOrigin: number;
/**
* Wraps a function within a new function that measures the running time of the wrapped function.
* A PerformanceObserver must be subscribed to the 'function' event type in order for the timing details to be accessed.
* @param fn
*/
// timerify<T extends (...params: any[]) => any>(fn: T, options?: TimerifyOptions): T;
/**
* eventLoopUtilization is similar to CPU utilization except that it is calculated using high precision wall-clock time.
* It represents the percentage of time the event loop has spent outside the event loop's event provider (e.g. epoll_wait).
* No other CPU idle time is taken into consideration.
*/
// eventLoopUtilization: EventLoopUtilityFunction;
}
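Only `now()` and `timeOrigin` are left uncommented in this interface; measuring a duration with them looks roughly like this (the 50 ms delay stands in for real work):

```ts
import { performance } from "perf_hooks";

// Measure elapsed wall-clock time around an async operation.
const start = performance.now();
await new Promise((resolve) => setTimeout(resolve, 50));
console.log(`took ${(performance.now() - start).toFixed(1)} ms`);
```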
// interface PerformanceObserverEntryList {
// /**
// * Returns a list of `PerformanceEntry` objects in chronological order
// * with respect to `performanceEntry.startTime`.
// *
// * ```js
// * const {
// * performance,
// * PerformanceObserver
// * } = require('perf_hooks');
// *
// * const obs = new PerformanceObserver((perfObserverList, observer) => {
// * console.log(perfObserverList.getEntries());
// *
// * * [
// * * PerformanceEntry {
// * * name: 'test',
// * * entryType: 'mark',
// * * startTime: 81.465639,
// * * duration: 0
// * * },
// * * PerformanceEntry {
// * * name: 'meow',
// * * entryType: 'mark',
// * * startTime: 81.860064,
// * * duration: 0
// * * }
// * * ]
// *
// *
// * performance.clearMarks();
// * performance.clearMeasures();
// * observer.disconnect();
// * });
// * obs.observe({ type: 'mark' });
// *
// * performance.mark('test');
// * performance.mark('meow');
// * ```
// * @since v8.5.0
// */
// getEntries(): PerformanceEntry[];
// /**
// * Returns a list of `PerformanceEntry` objects in chronological order
// * with respect to `performanceEntry.startTime` whose `performanceEntry.name` is
// * equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to`type`.
// *
// * ```js
// * const {
// * performance,
// * PerformanceObserver
// * } = require('perf_hooks');
// *
// * const obs = new PerformanceObserver((perfObserverList, observer) => {
// * console.log(perfObserverList.getEntriesByName('meow'));
// *
// * * [
// * * PerformanceEntry {
// * * name: 'meow',
// * * entryType: 'mark',
// * * startTime: 98.545991,
// * * duration: 0
// * * }
// * * ]
// *
// * console.log(perfObserverList.getEntriesByName('nope')); // []
// *
// * console.log(perfObserverList.getEntriesByName('test', 'mark'));
// *
// * * [
// * * PerformanceEntry {
// * * name: 'test',
// * * entryType: 'mark',
// * * startTime: 63.518931,
// * * duration: 0
// * * }
// * * ]
// *
// * console.log(perfObserverList.getEntriesByName('test', 'measure')); // []
// *
// * performance.clearMarks();
// * performance.clearMeasures();
// * observer.disconnect();
// * });
// * obs.observe({ entryTypes: ['mark', 'measure'] });
// *
// * performance.mark('test');
// * performance.mark('meow');
// * ```
// * @since v8.5.0
// */
// getEntriesByName(name: string, type?: EntryType): PerformanceEntry[];
// /**
// * Returns a list of `PerformanceEntry` objects in chronological order
// * with respect to `performanceEntry.startTime` whose `performanceEntry.entryType`is equal to `type`.
// *
// * ```js
// * const {
// * performance,
// * PerformanceObserver
// * } = require('perf_hooks');
// *
// * const obs = new PerformanceObserver((perfObserverList, observer) => {
// * console.log(perfObserverList.getEntriesByType('mark'));
// *
// * * [
// * * PerformanceEntry {
// * * name: 'test',
// * * entryType: 'mark',
// * * startTime: 55.897834,
// * * duration: 0
// * * },
// * * PerformanceEntry {
// * * name: 'meow',
// * * entryType: 'mark',
// * * startTime: 56.350146,
// * * duration: 0
// * * }
// * * ]
// *
// * performance.clearMarks();
// * performance.clearMeasures();
// * observer.disconnect();
// * });
// * obs.observe({ type: 'mark' });
// *
// * performance.mark('test');
// * performance.mark('meow');
// * ```
// * @since v8.5.0
// */
// getEntriesByType(type: EntryType): PerformanceEntry[];
// }
// type PerformanceObserverCallback = (list: PerformanceObserverEntryList, observer: PerformanceObserver) => void;
// class PerformanceObserver extends AsyncResource {
// constructor(callback: PerformanceObserverCallback);
// /**
// * Disconnects the `PerformanceObserver` instance from all notifications.
// * @since v8.5.0
// */
// disconnect(): void;
// /**
// * Subscribes the `PerformanceObserver` instance to notifications of new `PerformanceEntry` instances identified either by `options.entryTypes`or `options.type`:
// *
// * ```js
// * const {
// * performance,
// * PerformanceObserver
// * } = require('perf_hooks');
// *
// * const obs = new PerformanceObserver((list, observer) => {
// * // Called once asynchronously. `list` contains three items.
// * });
// * obs.observe({ type: 'mark' });
// *
// * for (let n = 0; n < 3; n++)
// * performance.mark(`test${n}`);
// * ```
// * @since v8.5.0
// */
// observe(
// options:
// | {
// entryTypes: ReadonlyArray<EntryType>;
// buffered?: boolean | undefined;
// }
// | {
// type: EntryType;
// buffered?: boolean | undefined;
// },
// ): void;
// }
// namespace constants {
// const NODE_PERFORMANCE_GC_MAJOR: number;
// const NODE_PERFORMANCE_GC_MINOR: number;
// const NODE_PERFORMANCE_GC_INCREMENTAL: number;
// const NODE_PERFORMANCE_GC_WEAKCB: number;
// const NODE_PERFORMANCE_GC_FLAGS_NO: number;
// const NODE_PERFORMANCE_GC_FLAGS_CONSTRUCT_RETAINED: number;
// const NODE_PERFORMANCE_GC_FLAGS_FORCED: number;
// const NODE_PERFORMANCE_GC_FLAGS_SYNCHRONOUS_PHANTOM_PROCESSING: number;
// const NODE_PERFORMANCE_GC_FLAGS_ALL_AVAILABLE_GARBAGE: number;
// const NODE_PERFORMANCE_GC_FLAGS_ALL_EXTERNAL_MEMORY: number;
// const NODE_PERFORMANCE_GC_FLAGS_SCHEDULE_IDLE: number;
// }
const performance: Performance;
// interface EventLoopMonitorOptions {
// /**
// * The sampling rate in milliseconds.
// * Must be greater than zero.
// * @default 10
// */
// resolution?: number | undefined;
// }
// interface Histogram {
// /**
// * Returns a `Map` object detailing the accumulated percentile distribution.
// * @since v11.10.0
// */
// readonly percentiles: Map<number, number>;
// /**
// * The number of times the event loop delay exceeded the maximum 1 hour event
// * loop delay threshold.
// * @since v11.10.0
// */
// readonly exceeds: number;
// /**
// * The minimum recorded event loop delay.
// * @since v11.10.0
// */
// readonly min: number;
// /**
// * The maximum recorded event loop delay.
// * @since v11.10.0
// */
// readonly max: number;
// /**
// * The mean of the recorded event loop delays.
// * @since v11.10.0
// */
// readonly mean: number;
// /**
// * The standard deviation of the recorded event loop delays.
// * @since v11.10.0
// */
// readonly stddev: number;
// /**
// * Resets the collected histogram data.
// * @since v11.10.0
// */
// reset(): void;
// /**
// * Returns the value at the given percentile.
// * @since v11.10.0
// * @param percentile A percentile value in the range (0, 100].
// */
// percentile(percentile: number): number;
// }
// interface IntervalHistogram extends Histogram {
// /**
// * Enables the update interval timer. Returns `true` if the timer was
// * started, `false` if it was already started.
// * @since v11.10.0
// */
// enable(): boolean;
// /**
// * Disables the update interval timer. Returns `true` if the timer was
// * stopped, `false` if it was already stopped.
// * @since v11.10.0
// */
// disable(): boolean;
// }
// interface RecordableHistogram extends Histogram {
// /**
// * @since v15.9.0, v14.18.0
// * @param val The amount to record in the histogram.
// */
// record(val: number | bigint): void;
// /**
// * Calculates the amount of time (in nanoseconds) that has passed since the
// * previous call to `recordDelta()` and records that amount in the histogram.
// *
// * ## Examples
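// *
// * A minimal illustrative sketch (not taken from the upstream docs; assumes the
// * Node.js-compatible `perf_hooks` API is available):
// *
// * ```js
// * const { createHistogram } = require('perf_hooks');
// * const h = createHistogram();
// * h.recordDelta(); // start the clock
// * // ... do some work ...
// * h.recordDelta(); // records the nanoseconds elapsed since the previous call
// * console.log(h.max);
// * ```
// *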
// * @since v15.9.0, v14.18.0
// */
// recordDelta(): void;
// /**
// * Adds the values from other to this histogram.
// * @since v17.4.0, v16.14.0
// * @param other Recordable Histogram to combine with
// */
// add(other: RecordableHistogram): void;
// }
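//
// A hypothetical sketch of combining two recordable histograms with `add()`
// (method names as declared above; the values are illustrative only):
//
//   const { createHistogram } = require("perf_hooks");
//   const a = createHistogram();
//   const b = createHistogram();
//   a.record(10);
//   b.record(20);
//   a.add(b); // "a" now also contains the values recorded into "b"
//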
/**
* _This property is an extension by Node.js. It is not available in Web browsers._
*
* Creates an `IntervalHistogram` object that samples and reports the event loop
* delay over time. The delays will be reported in nanoseconds.
*
* Using a timer to detect approximate event loop delay works because the
* execution of timers is tied specifically to the lifecycle of the libuv
* event loop. That is, a delay in the loop will cause a delay in the execution
* of the timer, and those delays are specifically what this API is intended to
* detect.
*
* ```js
* const { monitorEventLoopDelay } = require('perf_hooks');
* const h = monitorEventLoopDelay({ resolution: 20 });
* h.enable();
* // Do something.
* h.disable();
* console.log(h.min);
* console.log(h.max);
* console.log(h.mean);
* console.log(h.stddev);
* console.log(h.percentiles);
* console.log(h.percentile(50));
* console.log(h.percentile(99));
* ```
* @since v11.10.0
*/
// function monitorEventLoopDelay(options?: EventLoopMonitorOptions): IntervalHistogram;
// interface CreateHistogramOptions {
// /**
// * The minimum recordable value. Must be an integer value greater than 0.
// * @default 1
// */
// min?: number | bigint | undefined;
// /**
// * The maximum recordable value. Must be an integer value greater than min.
// * @default Number.MAX_SAFE_INTEGER
// */
// max?: number | bigint | undefined;
// /**
// * The number of accuracy digits. Must be a number between 1 and 5.
// * @default 3
// */
// figures?: number | undefined;
// }
/**
* Returns a `RecordableHistogram`.
* @since v15.9.0, v14.18.0
*/
// function createHistogram(options?: CreateHistogramOptions): RecordableHistogram;
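// A hypothetical usage sketch for the options above (values are illustrative only):
//
//   const { createHistogram } = require("perf_hooks");
//   const h = createHistogram({ min: 1, max: 1_000_000_000, figures: 3 });
//   h.record(1_500_000); // record a value, e.g. a duration in nanoseconds
//   console.log(h.mean, h.percentile(99));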
// import { performance as _performance } from "perf_hooks";
// global {
// /**
// * `performance` is a global reference for `require('perf_hooks').performance`
// * https://nodejs.org/api/globals.html#performance
// * @since v16.0.0
// */
// var performance: typeof globalThis extends {
// onmessage: any;
// performance: infer T;
// }
// ? T
// : typeof _performance;
// }
}
declare module "node:perf_hooks" {
export * from "perf_hooks";
}


@@ -33,7 +33,6 @@
* @see [source](https://github.com/nodejs/node/blob/v18.0.0/lib/readline.js)
*/
declare module "readline" {
import { Readable, Writable } from "node:stream";
import { Abortable, EventEmitter } from "node:events";
import * as promises from "node:readline/promises";
@@ -104,8 +103,8 @@ declare module "readline" {
* @see https://nodejs.org/dist/latest-v10.x/docs/api/readline.html#readline_class_interface
*/
protected constructor(
input: Readable,
output?: Writable,
input: ReadableStream,
output?: WritableStream,
completer?: Completer | AsyncCompleter,
terminal?: boolean,
);
@@ -345,8 +344,8 @@ declare module "readline" {
) => void;
export type CompleterResult = [string[], string];
export interface ReadLineOptions {
input: Readable;
output?: Writable | undefined;
input: ReadableStream;
output?: WritableStream | undefined;
completer?: Completer | AsyncCompleter | undefined;
terminal?: boolean | undefined;
/**
@@ -406,8 +405,8 @@ declare module "readline" {
* @since v0.1.98
*/
export function createInterface(
input: Readable,
output?: Writable,
input: ReadableStream,
output?: WritableStream,
completer?: Completer | AsyncCompleter,
terminal?: boolean,
): Interface;
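
(Illustrative sketch, not part of the diff: under the updated signature any web
ReadableStream/WritableStream pair satisfies the types, so, for example, the two
sides of a TransformStream could be passed instead of node:stream objects.)

    import { createInterface } from "node:readline";

    const { readable, writable } = new TransformStream();
    const rl = createInterface(readable, writable);
    rl.on("line", (line) => console.log("got:", line));
    rl.close();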
@@ -534,7 +533,7 @@ declare module "readline" {
* @since v0.7.7
*/
export function emitKeypressEvents(
stream: Readable,
stream: ReadableStream,
readlineInterface?: Interface,
): void;
export type Direction = -1 | 0 | 1;
@@ -550,7 +549,7 @@ declare module "readline" {
* @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
*/
export function clearLine(
stream: Writable,
stream: WritableStream,
dir: Direction,
callback?: () => void,
): boolean;
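
(Likewise for the cursor helpers, a hypothetical sketch: the stream argument is
now typed as a web WritableStream; the TransformStream below is only a placeholder.)

    import { clearLine, cursorTo } from "node:readline";

    const { writable } = new TransformStream();
    cursorTo(writable, 0);  // move the cursor to column 0
    clearLine(writable, 0); // clear the entire current line (direction 0)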
@@ -562,7 +561,7 @@ declare module "readline" {
* @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
*/
export function clearScreenDown(
stream: Writable,
stream: WritableStream,
callback?: () => void,
): boolean;
/**
@@ -573,7 +572,7 @@ declare module "readline" {
* @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
*/
export function cursorTo(
stream: Writable,
stream: WritableStream,
x: number,
y?: number,
callback?: () => void,
@@ -689,7 +688,7 @@ declare module "readline" {
* @return `false` if `stream` wishes for the calling code to wait for the `'drain'` event to be emitted before continuing to write additional data; otherwise `true`.
*/
export function moveCursor(
stream: Writable,
stream: WritableStream,
dx: number,
dy: number,
callback?: () => void,


@@ -5,7 +5,6 @@
* @since v17.0.0
*/
declare module "readline/promises" {
import { Readable, Writable } from "node:stream";
import {
Interface as _Interface,
ReadLineOptions,
@@ -57,7 +56,7 @@ declare module "readline/promises" {
/**
* @param stream A TTY stream.
*/
constructor(stream: Writable, options?: { autoCommit?: boolean });
constructor(stream: WritableStream, options?: { autoCommit?: boolean });
/**
* The `rl.clearLine()` method adds to the internal list of pending action an action that clears current line of the associated `stream` in a specified direction identified by `dir`.
* Call `rl.commit()` to see the effect of this method, unless `autoCommit: true` was passed to the constructor.
@@ -138,8 +137,8 @@ declare module "readline/promises" {
* ```
*/
function createInterface(
input: Readable,
output?: Writable,
input: ReadableStream,
output?: WritableStream,
completer?: Completer | AsyncCompleter,
terminal?: boolean,
): Interface;
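
(A hypothetical sketch of the promises API under the same web-stream types; the
streams are placeholders and the prompt text is illustrative.)

    import { createInterface } from "node:readline/promises";

    const { readable, writable } = new TransformStream();
    const rl = createInterface(readable, writable);
    const answer = await rl.question("name? "); // top-level await in an ES module
    console.log(answer);
    rl.close();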


@@ -9,7 +9,7 @@ const BUN_VERSION = (
process.env.BUN_VERSION ||
Bun.version ||
process.versions.bun
).replace(/^.*v/, "");
).replace(/^v/, "");
const folder = resolve(process.argv.at(-1)!);
if (folder.endsWith("bundle.ts")) {
throw new Error("Pass a folder");
@@ -23,7 +23,7 @@ try {
const header = await file(join(import.meta.dir, "..", "header.txt")).text();
const filesToCat = (await getDotTsFiles("./")).filter(
f => !["./index.d.ts"].some(tf => f === tf),
(f) => !["./index.d.ts"].some((tf) => f === tf),
);
const fileContents: string[] = [];
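
(Illustrative note on the regex change above, with hypothetical inputs: the old
/^.*v/ pattern is greedy and strips everything up to and including the last "v"
in the string, while /^v/ removes only a single leading "v".)

    "v0.5.1".replace(/^v/, "");      // "0.5.1"
    "0.5.1-dev".replace(/^v/, "");   // "0.5.1-dev" (unchanged)
    "0.5.1-dev".replace(/^.*v/, ""); // "" (the old pattern consumed everything up to the last "v")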


@@ -1,17 +1,3 @@
import * as c1 from "node:console";
import * as c2 from "console";
c1.log();
c2.log();
for await (const line of c1) {
console.log("Received:", line);
}
for await (const line of c2) {
console.log("Received:", line);
}
for await (const line of console) {
console.log("Received:", line);
}


@@ -11,12 +11,3 @@ declare global {
expectType<"WHATEVER">(process.env.WHATEVER);
export {};
new Bun.Transpiler({
macro: {
"react-relay": {
graphql: "bun-macro-relay/bun-macro-relay.tsx",
},
},
});
Event;

Some files were not shown because too many files have changed in this diff.