Compare commits


2 Commits

Author          SHA1          Message                          Date
Dylan Conway    b185cd134b    always set exit code             2023-01-18 16:20:12 -08:00
Dylan Conway    37f72ef632    set exit code if signal is 42    2023-01-18 16:08:20 -08:00
1028 changed files with 53742 additions and 102647 deletions

View File

@@ -1,7 +1,8 @@
#!/bin/bash
curl -L https://github.com/zigtools/zls-vscode/releases/download/1.1.6/zls-vscode-1.1.6.vsix >/home/ubuntu/vscode-zig.vsix
git clone https://github.com/zigtools/zls /home/ubuntu/zls
cd /home/ubuntu/zls
git checkout 30869d7d8741656448e46fbf14f14da9ca7e5a21
git checkout aabdb0c6ecb3c9a47feff2c2bfb9be4e95adf723
git submodule update --init --recursive --progress --depth=1
zig build -Doptimize=ReleaseFast
zig build -Drelease-fast

View File

@@ -1,6 +1,6 @@
name: 📗 Documentation Issue
name: 📗 Documentation Request
description: Tell us if there is missing or incorrect documentation
labels: [docs]
labels: [documentation]
body:
- type: markdown
attributes:

View File

@@ -1,17 +0,0 @@
# redeploy Vercel site when a file in `docs` changes
# using VERCEL_DEPLOY_HOOK environment variable
name: Deploy site
on:
push:
paths:
- "docs/**"
branches: [main]
jobs:
deploy:
name: Deploy site
runs-on: ubuntu-latest
steps:
- name: Trigger Vercel build
run: curl ${{ secrets.VERCEL_DEPLOY_HOOK }}

.github/workflows/bun-dockerhub.yml (vendored, new file, 47 lines)
View File

@@ -0,0 +1,47 @@
name: bun-dockerhub
on:
push:
paths:
- dockerhub/Dockerfile
branches:
- main
pull_request:
paths:
- dockerhub/Dockerfile
branches:
- main
release:
types:
- published
jobs:
docker:
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Collect metadata
id: meta
uses: docker/metadata-action@v4
with:
images: |
${{ secrets.DOCKERHUB_USERNAME }}/bun
tags: |
type=match,pattern=bun-v(\d.\d.\d),group=1
type=match,pattern=bun-v(\d.\d),group=1
type=match,pattern=bun-v(\d),group=1
type=ref,event=branch
type=ref,event=pr
- name: Login to DockerHub
if: github.event_name == 'release'
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build image
uses: docker/build-push-action@v3
with:
context: ./dockerhub
push: ${{ github.event_name == 'release' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
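A note on how the metadata step above resolves tags: docker/metadata-action's `type=match` rules apply the given regex to the Git ref and publish the captured group as the image tag. As an illustration only (the release tag and username below are assumed, not taken from this diff), a release tagged `bun-v0.5.1` with a DockerHub username of `oven` would yield roughly:

```yaml
# Illustration only (assumed values): release tagged `bun-v0.5.1`,
# DOCKERHUB_USERNAME secret resolving to the hypothetical value `oven`.
#
#   type=match,pattern=bun-v(\d.\d.\d),group=1  ->  oven/bun:0.5.1
#   type=match,pattern=bun-v(\d.\d),group=1     ->  oven/bun:0.5
#   type=match,pattern=bun-v(\d),group=1        ->  oven/bun:0
#
# The type=ref,event=branch and type=ref,event=pr rules only fire on push and
# pull_request events, where the image is built but not pushed
# (push: ${{ github.event_name == 'release' }}).
```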

.github/workflows/bun-homebrew.yml (vendored, new file, 30 lines)
View File

@@ -0,0 +1,30 @@
name: bun-homebrew
on:
release:
types:
- published
- edited
jobs:
homebrew:
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh' && github.event.release.published_at != null
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ env.HOMEBREW_TOKEN }}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: '2.6'
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ github.event.release.tag_name }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_message: Release ${{ github.event.release.tag_name }}
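The checkout step in this new workflow authenticates against oven-sh/homebrew-bun via `${{ env.HOMEBREW_TOKEN }}`. In GitHub Actions, an `env`-level value like this is normally mapped from a repository secret at the workflow or job level; a minimal sketch of that wiring follows (an assumption for illustration; the secret name is borrowed from the deleted bun-release.yml further down, which checks out the same repository with `secrets.ROBOBUN_TOKEN`):

```yaml
# Sketch only, not part of the diff: mapping a repository secret to the
# env.HOMEBREW_TOKEN value that the checkout step reads. The secret name
# ROBOBUN_TOKEN is an assumption borrowed from the deleted bun-release.yml.
env:
  HOMEBREW_TOKEN: ${{ secrets.ROBOBUN_TOKEN }}
```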

View File

@@ -1,130 +0,0 @@
name: bun-linux
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
TEST_TAG: bun-test'
on:
push:
branches:
- main
paths:
- "src/**/*"
- "test/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
timeout-minutes: 90
strategy:
matrix:
include:
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true
- name: Run
run: |
rm -rf ${{runner.temp}}/release
- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
CPU_TARGET=${{matrix.cpu}}
WEBKIT_URL=${{matrix.webkit_url}}
GIT_SHA=${{github.sha}}
WEBKIT_BASENAME=${{matrix.webkit_basename}}
platforms: linux/${{matrix.build_arch}}
target: artifact
outputs: type=local,dest=${{runner.temp}}/release
- name: Zip
run: |
# if zip is not found
if [ ! -x "$(command -v zip)" ]; then
sudo apt-get update && sudo apt-get install -y zip --no-install-recommends
fi
if [ ! -x "$(command -v strip)" ]; then
sudo apt-get update && sudo apt-get install -y binutils --no-install-recommends
fi
cd ${{runner.temp}}/release
chmod +x bun-profile bun
mkdir bun-${{matrix.tag}}-profile
mkdir bun-${{matrix.tag}}
strip bun
mv bun-profile bun-${{matrix.tag}}-profile/bun-profile
mv bun bun-${{matrix.tag}}/bun
zip -r bun-${{matrix.tag}}-profile.zip bun-${{matrix.tag}}-profile
zip -r bun-${{matrix.tag}}.zip bun-${{matrix.tag}}
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}-profile
path: ${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"

View File

@@ -32,25 +32,32 @@ jobs:
runs-on: ${{matrix.runner}}
timeout-minutes: 90
strategy:
fail-fast: false
matrix:
include:
- cpu: haswell
tag: linux-x64
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-linux-amd64-lto.tar.gz"
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: nehalem
- cpu: westmere
tag: linux-x64-baseline
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-linux-amd64-lto.tar.gz"
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v3
@@ -121,20 +128,10 @@ jobs:
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
@@ -146,76 +143,11 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
linux-test:
name: Tests ${{matrix.tag}}
runs-on: ubuntu-latest
needs: [linux]
if: github.event_name == 'pull_request'
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: linux-x64
- tag: linux-x64-baseline
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- uses: actions/upload-artifact@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
name: bun-obj-${{matrix.tag}}
path: ${{runner.temp}}/release/bun-obj
- uses: actions/upload-artifact@v3
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip bun-${{matrix.tag}}.zip
cd bun-${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1
name: ${{matrix.tag}}-dependencies
path: ${{runner.temp}}/release/bun-dependencies

View File

@@ -27,11 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
runs-on: zig-object
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
@@ -104,13 +104,13 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell
@@ -119,16 +119,16 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell
@@ -137,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native
@@ -145,7 +145,7 @@ jobs:
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true
@@ -242,14 +242,14 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -257,14 +257,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -370,9 +370,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
@@ -384,75 +382,3 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request'
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-aarch64
runner: macos-arm64
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1

View File

@@ -27,11 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
runs-on: zig-object
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-obj-darwin-x64-baseline
# - cpu: haswell
@@ -104,13 +104,13 @@ jobs:
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell
@@ -119,16 +119,16 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell
@@ -137,7 +137,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native
@@ -145,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -243,14 +243,14 @@ jobs:
strategy:
matrix:
include:
- cpu: nehalem
- cpu: westmere
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -258,14 +258,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -374,9 +374,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
@@ -388,75 +386,3 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request'
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-x64-baseline
runner: macos-11
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1

View File

@@ -27,11 +27,11 @@ on:
jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
runs-on: zig-object
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-obj-darwin-x64-baseline
- cpu: haswell
@@ -104,13 +104,13 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell
@@ -119,16 +119,16 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell
@@ -137,7 +137,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native
@@ -145,7 +145,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -245,14 +245,14 @@ jobs:
strategy:
matrix:
include:
# - cpu: nehalem
# - cpu: westmere
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
@@ -260,14 +260,14 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -376,9 +376,7 @@ jobs:
- name: Release
id: release
uses: ncipollo/release-action@v1
if: |
github.repository_owner == 'oven-sh'
&& github.ref == 'refs/heads/main'
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
@@ -390,75 +388,3 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request'
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
strategy:
fail-fast: false
matrix:
include:
- tag: bun-darwin-x64
runner: macos-11
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: download
name: Download
uses: actions/download-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
message: |
❌ @${{ github.actor }} ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.test.outputs.failing_tests == '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: test-failures-${{matrix.tag}}
mode: upsert
create_if_not_exists: false
message: |
✅ test failures on ${{ matrix.tag }} have been resolved.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- id: fail
name: Fail the build
if: steps.test.outputs.failing_tests != ''
run: exit 1

View File

@@ -1,146 +0,0 @@
name: bun-release-canary
concurrency: release-canary
on:
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "canary"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- canary publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
# npm-types:
# name: Release types to NPM
# runs-on: ubuntu-latest
# defaults:
# run:
# working-directory: packages/bun-types
# steps:
# - id: checkout
# name: Checkout
# uses: actions/checkout@v3
# - id: setup-node
# name: Setup Node.js
# uses: actions/setup-node@v3
# with:
# node-version: latest
# - id: setup-bun
# name: Setup Bun
# uses: oven-sh/setup-bun@v1
# with:
# bun-version: canary
# - id: bun-install
# name: Install Dependencies
# run: bun install
# - id: setup-env
# name: Setup Environment
# run: |
# SHA=$(git rev-parse --short "$GITHUB_SHA")
# VERSION=$(bun --version)
# TAG="${VERSION}-canary.$(date '+%Y%m%d').1+${SHA}"
# echo "Setup tag: ${TAG}"
# echo "TAG=${TAG}" >> ${GITHUB_ENV}
# - id: bun-run
# name: Build
# run: bun run build
# env:
# BUN_VERSION: ${{ env.TAG }}
# - id: npm-publish
# name: Release
# uses: JS-DevTools/npm-publish@v1
# with:
# package: packages/bun-types/dist/package.json
# token: ${{ secrets.NPM_TOKEN }}
# tag: canary
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: canary
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=canary

View File

@@ -1,53 +0,0 @@
name: bun-release-canary
concurrency: release-canary
on:
push:
branches:
- main
paths:
- "packages/bun-types/**"
workflow_dispatch:
jobs:
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: setup-env
name: Setup Environment
run: |
SHA=$(git rev-parse --short "$GITHUB_SHA")
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary

View File

@@ -1,213 +0,0 @@
name: bun-release
concurrency: release
on:
release:
types:
- published
workflow_dispatch:
inputs:
tag:
type: string
description: The tag to publish
required: true
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- "${{ env.TAG }}" publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: environment
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: |
type=match,pattern=(bun-v)?(\d.\d.\d),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d.\d),group=2,value=${{ env.TAG }}
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=${{ env.TAG }}
homebrew:
name: Release to Homebrew
runs-on: ubuntu-latest
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: "2.6"
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ env.TAG }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_options: --gpg-sign=${{ steps.setup-gpg.outputs.keyid }}
commit_message: Release ${{ env.TAG }}
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>
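Each job in this deleted bun-release.yml resolves the release tag the same way: take the manual workflow_dispatch input if present, otherwise fall back to the tag of the release event, then export it for later steps. A condensed sketch of that idiom, with explanatory comments added (the step itself is copied from the file above):

```yaml
# The tag-resolution idiom repeated across the sign, npm, docker, and homebrew jobs.
- id: setup-env
  name: Setup Environment
  run: |
    TAG="${{ github.event.inputs.tag }}"                  # set only for workflow_dispatch runs
    TAG="${TAG:-"${{ github.event.release.tag_name }}"}"  # bash ${var:-default}: fall back to the release tag
    echo "Setup tag: ${TAG}"
    echo "TAG=${TAG}" >> ${GITHUB_ENV}                    # later steps read it as ${{ env.TAG }}
```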

View File

@@ -1,31 +0,0 @@
name: typecheck
on:
push:
branches: [main]
paths:
- "packages/bun-types/**"
- "test/**"
pull_request:
paths:
- "packages/bun-types/**"
- "test/**"
jobs:
tests:
name: check-tests
runs-on: ubuntu-latest
defaults:
run:
working-directory: test
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- name: Install dependencies
run: bun install
- name: Typecheck tests
run: bun run typecheck

View File

@@ -0,0 +1,60 @@
name: Release bun-types@canary
on:
push:
branches: [main]
paths:
- 'packages/bun-types/**'
jobs:
tests:
name: Build, test, publish canary
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Install Node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install dependencies
run: bun install
- name: Generate package
run: bun run build
- name: Tests
run: bun run test
- name: Set temp version
working-directory: packages/bun-types/dist
run: |
git_hash=$(git rev-parse --short "$GITHUB_SHA")
new_pkg_version="$(bun --version)-canary.${git_hash}"
echo "new_pkg_version"
echo "${new_pkg_version}"
npm version ${new_pkg_version} --no-git-tag-version
- name: Publish to NPM
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_BUN_TYPES_TOKEN }}
# dry-run: true
tag: canary
# - name: Publish on NPM
# working-directory: packages/bun-types/dist
# run: npm publish --access public --tag canary # --dry-run
# env:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# NODE_AUTH_TOKEN: ${{ secrets.NPM_BUN_TYPES_TOKEN }}
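The "Set temp version" step above builds an npm prerelease version from the current Bun version and the short commit SHA before publishing under the canary dist-tag. As a rough worked example with assumed values (`bun --version` reporting 0.5.1 and a short SHA of abc1234), the step is equivalent to:

```yaml
# Worked example with assumed values: bun --version -> 0.5.1, short SHA -> abc1234.
- name: Set temp version (expanded)
  working-directory: packages/bun-types/dist
  run: |
    # new_pkg_version becomes "0.5.1-canary.abc1234"
    npm version 0.5.1-canary.abc1234 --no-git-tag-version
    # only package.json is rewritten; the canary dist-tag is applied by the
    # JS-DevTools/npm-publish step below (tag: canary).
```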

.github/workflows/bun-types-release.yml (vendored, new file, 135 lines)
View File

@@ -0,0 +1,135 @@
name: Release bun-types
on:
workflow_dispatch:
jobs:
test-build:
name: Test & Build
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install dependencies
run: bun upgrade --canary; bun install
- name: Build package
run: bun run build
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist/*
if-no-files-found: error
publish-npm:
name: Publish to NPM
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install Node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: "https://registry.npmjs.org"
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist
- name: Publish to NPM
working-directory: packages/bun-types/dist
run: npm publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
publish-gpr:
name: Publish on GPR
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: "https://npm.pkg.github.com/"
scope: "@oven-sh"
- name: Install bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist
- name: Add scope to name
run: bun scripts/gpr.ts
- name: Publish on GPR
run: cd dist/ && npm publish --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# no need for separate releases now
# create-release:
# name: Create Release
# runs-on: ubuntu-latest
# needs: [test-build]
# defaults:
# run:
# working-directory: packages/bun-types
# if: github.repository_owner == 'oven-sh'
# steps:
# - name: Download all artifacts
# uses: actions/download-artifact@v3
# with:
# name: bun-types
# path: packages/bun-types/dist
# - name: Set version
# run: echo "version=$(jq --raw-output '.version' dist/package.json)" >> $GITHUB_ENV
# - name: Create Release
# uses: softprops/action-gh-release@v0.1.14
# with:
# tag_name: "v${{ env.version }}"
# body: "This is the release of bun-types that corresponds to the commit [${{ github.sha }}]"
# token: ${{ secrets.GITHUB_TOKEN }}
# files: |
# dist/*

View File

@@ -1,16 +1,16 @@
name: bun-types
name: Test bun-types
on:
push:
paths:
- "packages/bun-types/**"
- 'packages/bun-types/**'
branches: [main]
pull_request:
paths:
- "packages/bun-types/**"
- 'packages/bun-types/**'
jobs:
tests:
name: type-tests
name: Build and test
runs-on: ubuntu-latest
defaults:
run:
@@ -21,9 +21,10 @@ jobs:
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v1
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install node
uses: actions/setup-node@v3
@@ -31,8 +32,7 @@ jobs:
node-version: latest
- name: Install dependencies
run: |
bun install
run: bun install
- name: Generate package
run: bun run build

View File

@@ -1,76 +0,0 @@
name: prettier
on:
pull_request:
branches:
- main
- jarred/test-actions
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
prettier-fmt:
name: prettier
runs-on: ubuntu-latest
outputs:
prettier_fmt_errs: ${{ steps.fmt.outputs.prettier_fmt_errs }}
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- id: setup
name: Setup
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- id: install
name: Install prettier
run: bun install
- name: Run prettier
id: fmt
run: |
rm -f .failed
bun prettier --check "./bench/**/*.{ts,tsx,js,jsx,mjs}" "./test/**/*.{ts,tsx,js,jsx,mjs}" "./src/**/*.{ts,tsx,js,jsx}" --config .prettierrc.cjs 2> prettier-fmt.err > prettier-fmt1.err || echo 'failed' > .failed
if [ -s .failed ]; then
delimiter="$(openssl rand -hex 8)"
echo "prettier_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
cat prettier-fmt.err >> "${GITHUB_OUTPUT}"
cat prettier-fmt1.err >> "${GITHUB_OUTPUT}"
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
fi
- name: Comment on PR
if: steps.fmt.outputs.prettier_fmt_errs != ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: prettier-fmt
message: |
❌ @${{ github.actor }} `prettier` reported errors
```js
${{ steps.fmt.outputs.prettier_fmt_errs }}
```
To one-off fix this manually, run:
```sh
bun fmt
```
You might need to run `bun install` locally and configure your text editor to [auto-format on save](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode).
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Uncomment on PR
if: steps.fmt.outputs.prettier_fmt_errs == ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: prettier-fmt
mode: upsert
create_if_not_exists: false
message: |
✅ `prettier` errors have been resolved. Thank you.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
- name: Fail the job
if: steps.fmt.outputs.prettier_fmt_errs != ''
run: exit 1
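This deleted prettier workflow, and the zig-fmt workflow that follows, hand multi-line tool output to later steps through the `name<<delimiter` heredoc form of `$GITHUB_OUTPUT`. A minimal standalone sketch of the pattern (step and output names are placeholders, not taken from the diff):

```yaml
# Minimal sketch of the multi-line $GITHUB_OUTPUT pattern used above.
- id: collect
  run: |
    delimiter="$(openssl rand -hex 8)"         # random marker so the body cannot terminate the heredoc early
    echo "report<<${delimiter}" >> "${GITHUB_OUTPUT}"
    cat tool-output.txt >> "${GITHUB_OUTPUT}"  # any number of lines
    echo "${delimiter}" >> "${GITHUB_OUTPUT}"
- name: Use it
  run: echo "${{ steps.collect.outputs.report }}"
```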

View File

@@ -1,59 +0,0 @@
name: Run Tests Manually
on:
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
inputs:
version:
description: "Version"
required: true
default: "canary"
type: string
use_bun:
description: "Use Bun?"
required: true
default: true
type: boolean
jobs:
linux-test:
name: Tests ${{matrix.tag}} ${{github.event.inputs.version}}
runs-on: ubuntu-latest
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
include:
- tag: linux-x64
- tag: linux-x64-baseline
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: install-npm
name: Install (npm)
run: |
npm install @oven/bun-${{matrix.tag}}@${{github.event.inputs.version}}
chmod +x node_modules/@oven/bun-${{matrix.tag}}/bin/bun
sudo cp node_modules/@oven/bun-${{matrix.tag}}/bin/bun /usr/bin/bun
- id: test
name: Test
if: ${{github.event.inputs.use_bun == 'true'}}
run: |
bun install
bun install --cwd test/bun.js
bun install --cwd test/bun.js/third-party/body-parser-test
cd packages/bun-internal-test
bun install
bun run test
- id: test-node-runner
name: Test (node runner)
if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test/bun.js
bun install --cwd test/bun.js/third-party/body-parser-test
cd packages/bun-internal-test
bun install
node src/runner.node.mjs

View File

@@ -1,87 +0,0 @@
name: zig-fmt
env:
ZIG_VERSION: 0.11.0-dev.1783+436e99d13
on:
pull_request:
branches:
- main
- jarred/test-actions
paths:
- "src/**/*.zig"
- "src/*.zig"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
zig-fmt:
name: zig fmt
runs-on: ubuntu-latest
outputs:
zig_fmt_errs: ${{ steps.fmt.outputs.zig_fmt_errs }}
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Install zig
run: |
curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
tar -xf zig.tar.xz
sudo mv zig-linux-x86_64-${{env.ZIG_VERSION}}/zig /usr/local/bin
- name: Run zig fmt
id: fmt
run: |
zig fmt --check src/*.zig src/**/*.zig 2> zig-fmt.err > zig-fmt.err2 || echo "Failed"
delimiter="$(openssl rand -hex 8)"
echo "zig_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
if [ -s zig-fmt.err ]; then
echo "// The following errors occurred:" >> "${GITHUB_OUTPUT}"
cat zig-fmt.err >> "${GITHUB_OUTPUT}"
fi
if [ -s zig-fmt.err2 ]; then
echo "// The following files were not formatted:" >> "${GITHUB_OUTPUT}"
cat zig-fmt.err2 >> "${GITHUB_OUTPUT}"
fi
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
- name: Comment on PR
if: steps.fmt.outputs.zig_fmt_errs != ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: zig-fmt
message: |
❌ @${{ github.actor }} `zig fmt` reported errors. Consider configuring your text editor to [auto-format on save](https://github.com/ziglang/vscode-zig)
```zig
// # zig fmt --check src/*.zig src/**/*.zig
${{ steps.fmt.outputs.zig_fmt_errs }}
```
To one-off fix this manually, run:
```sh
zig fmt src/*.zig src/**/*.zig
```
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
<sup>zig v${{env.ZIG_VERSION}}</sup>
- name: Uncomment on PR
if: steps.fmt.outputs.zig_fmt_errs == ''
uses: thollander/actions-comment-pull-request@v2
with:
comment_tag: zig-fmt
mode: upsert
create_if_not_exists: false
message: |
✅ `zig fmt` errors have been resolved. Thank you.
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
<sup>zig v${{env.ZIG_VERSION}}</sup>
- name: Fail the job
if: steps.fmt.outputs.zig_fmt_errs != ''
run: exit 1

.gitignore (vendored, 8 changed lines)
View File

@@ -1,6 +1,7 @@
.DS_Store
zig-cache
packages/*/*.wasm
*.wasm
*.o
*.a
profile.json
@@ -109,8 +110,3 @@ misctools/machbench
bun-webkit
src/deps/c-ares/build
src/bun.js/debug-bindings-obj
failing-tests.txt
test.txt
myscript.sh

View File

@@ -1,12 +1,8 @@
src/fallback.html
src/bun.js/WebKit
src/bun.js/builtins/js
src/*.out.js
src/*out.*.js
src/deps
src/test/fixtures
src/react-refresh.js
# src/test
test/bun.js/solid-dom-fixtures
test/bun.js/bundled
#src/bun.js/builtins
# src/api/demo
test/snapshots
test/snapshots-no-hmr
test/js/deno/*.test.ts
test/js/deno/**/*.test.ts

.prettierrc (new file, 7 lines)
View File

@@ -0,0 +1,7 @@
{
"tabWidth": 2,
"useTabs": false,
"singleQuote": false,
"bracketSpacing": true,
"trailingComma": "all"
}

View File

@@ -1,15 +0,0 @@
module.exports = {
arrowParens: "avoid",
printWidth: 120,
trailingComma: "all",
useTabs: false,
quoteProps: "preserve",
overrides: [
{
files: "README.md",
options: {
printWidth: 80,
},
},
],
};

View File

@@ -2,9 +2,7 @@
"configurations": [
{
"name": "Mac",
"forcedInclude": [
"${workspaceFolder}/src/bun.js/bindings/root.h"
],
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
"includePath": [
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/bun-webkit/include/",
@@ -14,12 +12,13 @@
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/",
"${workspaceFolder}/src/bun.js/bindings/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/bun.js/bindings/WebCore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/builtins/",
"${workspaceFolder}/src/bun.js/builtins/cpp",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/uws/uSockets/src"
],
@@ -33,11 +32,12 @@
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/**",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/bun.js/bindings/**",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/builtins/*",
"${workspaceFolder}/src/bun.js/builtins/cpp/*",
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/src/bun.js/builtins/**",
"${workspaceFolder}/src/bun.js/builtins/cpp/**",
"${workspaceFolder}/src/bun.js/modules/**",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps/uws/uSockets/src"

View File

@@ -1,3 +1,8 @@
{
"recommendations": ["ziglang.vscode-zig", "esbenp.prettier-vscode", "xaver.clang-format", "vadimcn.vscode-lldb"]
"recommendations": [
"AugusteRame.zls-vscode",
"esbenp.prettier-vscode",
"xaver.clang-format",
"vadimcn.vscode-lldb"
]
}

.vscode/launch.json (generated, vendored, 26 changed lines)
View File

@@ -7,19 +7,7 @@
"name": "bun test",
"program": "bun-debug",
"args": ["wiptest", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test --watch",
"program": "bun-debug",
"args": ["--watch", "test", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test/bun.js",
"env": {
"FORCE_COLOR": "1"
},
@@ -50,18 +38,6 @@
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run (watch)",
"program": "bun-debug",
"args": ["--watch", "${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",

.vscode/settings.json (vendored, 36 changed lines)
View File

@@ -7,16 +7,13 @@
"search.followSymlinks": false,
"search.useIgnoreFiles": true,
"zig.buildOnSave": false,
"zig.buildArgs": [
"obj",
"-Dfor-editor"
],
"zig.buildArgs": ["obj", "-Dfor-editor"],
"zig.buildOption": "build",
"zig.buildFilePath": "${workspaceFolder}/build.zig",
"[zig]": {
"editor.tabSize": 4,
"editor.useTabStops": false,
"editor.defaultFormatter": "ziglang.vscode-zig",
"editor.defaultFormatter": "tiehuis.zig",
"editor.formatOnSave": true
},
"[ts]": {
@@ -35,24 +32,8 @@
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
},
"[yaml]": {
"editor.formatOnSave": true
},
"zig.beforeDebugCmd": "make build-unit ${file} ${filter} ${bin}",
"zig.testCmd": "make test ${file} ${filter} ${bin}",
"[markdown]": {
"editor.unicodeHighlight.ambiguousCharacters": false,
"editor.unicodeHighlight.invisibleCharacters": false,
"diffEditor.ignoreTrimWhitespace": false,
"editor.defaultFormatter": "yzhang.markdown-all-in-one",
"editor.formatOnSave": true,
"editor.wordWrap": "on",
"editor.quickSuggestions": {
"comments": "off",
"strings": "off",
"other": "off"
}
},
"lldb.verboseLogging": false,
"files.exclude": {
"**/.git": true,
@@ -201,16 +182,9 @@
"ethernet.h": "c",
"inet.h": "c",
"packet.h": "c",
"queue": "cpp",
"compare": "cpp",
"concepts": "cpp",
"typeindex": "cpp",
"__verbose_abort": "cpp"
"queue": "cpp"
},
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "Enabled",
"eslint.workingDirectories": [
"packages/bun-types"
],
"files.insertFinalNewline": true
}
"eslint.workingDirectories": ["packages/bun-types"]
}

View File

@@ -1,86 +0,0 @@
# Contributing to Bun
All contributions need test coverage. If you are adding a new feature, please add a test. If you are fixing a bug, please add a test that fails before your fix and passes after your fix.
## Bun's codebase
Bun is written mostly in Zig, but WebKit & JavaScriptCore (the JavaScript engine) is written in C++.
Today (February 2023), Bun's codebase has five distinct parts:
- JavaScript, JSX, & TypeScript transpiler, module resolver, and related code
- JavaScript runtime ([`src/bun.js/`](src/bun.js/))
- JavaScript runtime bindings ([`src/bun.zig/bindings/**/*.cpp`](src/bun.zig/bindings/))
- Package manager ([`src/install/`](src/install/))
- Shared utilities ([`src/string_immutable.zig`](src/string_immutable.zig))
The JavaScript transpiler & module resolver is mostly independent from the runtime. It predates the runtime and is entirely in Zig. The JavaScript parser is mostly in [`src/js_parser.zig`](src/js_parser.zig). The JavaScript AST data structures are mostly in [`src/js_ast.zig`](src/js_ast.zig). The JavaScript lexer is in [`src/js_lexer.zig`](src/js_lexer.zig). A lot of this code started as a port of esbuild's equivalent code from Go to Zig, but has had many small changes since then.
## Memory management in Bun
For the Zig code, please:
1. Do your best to avoid dynamically allocating memory.
2. If we need to allocate memory, carefully consider the owner of that memory. If it's a JavaScript object, it will need a finalizer. If it's in Zig, it will need to be freed either via an arena or manually.
3. Prefer arenas over manual memory management. Manually freeing memory is leak & crash prone.
4. If the memory needs to be accessed across threads, use `bun.default_allocator`. Mimalloc threadlocal heaps are not safe to free across threads.
The JavaScript transpiler has special-handling for memory management. The parser allocates into a single arena and the memory is recycled after each parse.
## JavaScript runtime
Most of Bun's JavaScript runtime code lives in [`src/bun.js`](src/bun.js).
### Calling C++ from Zig & Zig from C++
TODO: document this (see [`bindings.zig`](src/bun.js/bindings/bindings.zig) and [`bindings.cpp`](src/bun.js/bindings/bindings.cpp) for now)
### Adding a new JavaScript class
1. Add a new file in [`src/bun.js/*.classes.ts`](src/bun.js) to define the instance and static methods for the class.
2. Add a new file in [`src/bun.js/**/*.zig`](src/bun.js) and expose the struct in [`src/bun.js/generated_classes_list.zig`](src/bun.js/generated_classes_list.zig)
3. Run `make codegen`
Copy from examples like `Subprocess` or `Response`.
### ESM modules
Bun implements ESM modules in a mix of native code and JavaScript.
Several Node.js modules are implemented in JavaScript and loosely based on browserify polyfills.
The ESM modules in Bun are located in [`src/bun.js/*.exports.js`](src/bun.js/). Unlike other code in Bun, these files are NOT transpiled; they are loaded directly into the JavaScriptCore VM. That means `require` does not work in these files. Instead, use `import.meta.require` or, ideally, do not require/import other files at all.
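A minimal sketch of what such a file might contain (the file name, module, and functions are invented for illustration; the only assumption carried over from above is that `import.meta.require` is available):
```js
// Hypothetical src/bun.js/example.exports.js — these files are not
// transpiled, so CommonJS `require` is unavailable; use import.meta.require.
var EventEmitter;
function lazyEventEmitter() {
  // Resolve on first use so unused modules do not slow down startup.
  return (EventEmitter ??= import.meta.require("node:events").EventEmitter);
}

export function createEmitter() {
  return new (lazyEventEmitter())();
}

export default { createEmitter };
```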
The module loader is in [`src/bun.js/module_loader.zig`](src/bun.js/module_loader.zig).
### JavaScript Builtins
JavaScript builtins are located in [`src/bun.js/builtins/*.js`](src/bun.js/builtins).
These files support a JavaScriptCore-only syntax for internal slots. `@` is used to access an internal slot. For example, `new @Array(123)` creates a new `Array` much like `new Array(123)`, except that if a library modifies the `Array` global, the internal slot (`@Array`) is unaffected. These names must be allow-listed in `BunBuiltinNames.h` (though JavaScriptCore allow-lists some names by default).
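For illustration only (this function is not in the codebase; only the `@` behavior is taken from the description above), a builtin using that syntax might look like:
```js
// Hypothetical builtin: @Array always refers to the original Array
// constructor, even if user code has overwritten the global `Array`.
// The name must be allow-listed (in BunBuiltinNames.h or JSC's defaults).
function cloneToArray(items) {
  "use strict";
  var result = new @Array(items.length);
  for (var i = 0; i < items.length; i++) result[i] = items[i];
  return result;
}
```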
These builtin files cannot use or reference ESM modules. Files that end with `*Internals.js` are automatically loaded globally. Most usage of internals right now is in the stream implementations (which share a lot of code with Safari/WebKit) and in ImportMetaObject (which is how `require` is implemented in the runtime).
To regenerate the builtins:
```sh
make clean-bindings && make generate-builtins && make bindings -j10
```
It is recommended that you have ccache installed; otherwise you will spend a lot of time waiting for the bindings to compile.
### Memory management in Bun's JavaScript runtime
TODO: fill this out (for now, use `JSC.Strong` in most cases)
### Strings
TODO: fill this out (for now, use `JSValue.toSlice()` in most cases)
#### JavaScriptCore C API
Do not copy from examples that use the JavaScriptCore C API, and do not use it in new code. We will not accept PRs that add new code using the JavaScriptCore C API.
## Testing
See [`test/README.md`](test/README.md) for information on how to run tests.


@@ -10,9 +10,9 @@ ARG ARCH=x86_64
ARG BUILD_MACHINE_ARCH=x86_64
ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=feb9
ARG WEBKIT_TAG=jul27-2
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.11.0-dev.1783+436e99d13"
ARG ZIG_VERSION="0.11.0-dev.1314+9856bea34"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
@@ -122,8 +122,8 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
@@ -149,9 +149,6 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/lol-html ${BUN_DIR}/src/deps/lol-html
@@ -306,6 +303,8 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
@@ -402,7 +401,6 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
@@ -427,7 +425,7 @@ ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && make prerelease && \
mkdir -p $BUN_RELEASE_DIR && \
OUTPUT_DIR=/tmp/bun-${TRIPLET}-${GIT_SHA} $ZIG_PATH/zig build obj -Doutput-dir=/tmp/bun-${TRIPLET}-${GIT_SHA} -Doptimize=ReleaseFast -Dtarget="${TRIPLET}" -Dcpu="${CPU_TARGET}" && \
OUTPUT_DIR=/tmp/bun-${TRIPLET}-${GIT_SHA} $ZIG_PATH/zig build obj -Doutput-dir=/tmp/bun-${TRIPLET}-${GIT_SHA} -Drelease-fast -Dtarget="${TRIPLET}" -Dcpu="${CPU_TARGET}" && \
cp /tmp/bun-${TRIPLET}-${GIT_SHA}/bun.o /tmp/bun-${TRIPLET}-${GIT_SHA}/bun-${BUN_BASE_VERSION}.$(cat ${BUN_DIR}/src/build-id).o && cd / && rm -rf $BUN_DIR
FROM scratch as build_release_obj
@@ -461,7 +459,6 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
@@ -487,14 +484,10 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile
COPY src/bun.js/bindings/sqlite ${BUN_DIR}/src/bun.js/bindings/sqlite
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
@@ -522,7 +515,6 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR


@@ -8,7 +8,7 @@ ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
ARG BUN_PACKAGES_DIR=${BUN_DIR}/packages
ARG ZIG_VERSION="0.11.0-dev.1783+436e99d13"
ARG ZIG_VERSION="0.11.0-dev.1314+9856bea34"
ARG ZIG_FOLDERNAME=zig-linux-${ARCH}-${ZIG_VERSION}
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
@@ -99,7 +99,7 @@ RUN tar -xf ${ZIG_FILENAME} && \
mv ${ZIG_FOLDERNAME} ${ZIG_PATH};
RUN cd $GITHUB_WORKSPACE && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/feb25/bun-webkit-linux-$BUILDARCH.tar.gz && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-$BUILDARCH.tar.gz && \
tar -xzf bun-webkit-linux-$BUILDARCH.tar.gz && \
rm bun-webkit-linux-$BUILDARCH.tar.gz && \
cat $WEBKIT_OUT_DIR/include/cmakeconfig.h > /dev/null

Makefile (186 changed lines)

@@ -35,7 +35,7 @@ DOCKER_BUILDARCH = amd64
BREW_PREFIX_PATH = /usr/local
DEFAULT_MIN_MACOS_VERSION = 10.14
MARCH_NATIVE = -march=$(CPU_TARGET) -mtune=$(CPU_TARGET)
NATIVE_OR_OLD_MARCH = -march=nehalem
NATIVE_OR_OLD_MARCH = -march=westmere
endif
MIN_MACOS_VERSION ?= $(DEFAULT_MIN_MACOS_VERSION)
@@ -147,8 +147,8 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
-DCMAKE_AR=$(AR) \
-DCMAKE_RANLIB=$(which llvm-15-ranlib || which llvm-ranlib)
CMAKE_FLAGS = $(CMAKE_FLAGS_WITHOUT_RELEASE) -DCMAKE_BUILD_TYPE=Release
@@ -395,10 +395,6 @@ CLANG_FLAGS = $(INCLUDE_DIRS) \
-fvisibility=hidden \
-fvisibility-inlines-hidden
ifeq ($(OS_NAME),darwin)
CLANG_FLAGS += -DBUN_FAST_TLS=1 -DUSE_BUN_FAST_TLS=1 -DHAVE_BUN_FAST_TLS=1
endif
PLATFORM_LINKER_FLAGS =
SYMBOLS=
@@ -513,9 +509,6 @@ npm-install:
$(NPM_CLIENT) install --ignore-scripts --production
print-% : ; @echo $* = $($*)
get-% : ; @echo $($*)
print-version:
@echo $(PACKAGE_JSON_VERSION)
@@ -569,11 +562,7 @@ c-ares:
rm -rf $(BUN_DEPS_DIR)/c-ares/build && \
mkdir $(BUN_DEPS_DIR)/c-ares/build && \
cd $(BUN_DEPS_DIR)/c-ares/build && \
cmake $(CMAKE_FLAGS) -DCMAKE_C_FLAGS="$(CFLAGS) -flto=full" \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_LIBDIR=lib \
-DCARES_STATIC=ON -DCARES_STATIC_PIC=ON -DCARES_SHARED=OFF \
-G "Ninja" .. && \
cmake $(CMAKE_FLAGS) -DCMAKE_C_FLAGS="$(CFLAGS) -flto=full" -DCMAKE_BUILD_TYPE=Release -DCARES_STATIC=ON -DCARES_STATIC_PIC=ON -DCARES_SHARED=OFF -G "Ninja" .. && \
ninja && cp lib/libcares.a $(BUN_DEPS_OUT_DIR)/libcares.a
.PHONY: prepare-types
@@ -590,7 +579,8 @@ release-types:
.PHONY: format
format: ## to format the code
-$(PRETTIER) --write 'test/**/*.{js,jsx,ts,tsx}'
-$(PRETTIER) --write 'test/bun.js/*.{js,jsx,ts,tsx}'
-$(PRETTIER) --write 'test/bun.js/solid-dom-fixtures/**/*.{js,jsx,ts,tsx}'
.PHONY: lolhtml
@@ -618,7 +608,7 @@ boringssl-debug: boringssl-build-debug boringssl-copy
.PHONY: compile-ffi-test
compile-ffi-test:
clang $(OPTIMIZATION_LEVEL) -shared -undefined dynamic_lookup -o /tmp/bun-ffi-test.dylib -fPIC ./test/js/bun/ffi/ffi-test.c
clang $(OPTIMIZATION_LEVEL) -shared -undefined dynamic_lookup -o /tmp/bun-ffi-test.dylib -fPIC ./test/bun.js/ffi-test.c
sqlite:
@@ -635,7 +625,7 @@ libarchive:
.PHONY: tgz
tgz:
$(ZIG) build tgz-obj -Doptimize=ReleaseFast
$(ZIG) build tgz-obj -Drelease-fast
$(CXX) $(PACKAGE_DIR)/tgz.o -g -o ./misctools/tgz $(DEFAULT_LINKER_FLAGS) -lc $(ARCHIVE_FILES)
rm -rf $(PACKAGE_DIR)/tgz.o
@@ -672,11 +662,11 @@ init-submodules:
.PHONY: build-obj
build-obj:
$(ZIG) build obj -Doptimize=ReleaseFast -Dcpu="$(CPU_TARGET)"
$(ZIG) build obj -Drelease-fast -Dcpu="$(CPU_TARGET)"
.PHONY: dev-build-obj-wasm
dev-build-obj-wasm:
$(ZIG) build bun-wasm -Dtarget=wasm32-freestanding
$(ZIG) build bun-wasm -Dtarget=wasm32-freestanding --prominent-compile-errors
.PHONY: dev-wasm
dev-wasm: dev-build-obj-wasm
@@ -689,7 +679,7 @@ dev-wasm: dev-build-obj-wasm
.PHONY: build-obj-wasm
build-obj-wasm:
$(ZIG) build bun-wasm -Doptimize=ReleaseFast -Dtarget=wasm32-freestanding
$(ZIG) build bun-wasm -Drelease-fast -Dtarget=wasm32-freestanding --prominent-compile-errors
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
-g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
@@ -699,7 +689,7 @@ build-obj-wasm:
.PHONY: build-obj-wasm-small
build-obj-wasm-small:
$(ZIG) build bun-wasm -Doptimize=ReleaseSmall -Dtarget=wasm32-freestanding
$(ZIG) build bun-wasm -Drelease-small -Dtarget=wasm32-freestanding --prominent-compile-errors
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
-g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
@@ -726,22 +716,22 @@ wasm: api build-obj-wasm-small
.PHONY: build-obj-safe
build-obj-safe:
$(ZIG) build obj -Doptimize=ReleaseSafe -Dcpu="$(CPU_TARGET)"
$(ZIG) build obj -Drelease-safe
UWS_CC_FLAGS = -pthread -DLIBUS_USE_OPENSSL=1 -DUWS_HTTPRESPONSE_NO_WRITEMARK=1 -DLIBUS_USE_BORINGSSL=1 -DWITH_BORINGSSL=1 -Wpedantic -Wall -Wextra -Wsign-conversion -Wconversion $(UWS_INCLUDE) -DUWS_WITH_PROXY
UWS_CXX_FLAGS = $(UWS_CC_FLAGS) -std=$(CXX_VERSION) -fno-exceptions -fno-rtti
UWS_CXX_FLAGS = $(UWS_CC_FLAGS) -std=$(CXX_VERSION) -fno-exceptions
UWS_LDFLAGS = -I$(BUN_DEPS_DIR)/boringssl/include -I$(ZLIB_INCLUDE_DIR)
USOCKETS_DIR = $(BUN_DEPS_DIR)/uws/uSockets/
USOCKETS_SRC_DIR = $(BUN_DEPS_DIR)/uws/uSockets/src/
usockets:
rm -rf $(BUN_DEPS_DIR)/uws/uSockets/*.o $(BUN_DEPS_DIR)/uws/uSockets/**/*.o $(BUN_DEPS_DIR)/uws/uSockets/*.a $(BUN_DEPS_DIR)/uws/uSockets/*.bc
cd $(USOCKETS_DIR) && $(CC_WITH_CCACHE) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CFLAGS) $(UWS_CC_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(USOCKETS_SRC_DIR)/*.c) $(wildcard $(USOCKETS_SRC_DIR)/**/*.c)
cd $(USOCKETS_DIR) && $(CXX_WITH_CCACHE) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CXXFLAGS) $(UWS_CXX_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(USOCKETS_SRC_DIR)/*.cpp) $(wildcard $(USOCKETS_SRC_DIR)/**/*.cpp)
cd $(USOCKETS_DIR) && $(CC_WITH_CCACHE) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CFLAGS) $(UWS_CC_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -g -c $(wildcard $(USOCKETS_SRC_DIR)/*.c) $(wildcard $(USOCKETS_SRC_DIR)/**/*.c)
cd $(USOCKETS_DIR) && $(CXX_WITH_CCACHE) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CXXFLAGS) $(UWS_CXX_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -g -c $(wildcard $(USOCKETS_SRC_DIR)/*.cpp) $(wildcard $(USOCKETS_SRC_DIR)/**/*.cpp)
cd $(USOCKETS_DIR) && $(AR) rcvs $(BUN_DEPS_OUT_DIR)/libusockets.a $(USOCKETS_DIR)/*.{o,bc}
uws: usockets
$(CXX_WITH_CCACHE) -O2 $(EMIT_LLVM_FOR_RELEASE) -fPIC -I$(BUN_DEPS_DIR)/uws/uSockets/src $(CLANG_FLAGS) $(CFLAGS) $(UWS_CXX_FLAGS) $(UWS_LDFLAGS) $(PLATFORM_LINKER_FLAGS) -c -I$(BUN_DEPS_DIR) $(BUN_DEPS_OUT_DIR)/libusockets.a $(BUN_DEPS_DIR)/libuwsockets.cpp -o $(BUN_DEPS_OUT_DIR)/libuwsockets.o
$(CXX_WITH_CCACHE) $(EMIT_LLVM_FOR_RELEASE) -fPIC -I$(BUN_DEPS_DIR)/uws/uSockets/src $(CLANG_FLAGS) $(CFLAGS) $(UWS_CXX_FLAGS) $(UWS_LDFLAGS) $(PLATFORM_LINKER_FLAGS) -c -I$(BUN_DEPS_DIR) $(BUN_DEPS_OUT_DIR)/libusockets.a $(BUN_DEPS_DIR)/libuwsockets.cpp -o $(BUN_DEPS_OUT_DIR)/libuwsockets.o
.PHONY: sign-macos-x64
sign-macos-x64:
@@ -791,7 +781,8 @@ fmt: fmt-cpp fmt-zig
api:
./node_modules/.bin/peechy --schema src/api/schema.peechy --esm src/api/schema.js --ts src/api/schema.d.ts --zig src/api/schema.zig
$(ZIG) fmt src/api/schema.zig
$(PRETTIER) --config=.prettierrc.cjs --write src/api/schema.js src/api/schema.d.ts
$(PRETTIER) --write src/api/schema.js
$(PRETTIER) --write src/api/schema.d.ts
.PHONY: node-fallbacks
node-fallbacks:
@@ -827,14 +818,14 @@ generate-install-script:
.PHONY: fetch
fetch: $(IO_FILES)
$(ZIG) build -Doptimize=ReleaseFast fetch-obj
$(ZIG) build -Drelease-fast fetch-obj
$(CXX) $(PACKAGE_DIR)/fetch.o -g $(OPTIMIZATION_LEVEL) -o ./misctools/fetch $(IO_FILES) $(DEFAULT_LINKER_FLAGS) -lc $(MINIMUM_ARCHIVE_FILES)
rm -rf $(PACKAGE_DIR)/fetch.o
.PHONY: sha
sha:
$(ZIG) build -Doptimize=ReleaseFast sha-bench-obj
$(CXX) $(PACKAGE_DIR)/sha.o -I$(BUN_DEPS_DIR) -g $(OPTIMIZATION_LEVEL) -o ./misctools/sha $(DEFAULT_LINKER_FLAGS) -lc $(MINIMUM_ARCHIVE_FILES)
$(ZIG) build -Drelease-fast sha-bench-obj
$(CXX) $(PACKAGE_DIR)/sha.o -g $(OPTIMIZATION_LEVEL) -o ./misctools/sha $(DEFAULT_LINKER_FLAGS) -lc $(MINIMUM_ARCHIVE_FILES)
rm -rf $(PACKAGE_DIR)/sha.o
.PHONY: fetch-debug
@@ -849,7 +840,7 @@ machbench-debug: $(IO_FILES)
.PHONY: machbench
machbench: $(IO_FILES)
$(ZIG) build -Doptimize=ReleaseFast machbench-obj
$(ZIG) build -Drelease-fast machbench-obj
$(CXX) $(PACKAGE_DIR)/machbench.o -g $(OPTIMIZATION_LEVEL) -o ./misctools/machbench $(IO_FILES) $(DEFAULT_LINKER_FLAGS) -lc $(MINIMUM_ARCHIVE_FILES)
rm -rf $(PACKAGE_DIR)/machbench.o
@@ -861,7 +852,7 @@ httpbench-debug: $(IO_FILES)
.PHONY: httpbench-release
httpbench-release: $(IO_FILES)
$(ZIG) build -Doptimize=ReleaseFast httpbench-obj
$(ZIG) build -Drelease-fast httpbench-obj
$(CXX) $(PACKAGE_DIR)/httpbench.o -g $(OPTIMIZATION_LEVEL) -o ./misctools/httpbench $(IO_FILES) $(DEFAULT_LINKER_FLAGS) -lc $(MINIMUM_ARCHIVE_FILES)
rm -rf $(PACKAGE_DIR)/httpbench.o
@@ -931,7 +922,7 @@ headers:
rm -f /tmp/build-jsc-headers src/bun.js/bindings/headers.zig
touch src/bun.js/bindings/headers.zig
$(ZIG) build headers-obj
$(CXX) $(PLATFORM_LINKER_FLAGS) $(JSC_FILES_DEBUG) ${ICU_FLAGS} $(BUN_LLD_FLAGS_WITHOUT_JSC) -g $(DEBUG_BIN)/headers.o -W -o /tmp/build-jsc-headers -lc;
$(CXX) $(PLATFORM_LINKER_FLAGS) $(JSC_FILES_DEBUG) ${ICU_FLAGS} $(DEBUG_IO_FILES) $(BUN_LLD_FLAGS_WITHOUT_JSC) -g $(DEBUG_BIN)/headers.o -W -o /tmp/build-jsc-headers -lc;
/tmp/build-jsc-headers
$(ZIG) translate-c src/bun.js/bindings/headers.h > src/bun.js/bindings/headers.zig
$(BUN_OR_NODE) misctools/headers-cleaner.js
@@ -1082,20 +1073,57 @@ release-bin-dir:
.PHONY: dev-obj
dev-obj:
$(ZIG) build obj -freference-trace -Dcpu="$(CPU_TARGET)"
$(ZIG) build obj --prominent-compile-errors
.PHONY: dev-obj-linux
dev-obj-linux:
$(ZIG) build obj -Dtarget=x86_64-linux-gnu -Dcpu="$(CPU_TARGET)"
$(ZIG) build obj -Dtarget=x86_64-linux-gnu
.PHONY: dev
dev: mkdir-dev dev-obj bun-link-lld-debug
dev: mkdir-dev dev-obj bun-link-lld-debug bun-codesign-debug
mkdir-dev:
mkdir -p $(DEBUG_PACKAGE_DIR)/bin
test-all:
$(RELEASE_BUN) test
test-install:
cd test/scripts && $(NPM_CLIENT) install
.PHONY: test-bun-dev
test-bun-dev:
BUN_BIN=$(RELEASE_BUN) bash test/apps/bun-dev.sh
BUN_BIN=$(RELEASE_BUN) bash test/apps/bun-dev-index-html.sh
.PHONY: test-dev-bun-dev
test-dev-bun-dev:
BUN_BIN=$(DEBUG_BUN) bash test/apps/bun-dev.sh
BUN_BIN=$(DEBUG_BUN) bash test/apps/bun-dev-index-html.sh
.PHONY: test-bun-snapshot
test-bun-snapshot:
rm -rf test/bun.js/snapshots.js
touch test/bun.js/snapshots.js
$(foreach i,$(wildcard test/bun.js/*.snapshot.*),echo "" >> test/bun.js/snapshots.js; echo "// $i" >> test/bun.js/snapshots.js; $(RELEASE_BUN) build $i --platform=bun >> test/bun.js/snapshots.js;)
.PHONY: test-dev-bun-snapshot
test-dev-bun-snapshot:
rm -rf test/bun.js/snapshots.debug.js
touch test/bun.js/snapshots.debug.js
$(foreach i,$(wildcard test/bun.js/*.snapshot.*),echo "" >> test/bun.js/snapshots.debug.js; echo "// $i" >> test/bun.js/snapshots.debug.js; $(DEBUG_BUN) build $i --platform=bun >> test/bun.js/snapshots.debug.js;)
.PHONY: test-bun-init
test-bun-init:
BUN_BIN=$(RELEASE_BUN) bash test/apps/bun-init-check.sh
.PHONY: test-dev-bun-init
test-dev-bun-init:
BUN_BIN=$(DEBUG_BUN) bash test/apps/bun-init-check.sh
.PHONY: test-bun-wiptest
test-bun-wiptest: test/wiptest/run
cd test/wiptest && BUN_BIN=$(DEBUG_BUN) ./run ./fixtures
.PHONY: test-all
test-all: test-install test-bun-snapshot test-with-hmr test-no-hmr test-create-next test-create-react test-bun-run test-bun-install test-bun-dev test-bun-init
.PHONY: copy-test-node-modules
copy-test-node-modules:
@@ -1106,6 +1134,73 @@ copy-test-node-modules:
kill-bun:
-killall -9 bun bun-debug
.PHONY: test-dev-create-next
test-dev-create-next:
BUN_BIN=$(DEBUG_BUN) bash test/apps/bun-create-next.sh
.PHONY: test-dev-create-react
test-dev-create-react:
BUN_BIN=$(DEBUG_BUN) bash test/apps/bun-create-react.sh
.PHONY: test-create-next
test-create-next:
BUN_BIN=$(RELEASE_BUN) bash test/apps/bun-create-next.sh
.PHONY: test-bun-run
test-bun-run:
cd test/apps && BUN_BIN=$(RELEASE_BUN) bash ./bun-run-check.sh
.PHONY: test-bun-install
test-bun-install: test-bun-install-git-status
cd test/apps && JS_RUNTIME=$(RELEASE_BUN) NPM_CLIENT=$(RELEASE_BUN) bash ./bun-install.sh
cd test/apps && BUN_BIN=$(RELEASE_BUN) bash ./bun-install-utf8.sh
.PHONY: test-bun-install-git-status
test-bun-install-git-status:
cd test/apps && JS_RUNTIME=$(RELEASE_BUN) BUN_BIN=$(RELEASE_BUN) bash ./bun-install-lockfile-status.sh
.PHONY: test-dev-bun-install
test-dev-bun-install: test-dev-bun-install-git-status
cd test/apps && JS_RUNTIME=$(DEBUG_BUN) NPM_CLIENT=$(DEBUG_BUN) bash ./bun-install.sh
cd test/apps && BUN_BIN=$(DEBUG_BUN) bash ./bun-install-utf8.sh
.PHONY: test-dev-bun-install-git-status
test-dev-bun-install-git-status:
cd test/apps && BUN_BIN=$(DEBUG_BUN) bash ./bun-install-lockfile-status.sh
.PHONY: test-create-react
test-create-react:
BUN_BIN=$(RELEASE_BUN) bash test/apps/bun-create-react.sh
.PHONY: test-with-hmr
test-with-hmr: kill-bun copy-test-node-modules
BUN_BIN=$(RELEASE_BUN) node test/scripts/browser.js
.PHONY: test-no-hmr
test-no-hmr: kill-bun copy-test-node-modules
-killall bun -9;
DISABLE_HMR="DISABLE_HMR" BUN_BIN=$(RELEASE_BUN) node test/scripts/browser.js
.PHONY: test-dev-with-hmr
test-dev-with-hmr: copy-test-node-modules
-killall bun-debug -9;
BUN_BIN=$(DEBUG_BUN) node test/scripts/browser.js
.PHONY: test-dev-no-hmr
test-dev-no-hmr: copy-test-node-modules
-killall bun-debug -9;
DISABLE_HMR="DISABLE_HMR" BUN_BIN=$(DEBUG_BUN) node test/scripts/browser.js
.PHONY: test-dev-bun-hmr
test-dev-bun-run:
cd test/apps && BUN_BIN=$(DEBUG_BUN) bash bun-run-check.sh
.PHONY: test-dev-all
test-dev-all: test-install test-dev-bun-snapshot test-dev-with-hmr test-dev-no-hmr test-dev-create-next test-dev-create-react test-dev-bun-run test-dev-bun-install test-dev-bun-dev test-dev-bun-init
test-dev-bunjs:
test-dev: test-dev-with-hmr
jsc-copy-headers:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/heap/WeakHandleOwner.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/WeakHandleOwner.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/LazyClassStructureInlines.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/LazyClassStructureInlines.h
@@ -1174,7 +1269,6 @@ jsc-build-mac-compile:
-DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON \
-DCMAKE_BUILD_TYPE=relwithdebuginfo \
-DUSE_THIN_ARCHIVES=OFF \
-DBUN_FAST_TLS=ON \
-DENABLE_FTL_JIT=ON \
-G Ninja \
$(CMAKE_FLAGS_WITHOUT_RELEASE) \
@@ -1196,7 +1290,6 @@ jsc-build-mac-compile-lto:
-DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_THIN_ARCHIVES=OFF \
-DBUN_FAST_TLS=ON \
-DCMAKE_C_FLAGS="-flto=full" \
-DCMAKE_CXX_FLAGS="-flto=full" \
-DENABLE_FTL_JIT=ON \
@@ -1298,19 +1391,10 @@ bindings: $(DEBUG_OBJ_DIR) $(DEBUG_OBJ_FILES) $(DEBUG_WEBCORE_OBJ_FILES) $(DEBUG
.PHONY: jsc-bindings-mac
jsc-bindings-mac: bindings
# Linux only
MIMALLOC_VALGRIND_ENABLED_FLAG =
ifeq ($(OS_NAME),linux)
MIMALLOC_VALGRIND_ENABLED_FLAG = -DMI_VALGRIND=ON
endif
.PHONY: mimalloc-debug
mimalloc-debug:
rm -rf $(BUN_DEPS_DIR)/mimalloc/CMakeCache* $(BUN_DEPS_DIR)/mimalloc/CMakeFiles
cd $(BUN_DEPS_DIR)/mimalloc; make clean || echo ""; \
CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS_WITHOUT_RELEASE) ${MIMALLOC_OVERRIDE_FLAG} ${MIMALLOC_VALGRIND_ENABLED_FLAG} \
CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS_WITHOUT_RELEASE) ${MIMALLOC_OVERRIDE_FLAG} \
-DCMAKE_BUILD_TYPE=Debug \
-DMI_DEBUG_FULL=1 \
-DMI_SKIP_COLLECT_ON_EXIT=1 \

README.md (5304 changed lines; diff suppressed because it is too large)


@@ -6,4 +6,7 @@ const arg = process.argv.slice(1);
// TODO: remove Buffer.from() when readFileSync() returns Buffer
for (let i = 0; i < count; i++) console.log(arg.map(file => Buffer.from(readFileSync(file, "utf8"))).join(""));
for (let i = 0; i < count; i++)
console.log(
arg.map((file) => Buffer.from(readFileSync(file, "utf8"))).join("")
);


@@ -7,8 +7,14 @@ function runner(ready) {
for (let i = 0; i < size; i++) {
rand[i] = (Math.random() * 1024 * 1024) | 0;
}
const dest = `/tmp/fs-test-copy-file-${((Math.random() * 10000000 + 100) | 0).toString(32)}`;
const src = `/tmp/fs-test-copy-file-${((Math.random() * 10000000 + 100) | 0).toString(32)}`;
const dest = `/tmp/fs-test-copy-file-${(
(Math.random() * 10000000 + 100) |
0
).toString(32)}`;
const src = `/tmp/fs-test-copy-file-${(
(Math.random() * 10000000 + 100) |
0
).toString(32)}`;
writeFileSync(src, Buffer.from(rand.buffer), { encoding: "buffer" });
const { size: fileSize } = statSync(src);
if (fileSize !== rand.byteLength) {
@@ -29,6 +35,6 @@ runner((src, dest, rand) =>
// );
// }
// }
}),
})
);
await run();


@@ -1 +1 @@
console.log("Hello via Bun!");
console.log("Hello via Bun!");


@@ -6,7 +6,9 @@ bench(`fetch(https://example.com) x ${count}`, async () => {
const requests = new Array(count);
for (let i = 0; i < requests.length; i++) {
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then(r => r.text());
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then((r) =>
r.text(),
);
}
await Promise.all(requests);


@@ -6,7 +6,9 @@ bench(`fetch(https://example.com) x ${count}`, async () => {
const requests = new Array(count);
for (let i = 0; i < requests.length; i++) {
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then(r => r.text());
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then((r) =>
r.text(),
);
}
await Promise.all(requests);


@@ -6,7 +6,9 @@ bench(`fetch(https://example.com) x ${count}`, async () => {
const requests = new Array(count);
for (let i = 0; i < requests.length; i++) {
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then(r => r.text());
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then((r) =>
r.text()
);
}
await Promise.all(requests);


@@ -1,7 +1,8 @@
import { ptr, dlopen, CString, toBuffer } from "bun:ffi";
import { run, bench, group } from "mitata";
const { napiNoop, napiHash, napiString } = require(import.meta.dir + "/src/ffi_napi_bench.node");
const { napiNoop, napiHash, napiString } = require(import.meta.dir +
"/src/ffi_napi_bench.node");
const {
symbols: {


@@ -1,7 +1,10 @@
import { run, bench, group } from "../node_modules/mitata/src/cli.mjs";
const extension = "darwin" !== Deno.build.os ? "so" : "dylib";
const path = new URL("src/target/release/libffi_napi_bench." + extension, import.meta.url).pathname;
const path = new URL(
"src/target/release/libffi_napi_bench." + extension,
import.meta.url,
).pathname;
const {
symbols: { ffi_noop, ffi_hash, ffi_string },


@@ -29,7 +29,7 @@ loop:
-e 'if windows is not {} then perform action "AXRaise" of item 1 of windows' \
-e 'end tell'
sleep 0.5
cd src; zig run -Doptimize=ReleaseFast ../color-looper.zig -- ./colors.css:0 $(SLEEP_INTERVAL)
cd src; zig run -Drelease-fast ../color-looper.zig -- ./colors.css:0 $(SLEEP_INTERVAL)
cp src/colors.css.blob $(PROJECT)/colors.css.blob
loop-emotion:
@@ -40,7 +40,7 @@ loop-emotion:
-e 'if windows is not {} then perform action "AXRaise" of item 1 of windows' \
-e 'end tell'
sleep 0.5
cd src; zig run -Doptimize=ReleaseFast ../color-looper.emotion.zig -- ./css-in-js-styles.tsx:0 $(SLEEP_INTERVAL)
cd src; zig run -Drelease-fast ../color-looper.emotion.zig -- ./css-in-js-styles.tsx:0 $(SLEEP_INTERVAL)
cp src/css-in-js-styles.tsx.blob $(PROJECT)/css-in-js-styles.blob
process_video:


@@ -24,7 +24,7 @@ if (process.env.PROJECT === "bun") {
// bunProcess.stderr.pipe(process.stderr);
// bunProcess.stdout.pipe(process.stdout);
bunProcess.once("error", err => {
bunProcess.once("error", (err) => {
console.error("❌ bun error", err);
process.exit(1);
});
@@ -32,15 +32,19 @@ if (process.env.PROJECT === "bun") {
bunProcess?.kill(0);
});
} else if (process.env.PROJECT === "next") {
const bunProcess = child_process.spawn("./node_modules/.bin/next", ["--port", "8080"], {
cwd: process.cwd(),
stdio: "ignore",
env: {
...process.env,
},
const bunProcess = child_process.spawn(
"./node_modules/.bin/next",
["--port", "8080"],
{
cwd: process.cwd(),
stdio: "ignore",
env: {
...process.env,
},
shell: false,
});
shell: false,
},
);
}
const delay = new Promise((resolve, reject) => {
@@ -107,7 +111,7 @@ async function main() {
return runPage();
}
main().catch(error =>
main().catch((error) =>
setTimeout(() => {
throw error;
}),


@@ -4,7 +4,9 @@ const path = require("path");
const PROJECT = process.env.PROJECT || "bun";
const percentile = require("percentile");
const PACKAGE_NAME = process.env.PACKAGE_NAME;
const label = `${PACKAGE_NAME}@${require(PACKAGE_NAME + "/package.json").version}`;
const label = `${PACKAGE_NAME}@${
require(PACKAGE_NAME + "/package.json").version
}`;
const BASEFOLDER = path.resolve(PROJECT);
const OUTFILE = path.join(process.cwd(), process.env.OUTFILE);
@@ -18,10 +20,10 @@ const TOTAL_FRAMES = VALID_TIMES.length;
const timings = fs
.readFileSync(BASEFOLDER + "/frames.all.clean", "utf8")
.split("\n")
.map(a => a.replace(/[Ran:'\.]?/gm, "").trim())
.filter(a => parseInt(a, 10))
.filter(a => a.length > 0 && VALID_TIMES.includes(BigInt(parseInt(a, 10))))
.map(num => BigInt(num));
.map((a) => a.replace(/[Ran:'\.]?/gm, "").trim())
.filter((a) => parseInt(a, 10))
.filter((a) => a.length > 0 && VALID_TIMES.includes(BigInt(parseInt(a, 10))))
.map((num) => BigInt(num));
timings.sort();
@@ -45,7 +47,7 @@ const report = {
name: PACKAGE_NAME,
version: require(PACKAGE_NAME + "/package.json").version,
},
timestamps: timings.map(a => Number(a)),
timestamps: timings.map((a) => Number(a)),
frameTimes: frameTime,
percentileMs: {
50: percentile(50, frameTime) / 10,
@@ -65,7 +67,9 @@ fs.writeFileSync(
"." +
process.env.SLEEP_INTERVAL +
"ms." +
`${process.platform}-${process.arch === "arm64" ? "aarch64" : process.arch}` +
`${process.platform}-${
process.arch === "arm64" ? "aarch64" : process.arch
}` +
".json",
),
JSON.stringify(report, null, 2),
@@ -95,5 +99,9 @@ console.log(
timings.length,
"/",
TOTAL_FRAMES,
"(" + Math.round(Math.max(Math.min(1.0, timings.length / TOTAL_FRAMES), 0) * 100) + "%)",
"(" +
Math.round(
Math.max(Math.min(1.0, timings.length / TOTAL_FRAMES), 0) * 100,
) +
"%)",
);


@@ -3,7 +3,10 @@ import classNames from "classnames";
import ReactDOM from "react-dom";
const Base = ({}) => {
const name = typeof location !== "undefined" ? decodeURIComponent(location.search.substring(1)) : null;
const name =
typeof location !== "undefined"
? decodeURIComponent(location.search.substring(1))
: null;
return <Main productName={name} />;
};


@@ -4,8 +4,8 @@ export const Main = (props: { productName: string; cssInJS?: string }) => {
<header>
<div className="Title">CSS HMR Stress Test!</div>
<p className="Description">
This page visually tests how quickly a bundler can update {props.cssInJS ? "CSS-in-JS" : "CSS"} over Hot
Module Reloading.
This page visually tests how quickly a bundler can update{" "}
{props.cssInJS ? "CSS-in-JS" : "CSS"} over Hot Module Reloading.
</p>
</header>
<main className="main">
@@ -19,7 +19,9 @@ export const Main = (props: { productName: string; cssInJS?: string }) => {
<div className="ProgressBar-container">
<div className="ProgressBar"></div>
</div>
<div className="SectionLabel">The progress bar should move from left to right smoothly.</div>
<div className="SectionLabel">
The progress bar should move from left to right smoothly.
</div>
</section>
<section>
@@ -40,15 +42,21 @@ export const Main = (props: { productName: string; cssInJS?: string }) => {
<div className="Spinner"></div>
</div>
</div>
<div className="SectionLabel">The spinners should rotate &amp; change color smoothly.</div>
<div className="SectionLabel">
The spinners should rotate &amp; change color smoothly.
</div>
</section>
</main>
<footer>
<div className="SectionLabel FooterLabel">There are no CSS animations on this page.</div>
<div className="SectionLabel FooterLabel">
There are no CSS animations on this page.
</div>
<div className="Bundler-container">
<div className="Bundler">{props.productName}</div>
<div className="Bundler-updateRate">{props.cssInJS ? "CSS-in-JS framework: " + props.cssInJS : ""}</div>
<div className="Bundler-updateRate">
{props.cssInJS ? "CSS-in-JS framework: " + props.cssInJS : ""}
</div>
</div>
</footer>
</>


@@ -1,8 +1,12 @@
import { bench, run } from "mitata";
bench("JSON.stringify({hello: 'world'})", () => JSON.stringify({ hello: "world" }));
bench("JSON.stringify({hello: 'world'})", () =>
JSON.stringify({ hello: "world" }),
);
const otherUint8Array = new Uint8Array(1024);
bench("Uint8Array.from(otherUint8Array)", () => Uint8Array.from(otherUint8Array));
bench("Uint8Array.from(otherUint8Array)", () =>
Uint8Array.from(otherUint8Array),
);
run();


@@ -1,5 +1,7 @@
import { bench, run } from "mitata";
bench("console.log('hello')", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));
bench("console.log({ hello: 'object' })", () =>
console.log({ hello: "object" }),
);
await run();


@@ -47,7 +47,11 @@ fs.writeFileSync(
output + `/file${count}.mjs`,
`
export const THE_END = true;
${saveStack ? `globalThis.evaluationOrder.push("${output}/file${count}.mjs");` : ""}
${
saveStack
? `globalThis.evaluationOrder.push("${output}/file${count}.mjs");`
: ""
}
`,
"utf8",
);
@@ -56,7 +60,11 @@ fs.writeFileSync(
output + `/file${count}.js`,
`
module.exports.THE_END = true;
${saveStack ? `globalThis.evaluationOrder.push("${output}/file${count}.js");` : ""}
${
saveStack
? `globalThis.evaluationOrder.push("${output}/file${count}.js");`
: ""
}
`,
"utf8",
);


@@ -1,24 +1,5 @@
import { bench, run } from "mitata";
import {
cpus,
endianness,
arch,
uptime,
networkInterfaces,
getPriority,
totalmem,
freemem,
homedir,
hostname,
loadavg,
platform,
release,
setPriority,
tmpdir,
type,
userInfo,
version,
} from "node:os";
import { cpus, endianness, arch, uptime, networkInterfaces, getPriority, totalmem, freemem, homedir, hostname, loadavg, platform, release, setPriority, tmpdir, type, userInfo, version } from "node:os";
bench("cpus()", () => cpus());
bench("networkInterfaces()", () => networkInterfaces());


@@ -1,24 +1,5 @@
import { bench, run } from "mitata";
import {
cpus,
endianness,
arch,
uptime,
networkInterfaces,
getPriority,
totalmem,
freemem,
homedir,
hostname,
loadavg,
platform,
release,
setPriority,
tmpdir,
type,
userInfo,
version,
} from "node:os";
import { cpus, endianness, arch, uptime, networkInterfaces, getPriority, totalmem, freemem, homedir, hostname, loadavg, platform, release, setPriority, tmpdir, type, userInfo, version } from "node:os";
bench("cpus()", () => cpus());
bench("networkInterfaces()", () => networkInterfaces());


@@ -17,6 +17,5 @@
},
"devDependencies": {
"fast-deep-equal": "^3.1.3"
},
"prettier": "../.prettierrc.cjs"
}
}


@@ -1,16 +0,0 @@
{
"name": "react-hello-world",
"version": "1.0.0",
"description": "",
"main": "react-hello-world.node.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "Colin McDonnell",
"license": "ISC",
"dependencies": {
"react": "next",
"react-dom": "next"
}
}


@@ -18,8 +18,8 @@ const headers = {
};
Deno.serve(
async req => {
async (req) => {
return new Response(await renderToReadableStream(<App />), headers);
},
{ port: 8080 },
{ port: 8080 }
);


@@ -1,9 +1,10 @@
// to run this:
// NODE_ENV=production bun --jsx-production react-hello-world.jsx
// bun --jsx-production react-hello-world.jsx
// Make sure you're using react-dom@18.3.0 or later.
// Currently that is available at react-dom@next (which is installed in this repository)
import { renderToReadableStream } from "react-dom/server";
// This will become the official react-dom/server.bun build a little later
// It will be the default when you import from "react-dom/server"
// That will work via the "bun" package.json export condition (which bun already supports)
import { renderToReadableStream } from "../../test/bun.js/react-dom-server.bun";
const headers = {
headers: {
"Content-Type": "text/html",

File diff suppressed because it is too large.


@@ -24,7 +24,9 @@ http
onShellError(error) {
// Something errored before we could complete the shell so we emit an alternative shell.
res.statusCode = 500;
res.send('<!doctype html><p>Loading...</p><script src="clientrender.js"></script>');
res.send(
'<!doctype html><p>Loading...</p><script src="clientrender.js"></script>'
);
},
onAllReady() {
// If you don't want streaming, use this instead of onShellReady.


@@ -8,6 +8,8 @@ const transpiler = new Bun.Transpiler({
console.time("Get exports");
const ITERATIONS = parseInt(process.env.ITERATIONS || "1") || 1;
for (let i = 0; i < ITERATIONS; i++) {
const imports = transpiler.scanImports(readFileSync("remix-route.ts", "utf8"));
const imports = transpiler.scanImports(
readFileSync("remix-route.ts", "utf8")
);
}
console.timeEnd("Get exports");


@@ -1,7 +1,7 @@
// https://github.com/oven-sh/bun/issues/1096
import { bench, run } from "mitata";
const identity = x => x;
const identity = (x) => x;
for (let i = 0; i < 20; i++) {
var array = new Array(i);
@@ -22,15 +22,35 @@ bench("inline Array.map x 6", () => [1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 7", () => [1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 8", () => [1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 9", () => [1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 10", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 11", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 12", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 13", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 14", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 15", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 16", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 17", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 18", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 19", () => [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity));
bench("inline Array.map x 10", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 11", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 12", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 13", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 14", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 15", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 16", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 17", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 18", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
bench("inline Array.map x 19", () =>
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1].map(identity)
);
await run();


@@ -6,7 +6,10 @@ bench("deepEqual", () => {
});
bench("deepStrictEqual", () => {
assert.deepStrictEqual({ foo: "123", beep: "boop" }, { foo: "123", beep: "boop" });
assert.deepStrictEqual(
{ foo: "123", beep: "boop" },
{ foo: "123", beep: "boop" },
);
});
await run();


@@ -19,10 +19,13 @@ bench("promise.nextTick", async function () {
});
bench("await new Promise(resolve => resolve())", async function () {
await new Promise(resolve => resolve());
});
bench("Promise.all(Array.from({length: 100}, () => new Promise((resolve) => resolve())))", async function () {
return Promise.all(Array.from({ length: 100 }, () => Promise.resolve(1)));
await new Promise((resolve) => resolve());
});
bench(
"Promise.all(Array.from({length: 100}, () => new Promise((resolve) => resolve())))",
async function () {
return Promise.all(Array.from({ length: 100 }, () => Promise.resolve(1)));
},
);
await run();


@@ -1,49 +1,9 @@
import { bench, run } from "mitata";
const N = parseInt(process.argv.slice(2).at(0) || "10", 10);
var isBuffer = new Buffer(0);
var isNOtBuffer = "not a buffer";
bench("Buffer.isBuffer(buffer)", () => {
return Buffer.isBuffer(isBuffer);
});
{
var j = 0;
j += 1;
j += eval("'ok'");
bench("Buffer.isBuffer(string)", () => {
return Buffer.isBuffer(j);
});
}
bench("Buffer.from('short string')", () => {
return Buffer.from("short string");
});
const loooong = "long string".repeat(9999).split("").join(" ");
bench("Buffer.byteLength('long string'.repeat(9999))", () => {
return Buffer.byteLength(loooong);
});
var hundred = new ArrayBuffer(100);
bench("Buffer.from(ArrayBuffer(100))", () => {
return Buffer.from(hundred);
});
var hundredArray = new Uint8Array(100);
bench("Buffer.from(Uint8Array(100))", () => {
return Buffer.from(hundredArray);
});
var empty = new Uint8Array(0);
bench("Buffer.from(Uint8Array(0))", () => {
return Buffer.from(empty);
});
bench("new Buffer(Uint8Array(0))", () => {
return new Buffer(empty);
bench("new Buffer(0)", () => {
return new Buffer(0);
});
bench(`new Buffer(${N})`, () => {
@@ -66,4 +26,4 @@ bench("Buffer.alloc(24_000)", () => {
return Buffer.alloc(24_000);
});
await run({});
await run();


@@ -23,7 +23,9 @@ time("Uint8Array[]", () => array[0]);
console.log("");
time("Buffer.getBigInt64BE ", () => buf.readBigInt64BE(0));
time("DataView.getBigInt64 (BE)", () => view.getBigInt64(0, false).toString(10));
time("DataView.getBigInt64 (BE)", () =>
view.getBigInt64(0, false).toString(10)
);
console.log("");
time("Buffer.readBigInt64LE ", () => buf.readBigInt64LE(0));


@@ -10,13 +10,18 @@ var time = (name, fn) => {
console.timeEnd(name.padEnd('Buffer.write(string, "latin1")'.length));
};
console.log(`Run ${new Intl.NumberFormat().format(INTERVAL)} times with a warmup:`, "\n");
console.log(
`Run ${new Intl.NumberFormat().format(INTERVAL)} times with a warmup:`,
"\n"
);
const stringToWrite = "hellooooohellooooo";
time('Buffer.write(string, "utf8")', () => buf.write(stringToWrite, "utf8"));
time('Buffer.write(string, "ascii")', () => buf.write(stringToWrite, "ascii"));
time('Buffer.write(string, "latin1")', () => buf.write(stringToWrite, "latin1"));
time('Buffer.write(string, "latin1")', () =>
buf.write(stringToWrite, "latin1")
);
time("Buffer.readBigInt64BE ", () => buf.readBigInt64BE(0));
// time("DataView.getBigInt64 (BE)", () => view.getBigInt64(0, false));
// console.log("");


@@ -35,26 +35,40 @@ async function test(log) {
{
const a = performance.now();
await tests.async(0);
if (log) console.log(`async/await: ${(report.async = (performance.now() - a).toFixed(4))}ms`);
if (log)
console.log(
`async/await: ${(report.async = (performance.now() - a).toFixed(4))}ms`
);
}
{
const a = performance.now();
tests.callback(0, function () {
if (log) console.log(`callback: ${(report.callback = (performance.now() - a).toFixed(4))}ms`);
if (log)
console.log(
`callback: ${(report.callback = (performance.now() - a).toFixed(
4
))}ms`
);
});
}
{
const a = performance.now();
await tests.then(0);
if (log) console.log(`then: ${(report.then = (performance.now() - a).toFixed(4))}ms`);
if (log)
console.log(
`then: ${(report.then = (performance.now() - a).toFixed(4))}ms`
);
}
{
const a = performance.now();
tests.sync(0);
if (log) console.log(`sync: ${(report.sync = (performance.now() - a).toFixed(4))}ms`);
if (log)
console.log(
`sync: ${(report.sync = (performance.now() - a).toFixed(4))}ms`
);
}
}


@@ -34,25 +34,33 @@ function polyfillUninitialized(chunks) {
const chunkGroups = [
[Uint8Array.from([123]), Uint8Array.from([456]), Uint8Array.from([789])],
Array.from(readFileSync(import.meta.path)).map(a => Uint8Array.from([a])),
Array.from(readFileSync(import.meta.path)).map((a) => Uint8Array.from([a])),
[readFileSync(import.meta.path)],
Array.from({ length: 42 }, () => readFileSync(import.meta.path)),
Array.from({ length: 2 }, () => new TextEncoder().encode(readFileSync(import.meta.path, "utf8").repeat(100))),
Array.from({ length: 2 }, () =>
new TextEncoder().encode(readFileSync(import.meta.path, "utf8").repeat(100))
),
];
for (const chunks of chunkGroups) {
group(`${chunks.reduce((prev, curr, i, a) => prev + curr.byteLength, 0)} bytes for ${chunks.length} chunks`, () => {
bench("Bun.concatArrayBuffers", () => {
Bun.concatArrayBuffers(chunks);
});
bench("Uint8Array.set", () => {
polyfill(chunks);
});
group(
`${chunks.reduce(
(prev, curr, i, a) => prev + curr.byteLength,
0
)} bytes for ${chunks.length} chunks`,
() => {
bench("Bun.concatArrayBuffers", () => {
Bun.concatArrayBuffers(chunks);
});
bench("Uint8Array.set", () => {
polyfill(chunks);
});
bench("Uint8Array.set (uninitialized memory)", () => {
polyfillUninitialized(chunks);
});
});
bench("Uint8Array.set (uninitialized memory)", () => {
polyfillUninitialized(chunks);
});
}
);
}
await run();


@@ -9,7 +9,8 @@ const json = {
url: "https://api.github.com/users/wongmjane",
html_url: "https://github.com/wongmjane",
followers_url: "https://api.github.com/users/wongmjane/followers",
following_url: "https://api.github.com/users/wongmjane/following{/other_user}",
following_url:
"https://api.github.com/users/wongmjane/following{/other_user}",
gists_url: "https://api.github.com/users/wongmjane/gists{/gist_id}",
starred_url: "https://api.github.com/users/wongmjane/starred{/owner}{/repo}",
subscriptions_url: "https://api.github.com/users/wongmjane/subscriptions",
@@ -36,7 +37,11 @@ const json = {
};
const inspect =
"Bun" in globalThis ? Bun.inspect : "Deno" in globalThis ? Deno.inspect : (await import("util")).inspect;
"Bun" in globalThis
? Bun.inspect
: "Deno" in globalThis
? Deno.inspect
: (await import("util")).inspect;
bench("big json object", () => {
console.error(json);
});


@@ -259,13 +259,15 @@ const fixture = [
equal: false,
},
{
description: "objects with different `toString` functions returning same values are equal",
description:
"objects with different `toString` functions returning same values are equal",
value1: { toString: () => "Hello world!" },
value2: { toString: () => "Hello world!" },
equal: true,
},
{
description: "objects with `toString` functions returning different values are not equal",
description:
"objects with `toString` functions returning different values are not equal",
value1: { toString: () => "Hello world!" },
value2: { toString: () => "Hi!" },
equal: false,
@@ -309,13 +311,13 @@ const fixture = [
{
description: "equal arrays of objects",
value1: [
...Array.from({ length: 200000 }, i => ({
...Array.from({ length: 200000 }, (i) => ({
a: 1,
b: 2,
})),
],
value2: [
...Array.from({ length: 200000 }, i => ({
...Array.from({ length: 200000 }, (i) => ({
a: 1,
b: 2,
})),
@@ -493,7 +495,9 @@ for (let { tests, description } of fixture) {
bench(equalsFn.name, () => {
expected = equalsFn(value1, value2);
if (expected !== equal) {
throw new Error(`Expected ${expected} to be ${equal} for ${description}`);
throw new Error(
`Expected ${expected} to be ${equal} for ${description}`,
);
}
});
}


@@ -19,7 +19,14 @@ const properties = {
writable: {
get() {
const w = this._writableState;
return !!w && w.writable !== false && !w.destroyed && !w.errored && !w.ending && !w.ended;
return (
!!w &&
w.writable !== false &&
!w.destroyed &&
!w.errored &&
!w.ending &&
!w.ended
);
},
set(val) {
if (this._writableState) {


@@ -7,7 +7,7 @@ bench("(cached) dns.lookup remote x 50", async () => {
const total = 50;
var remain = total;
var done;
await new Promise(resolve => {
await new Promise((resolve) => {
for (var i = 0; i < total; i++)
run().finally(() => {
remain--;
@@ -25,7 +25,7 @@ bench("(cached in batch) dns.lookup remote x 50", async () => {
const total = 50;
var remain = total;
var done;
await new Promise(resolve => {
await new Promise((resolve) => {
for (var i = 0; i < total; i++)
run().finally(() => {
remain--;
@@ -40,9 +40,10 @@ bench("(cached in batch) dns.lookup remote x 50", async () => {
bench("dns.lookup remote x 50", async () => {
var remain = 50;
var done;
const run = () => lookup(Math.random().toString() + ".example.com").catch(() => {});
const run = () =>
lookup(Math.random().toString() + ".example.com").catch(() => {});
await new Promise(resolve => {
await new Promise((resolve) => {
for (var i = 0; i < 50; i++)
run().finally(() => {
remain--;
@@ -57,9 +58,10 @@ bench("dns.lookup remote x 50", async () => {
bench("dns.resolve remote x 50", async () => {
var remain = 50;
var done;
const run = () => resolve(Math.random().toString() + ".example.com").catch(() => {});
const run = () =>
resolve(Math.random().toString() + ".example.com").catch(() => {});
await new Promise(resolve => {
await new Promise((resolve) => {
for (var i = 0; i < 50; i++)
run().finally(() => {
remain--;


@@ -3,16 +3,23 @@ import { bench, run, group } from "mitata";
async function forEachBackend(name, fn) {
group(name, () => {
for (let backend of ["libc", "c-ares", process.platform === "darwin" ? "system" : ""].filter(Boolean))
for (let backend of [
"libc",
"c-ares",
process.platform === "darwin" ? "system" : "",
].filter(Boolean))
bench(backend, fn(backend));
});
}
forEachBackend("dns.lookup remote x 50", backend => async () => {
const run = () => dns.lookup(Math.random().toString(16) + ".example.com", { backend }).catch(() => {});
forEachBackend("dns.lookup remote x 50", (backend) => async () => {
const run = () =>
dns
.lookup(Math.random().toString(16) + ".example.com", { backend })
.catch(() => {});
var remain = 16;
var done;
await new Promise(resolve => {
await new Promise((resolve) => {
for (var i = 0; i < 16; i++)
run().finally(() => {
remain--;
@@ -24,7 +31,7 @@ forEachBackend("dns.lookup remote x 50", backend => async () => {
});
});
forEachBackend("(cached) dns.lookup remote x 50", backend => {
forEachBackend("(cached) dns.lookup remote x 50", (backend) => {
var tld = "example.com";
const run = () => dns.lookup(tld, { backend }).catch(() => {});
@@ -32,7 +39,7 @@ forEachBackend("(cached) dns.lookup remote x 50", backend => {
const total = 50;
var remain = total;
var done;
await new Promise(resolve => {
await new Promise((resolve) => {
for (var i = 0; i < total; i++)
run().finally(() => {
remain--;
@@ -45,22 +52,25 @@ forEachBackend("(cached) dns.lookup remote x 50", backend => {
};
});
forEachBackend("(cached in batch) dns.lookup remote x 50", backend => async () => {
var tld = Math.random().toString(16) + ".example.com";
const run = () => dns.lookup(tld, { backend }).catch(() => {});
const total = 50;
var remain = total;
var done;
await new Promise(resolve => {
for (var i = 0; i < total; i++)
run().finally(() => {
remain--;
if (remain === 0) {
done();
}
});
done = resolve;
});
});
forEachBackend(
"(cached in batch) dns.lookup remote x 50",
(backend) => async () => {
var tld = Math.random().toString(16) + ".example.com";
const run = () => dns.lookup(tld, { backend }).catch(() => {});
const total = 50;
var remain = total;
var done;
await new Promise((resolve) => {
for (var i = 0; i < total; i++)
run().finally(() => {
remain--;
if (remain === 0) {
done();
}
});
done = resolve;
});
},
);
await run();

bench/snippets/emitter.js (new file, 86 lines)

@@ -0,0 +1,86 @@
const EventEmitter = require("events").EventEmitter;
import { bench, run } from "mitata";
const emitter = new EventEmitter();
const event = new Event("hello");
emitter.on("hello", (event) => {
event.preventDefault();
});
var id = 0;
bench("EventEmitter.emit", () => {
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
var monkey = Object.assign({}, EventEmitter.prototype);
monkey.on("hello", (event) => {
event.preventDefault();
});
bench("[monkey] EventEmitter.emit", () => {
var called = false;
monkey.emit("hello", {
preventDefault() {
id++;
called = true;
},
});
if (!called) {
throw new Error("monkey failed");
}
});
bench("EventEmitter.on x 10_000 (handler)", () => {
var cb = (event) => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
emitter.off("hey", cb);
if (!called) throw new Error("not called");
});
bench("[monkey] EventEmitter.on x 10_000 (handler)", () => {
var cb = () => {
event.preventDefault();
};
monkey.on("hey", cb);
for (let i = 0; i < 10_000; i++)
monkey.emit("hey", {
preventDefault() {
id++;
},
});
monkey.off("hey", cb);
});
var target = new EventTarget();
target.addEventListener("hello", (event) => {});
bench("EventTarget.dispatch", () => {
target.dispatchEvent(event);
});
var hey = new Event("hey");
bench("EventTarget.on x 10_000 (handler)", () => {
var handler = (event) => {};
target.addEventListener("hey", handler);
for (let i = 0; i < 10_000; i++) target.dispatchEvent(hey);
target.removeEventListener("hey", handler);
});
await run();


@@ -1,101 +0,0 @@
// **so this file can run in node**
import { createRequire } from "node:module";
const require = createRequire(import.meta.url);
// --
const EventEmitterNative = require("node:events").EventEmitter;
const TypedEmitter = require("tiny-typed-emitter").TypedEmitter;
const EventEmitter3 = require("eventemitter3").EventEmitter;
import { bench, run } from "../../node_modules/mitata/src/cli.mjs";
const event = new Event("hello");
var id = 0;
for (let [EventEmitter, className] of [
[EventEmitterNative, "EventEmitter"],
[TypedEmitter, "TypedEmitter"],
[EventEmitter3, "EventEmitter3"],
]) {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(`${className}.emit`, () => {
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
bench(`${className}.on x 10_000 (handler)`, () => {
var cb = event => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
emitter.off("hey", cb);
if (!called) throw new Error("not called");
});
if (EventEmitter !== EventEmitter3) {
var monkey = Object.assign({}, EventEmitter.prototype);
monkey.on("hello", event => {
event.preventDefault();
});
bench(`[monkey] ${className}.emit`, () => {
var called = false;
monkey.emit("hello", {
preventDefault() {
id++;
called = true;
},
});
if (!called) {
throw new Error("monkey failed");
}
});
bench(`[monkey] ${className}.on x 10_000 (handler)`, () => {
var cb = () => {
event.preventDefault();
};
monkey.on("hey", cb);
for (let i = 0; i < 10_000; i++)
monkey.emit("hey", {
preventDefault() {
id++;
},
});
monkey.off("hey", cb);
});
}
}
var target = new EventTarget();
target.addEventListener("hello", event => {});
bench("EventTarget.dispatch", () => {
target.dispatchEvent(event);
});
var hey = new Event("hey");
bench("EventTarget.on x 10_000 (handler)", () => {
var handler = event => {};
target.addEventListener("hey", handler);
for (let i = 0; i < 10_000; i++) target.dispatchEvent(hey);
target.removeEventListener("hey", handler);
});
await run();


@@ -8,7 +8,7 @@ var bunEscapeHTML = globalThis.escapeHTML || Bun.escapeHTML;
const FIXTURE = require("fs")
.readFileSync(import.meta.dir + "/_fixture.txt", "utf8")
.split("")
.map(a => {
.map((a) => {
if (a.charCodeAt(0) > 127) {
return "a";
}
@@ -16,7 +16,10 @@ const FIXTURE = require("fs")
})
.join("");
const FIXTURE_WITH_UNICODE = require("fs").readFileSync(import.meta.dir + "/_fixture.txt", "utf8");
const FIXTURE_WITH_UNICODE = require("fs").readFileSync(
import.meta.dir + "/_fixture.txt",
"utf8"
);
// from react-dom:
const matchHtmlRegExp = /["'&<>]/;
@@ -113,7 +116,7 @@ for (let input of [
// bench(`html-entities.encode`, () => htmlEntityEncode(input));
// bench(`he.escape`, () => heEscape(input));
bench(`Bun.escapeHTML`, () => bunEscapeHTML(input));
},
}
);
}
await run();


@@ -1,4 +1,13 @@
import { viewSource, dlopen, CString, ptr, toBuffer, toArrayBuffer, FFIType, callback } from "bun:ffi";
import {
viewSource,
dlopen,
CString,
ptr,
toBuffer,
toArrayBuffer,
FFIType,
callback,
} from "bun:ffi";
import { bench, group, run } from "mitata";
const types = {
@@ -226,7 +235,9 @@ var opened;
try {
opened = dlopen("/tmp/bun-ffi-test.dylib", types);
} catch (e) {
throw new Error("Please run `make compile-ffi-test` to compile the ffi test library");
throw new Error(
"Please run `make compile-ffi-test` to compile the ffi test library"
);
}
const {
@@ -392,10 +403,10 @@ group("returns_true", () => {
group("return_a_function_ptr_to_function_that_returns_true", () => {
bench("return_a_function_ptr_to_function_that_returns_true (raw)", () =>
raw_return_a_function_ptr_to_function_that_returns_true(),
raw_return_a_function_ptr_to_function_that_returns_true()
);
bench("return_a_function_ptr_to_function_that_returns_true", () =>
return_a_function_ptr_to_function_that_returns_true(),
return_a_function_ptr_to_function_that_returns_true()
);
});
group("returns_42_float", () => {
@@ -416,7 +427,8 @@ group("identity_double", () => {
});
var raw_return_a_function_ptr_to_function_that_returns_true =
return_a_function_ptr_to_function_that_returns_true.native ?? return_a_function_ptr_to_function_that_returns_true;
return_a_function_ptr_to_function_that_returns_true.native ??
return_a_function_ptr_to_function_that_returns_true;
var raw_returns_42_float = returns_42_float.native ?? returns_42_float;
var raw_returns_42_double = returns_42_double.native ?? returns_42_double;
var raw_identity_float = identity_float.native ?? identity_float;
@@ -425,11 +437,14 @@ var raw_returns_true = returns_true.native ?? returns_true;
var raw_returns_false = returns_false.native ?? returns_false;
var raw_returns_42_char = returns_42_char.native ?? returns_42_char;
var raw_returns_42_uint8_t = returns_42_uint8_t.native ?? returns_42_uint8_t;
var raw_returns_neg_42_int8_t = returns_neg_42_int8_t.native ?? returns_neg_42_int8_t;
var raw_returns_neg_42_int8_t =
returns_neg_42_int8_t.native ?? returns_neg_42_int8_t;
var raw_returns_42_uint16_t = returns_42_uint16_t.native ?? returns_42_uint16_t;
var raw_returns_42_uint32_t = returns_42_uint32_t.native ?? returns_42_uint32_t;
var raw_returns_neg_42_int16_t = returns_neg_42_int16_t.native ?? returns_neg_42_int16_t;
var raw_returns_neg_42_int32_t = returns_neg_42_int32_t.native ?? returns_neg_42_int32_t;
var raw_returns_neg_42_int16_t =
returns_neg_42_int16_t.native ?? returns_neg_42_int16_t;
var raw_returns_neg_42_int32_t =
returns_neg_42_int32_t.native ?? returns_neg_42_int32_t;
var raw_identity_char = identity_char.native ?? identity_char;
var raw_identity_bool = identity_bool.native ?? identity_bool;
var raw_identity_bool = identity_bool.native ?? identity_bool;


@@ -1,34 +0,0 @@
// so it can run in environments without node module resolution
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const blob = new Blob(["foo", "bar", "baz"]);
bench("FormData.append", () => {
const data = new FormData();
data.append("foo", "bar");
data.append("baz", blob);
});
const data = new FormData();
data.append("foo", "bar");
data.append("baz", blob);
const formText =
// single field form data
"--Form\r\n" + 'Content-Disposition: form-data; name="foo"\r\n\r\n' + "bar\r\n" + "--Form--\r\n";
bench("response.formData()", async () => {
await new Response(formText, {
headers: {
"Content-Type": "multipart/form-data; boundary=Form",
},
}).formData();
});
bench("new Response(formData).text()", async () => {
await new Response(data).text();
});
bench("new Response(formData).formData()", async () => {
await new Response(data).formData();
});
await run();

View File

@@ -2,7 +2,7 @@ import { bench, run } from "mitata";
const input =
"Hello, World! foo bar baz qux quux corge grault garply waldo fred plugh xyzzy thud z a b c d e f g h i j k l m n o p q r s t u v w x y z".split(
" ",
" "
);
bench(`Array.indexOf`, () => {

View File

@@ -1,25 +0,0 @@
import { bench, run } from "mitata";
// These are no-op C++ functions that are exported to JS.
const lazy = globalThis[Symbol.for("Bun.lazy")];
const noop = lazy("noop");
const fn = noop.function;
const regular = noop.functionRegular;
bench("C++ fn regular", () => {
regular();
});
bench("C++ fn", () => {
fn();
});
bench("C++ getter", () => {
return noop.getterSetter;
});
bench("C++ setter", () => {
noop.getterSetter = 1;
});
run();

View File

@@ -1,20 +1,9 @@
import { bench, run } from "mitata";
var noop = globalThis[Symbol.for("Bun.lazy")]("noop");
var { function: noopFn, callback } = noop;
const noop2 = () => {};
bench("function", function () {
noopFn();
});
bench("JSC::call(() => {})", () => {
callback(noop2);
});
const bound = noop2.bind(null);
bench("bound call", () => {
bound();
noop.function();
});
bench("setter", function () {

View File

@@ -1,7 +0,0 @@
{
"dependencies": {
"eventemitter3": "^5.0.0",
"tiny-typed-emitter": "latest"
},
"prettier": "../../.prettierrc.cjs"
}

View File

@@ -1,34 +1,68 @@
import { bench, group, run } from "mitata";
import { renderToReadableStream } from "react-dom/server.browser";
import { renderToReadableStream as renderToReadableStreamBun } from "react-dom/server";
import { renderToReadableStream } from "react-dom/cjs/react-dom-server.browser.production.min";
import { renderToReadableStream as renderToReadableStreamBun } from "../test/bun.js/react-dom-server.bun";
const App = () => (
<div>
<h1>Hello, world!</h1>
<p>This is a React component This is a React component This is a React component This is a React component.</p>
<p>This is a React component This is a React component This is a React component This is a React component.</p>
<p>This is a React component This is a React component This is a React component This is a React component.</p>
<p>This is a React component This is a React component This is a React component This is a React component.</p>
<p>This is a React component This is a React component This is a React component This is a React component.</p>
<p>
This is a React component This is a React component This is a React
component This is a React component.
</p>
<p>
This is a React component This is a React component This is a React
component This is a React component.
</p>
<p>
This is a React component This is a React component This is a React
component This is a React component.
</p>
<p>
This is a React component This is a React component This is a React
component This is a React component.
</p>
<p>
This is a React component This is a React component This is a React
component This is a React component.
</p>
</div>
);
group("new Response(stream).text()", () => {
bench("react-dom/server.browser", async () => await new Response(await renderToReadableStream(<App />)).text());
bench("react-dom/server.bun", async () => await new Response(await renderToReadableStreamBun(<App />)).text());
bench(
"react-dom/server.browser",
async () => await new Response(await renderToReadableStream(<App />)).text()
);
bench(
"react-dom/server.bun",
async () =>
await new Response(await renderToReadableStreamBun(<App />)).text()
);
});
group("new Response(stream).arrayBuffer()", () => {
bench(
"react-dom/server.browser",
async () => await new Response(await renderToReadableStream(<App />)).arrayBuffer(),
async () =>
await new Response(await renderToReadableStream(<App />)).arrayBuffer()
);
bench(
"react-dom/server.bun",
async () =>
await new Response(await renderToReadableStreamBun(<App />)).arrayBuffer()
);
bench("react-dom/server.bun", async () => await new Response(await renderToReadableStreamBun(<App />)).arrayBuffer());
});
group("new Response(stream).blob()", () => {
bench("react-dom/server.browser", async () => await new Response(await renderToReadableStream(<App />)).blob());
bench("react-dom/server.bun", async () => await new Response(await renderToReadableStreamBun(<App />)).blob());
bench(
"react-dom/server.browser",
async () => await new Response(await renderToReadableStream(<App />)).blob()
);
bench(
"react-dom/server.bun",
async () =>
await new Response(await renderToReadableStreamBun(<App />)).blob()
);
});
await run();

View File

@@ -12,4 +12,5 @@ bench("return await 1", async function () {
return await 1;
});
await run();

View File

@@ -1,17 +0,0 @@
import { readdirSync, statSync } from "fs";
import { bench, run } from "mitata";
import { argv } from "process";
const dir = argv.length > 2 ? argv[2] : "/tmp";
const result = statSync(dir);
bench("Stat.isBlockDevice", () => result.isBlockDevice());
bench("Stat.isCharacterDevice", () => result.isCharacterDevice());
bench("Stat.isDirectory", () => result.isDirectory());
bench("Stat.isFIFO", () => result.isFIFO());
bench("Stat.isFile", () => result.isFile());
bench("Stat.isSocket", () => result.isSocket());
bench("Stat.isSymbolicLink", () => result.isSymbolicLink());
await run();

View File

@@ -4,20 +4,23 @@ var short = new TextEncoder().encode("Hello World!");
var shortUTF16 = new TextEncoder().encode("Hello World 💕💕💕");
var long = new TextEncoder().encode("Hello World!".repeat(1024));
var longUTF16 = new TextEncoder().encode("Hello World 💕💕💕".repeat(1024));
var decoder = new TextDecoder();
bench(`${short.length} ascii`, () => {
var decoder = new TextDecoder();
decoder.decode(short);
});
bench(`${short.length} utf8`, () => {
var decoder = new TextDecoder();
decoder.decode(shortUTF16);
});
bench(`${long.length} ascii`, () => {
var decoder = new TextDecoder();
decoder.decode(long);
});
bench(`${longUTF16.length} utf8`, () => {
var decoder = new TextDecoder();
decoder.decode(longUTF16);
});

View File

@@ -1,98 +1,58 @@
import { readFileSync } from "fs";
import { dirname } from "path";
import { fileURLToPath } from "url";
import { bench, run, group } from "mitata";
import { createRequire } from "module";
const require = createRequire(import.meta.url);
const esbuild_ = require("esbuild/lib/main");
const swc_ = require("@swc/core");
const babel_ = require("@babel/core");
const code = readFileSync(dirname(fileURLToPath(import.meta.url)) + "/../../src/test/fixtures/simple.jsx", "utf-8");
async function getWithName(name) {
let transformSync;
let transform;
let opts;
if (name === "bun") {
const transpiler = new Bun.Transpiler({ loader: "jsx" });
transformSync = transpiler.transformSync.bind(transpiler);
transform = transpiler.transform.bind(transpiler);
opts = "jsx";
} else if (name === "esbuild") {
try {
transformSync = esbuild_.transformSync;
transform = esbuild_.transform;
opts = { loader: "jsx" };
} catch (exception) {
throw exception;
}
} else if (name === "swc") {
try {
transformSync = swc_.transformSync;
transform = swc_.transform;
opts = {
sourceMaps: false,
inlineSourcesContent: false,
jsc: {
target: "es2022",
parser: {
jsx: true,
},
},
};
} catch (exception) {
throw exception;
}
} else if (name === "babel") {
try {
transformSync = babel_.transformSync;
transform = babel_.transform;
opts = {
sourceMaps: false,
presets: ["@babel/preset-react"],
};
} catch (exception) {
throw exception;
}
var transformSync;
var transform;
var opts;
if (process.isBun) {
const transpiler = new Bun.Transpiler({ loader: "jsx" });
transformSync = transpiler.transformSync.bind(transpiler);
transform = transpiler.transform.bind(transpiler);
opts = "jsx";
} else if (process.env["esbuild"]) {
try {
const esbuild = await import("esbuild");
transformSync = esbuild.transformSync;
transform = esbuild.transform;
opts = { loader: "jsx" };
} catch (exception) {
throw exception;
}
} else if (process.env["swc"]) {
try {
const swc = await import("@swc/core");
transformSync = swc.transformSync;
transform = swc.transform;
opts = {
sourceMaps: false,
inlineSourcesContent: false,
jsc: {
target: "es2022",
parser: {
jsx: true,
},
},
};
} catch (exception) {
throw exception;
}
} else if (process.env["babel"]) {
try {
const swc = await import("@babel/core");
transformSync = swc.transformSync;
transform = swc.transform;
opts = {
sourceMaps: false,
presets: [(await import("@babel/preset-react")).default],
};
} catch (exception) {
throw exception;
}
return {
transformSync,
transform,
opts,
name,
};
}
const bun = process.isBun ? await getWithName("bun") : null;
const esbuild = await getWithName("esbuild");
const swc = await getWithName("swc");
const babel = await getWithName("babel");
const code = readFileSync("src/test/fixtures/simple.jsx", "utf8");
const transpilers = [bun, esbuild, swc, babel].filter(Boolean);
group("transformSync (" + ((code.length / 1024) | 0) + " KB jsx file)", () => {
for (let { name, transformSync, opts } of transpilers) {
bench(name, () => {
transformSync(code, opts);
});
}
});
group("tranform x 5", () => {
for (let { name, transform, opts } of transpilers) {
bench(name, async () => {
return Promise.all([
transform(code, opts),
transform(code + "\n", opts),
transform("\n" + code + "\n", opts),
transform("\n" + code + "\n\n", opts),
transform("\n\n" + code + "\n\n", opts),
]);
});
}
});
await run();
if (process.env.ASYNC) {
console.log(await transform(code, opts));
} else {
console.log(transformSync(code, opts));
}

View File

@@ -1,4 +1,4 @@
import { Database } from "https://deno.land/x/sqlite3@0.8.0/mod.ts";
import { Database } from "https://deno.land/x/sqlite3@0.7.2/mod.ts";
import { run, bench } from "../node_modules/mitata/src/cli.mjs";
const db = new Database("./src/northwind.sqlite");

Binary file not shown.

View File

@@ -1,4 +1,9 @@
const env = "process" in globalThis ? process.env : "Deno" in globalThis ? Deno.env.toObject() : {};
const env =
"process" in globalThis
? process.env
: "Deno" in globalThis
? Deno.env.toObject()
: {};
const SERVER = env.SERVER || "ws://0.0.0.0:4001";
const WebSocket = globalThis.WebSocket || (await import("ws")).WebSocket;
@@ -100,10 +105,10 @@ for (let i = 0; i < CLIENTS_TO_WAIT_FOR; i++) {
clients[i] = new WebSocket(`${SERVER}?name=${NAMES[i]}`);
promises.push(
new Promise((resolve, reject) => {
clients[i].onmessage = event => {
clients[i].onmessage = (event) => {
resolve();
};
}),
})
);
}
@@ -116,7 +121,7 @@ var more = false;
var remaining;
for (let i = 0; i < CLIENTS_TO_WAIT_FOR; i++) {
clients[i].onmessage = event => {
clients[i].onmessage = (event) => {
if (LOG_MESSAGES) console.log(event.data);
received++;
remaining--;
@@ -155,7 +160,7 @@ setInterval(() => {
received = 0;
console.log(
last,
`messages per second (${CLIENTS_TO_WAIT_FOR} clients x ${MESSAGES_TO_SEND.length} msg, min delay: ${DELAY}ms)`,
`messages per second (${CLIENTS_TO_WAIT_FOR} clients x ${MESSAGES_TO_SEND.length} msg, min delay: ${DELAY}ms)`
);
if (runs.length >= 10) {

View File

@@ -38,7 +38,9 @@ const server = Bun.serve({
if (
server.upgrade(req, {
data: {
name: new URL(req.url).searchParams.get("name") || "Client #" + (CLIENTS_TO_WAIT_FOR - remainingClients),
name:
new URL(req.url).searchParams.get("name") ||
"Client #" + (CLIENTS_TO_WAIT_FOR - remainingClients),
},
})
)
@@ -48,4 +50,7 @@ const server = Bun.serve({
},
});
console.log(`Waiting for ${remainingClients} clients to connect...\n`, ` http://${server.hostname}:${port}/`);
console.log(
`Waiting for ${remainingClients} clients to connect...\n`,
` http://${server.hostname}:${port}/`
);

View File

@@ -1,6 +1,7 @@
// See ./README.md for instructions on how to run this benchmark.
const port = Deno.env.get("PORT") || 4001;
const CLIENTS_TO_WAIT_FOR = parseInt(Deno.env.get("CLIENTS_COUNT") || "", 10) || 16;
const CLIENTS_TO_WAIT_FOR =
parseInt(Deno.env.get("CLIENTS_COUNT") || "", 10) || 16;
var clients = [];
async function reqHandler(req) {
@@ -12,9 +13,11 @@ async function reqHandler(req) {
clients.push(client);
const name = new URL(req.url).searchParams.get("name");
console.log(`${name} connected (${CLIENTS_TO_WAIT_FOR - clients.length} remain)`);
console.log(
`${name} connected (${CLIENTS_TO_WAIT_FOR - clients.length} remain)`
);
client.onmessage = event => {
client.onmessage = (event) => {
const msg = `${name}: ${event.data}`;
for (let client of clients) {
client.send(msg);

View File

@@ -16,8 +16,12 @@ var WebSocketServer = require("ws").Server,
var clients = [];
wss.on("connection", function (ws, { url }) {
const name = new URL(new URL(url, "http://localhost:3000")).searchParams.get("name");
console.log(`${name} connected (${CLIENTS_TO_WAIT_FOR - clients.length} remain)`);
const name = new URL(new URL(url, "http://localhost:3000")).searchParams.get(
"name"
);
console.log(
`${name} connected (${CLIENTS_TO_WAIT_FOR - clients.length} remain)`
);
clients.push(ws);
ws.on("message", function (message) {

build.zig
View File

@@ -1,14 +1,14 @@
const std = @import("std");
fn moduleSource(comptime out: []const u8) FileSource {
fn pkgPath(comptime out: []const u8) std.build.FileSource {
if (comptime std.fs.path.dirname(@src().file)) |base| {
const outpath = comptime base ++ std.fs.path.sep_str ++ out;
return FileSource.relative(outpath);
return .{ .path = outpath };
} else {
return FileSource.relative(out);
return .{ .path = out };
}
}
pub fn addPicoHTTP(step: *CompileStep, comptime with_obj: bool) void {
pub fn addPicoHTTP(step: *std.build.LibExeObjStep, comptime with_obj: bool) void {
step.addIncludePath("src/deps");
if (with_obj) {
@@ -43,29 +43,93 @@ const color_map = std.ComptimeStringMap([]const u8, .{
&.{ "yellow", "33m" },
});
fn addInternalPackages(b: *Build, step: *CompileStep, _: std.mem.Allocator, _: []const u8, target: anytype) !void {
var bun = b.createModule(.{
.source_file = FileSource.relative("src/bun_redirect.zig"),
});
var io: *Module = brk: {
if (target.isDarwin()) {
break :brk b.createModule(.{
.source_file = FileSource.relative("src/io/io_darwin.zig"),
});
} else if (target.isLinux()) {
break :brk b.createModule(.{
.source_file = FileSource.relative("src/io/io_linux.zig"),
});
}
break :brk b.createModule(.{
.source_file = FileSource.relative("src/io/io_stub.zig"),
});
var compiler_rt_path: []const u8 = "";
var compiler_rt_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
fn addInternalPackages(step: *std.build.LibExeObjStep, allocator: std.mem.Allocator, zig_exe: []const u8, target: anytype) !void {
var bun = std.build.Pkg{
.name = "bun",
.source = pkgPath("src/bun_redirect.zig"),
};
step.addModule("async_io", io);
step.addModule("bun", bun);
var io_darwin: std.build.Pkg = .{
.name = "async_io",
.source = pkgPath("src/io/io_darwin.zig"),
};
var io_linux: std.build.Pkg = .{
.name = "async_io",
.source = pkgPath("src/io/io_linux.zig"),
};
var io_stub: std.build.Pkg = .{
.name = "async_io",
.source = pkgPath("src/io/io_stub.zig"),
};
var io = if (target.isDarwin())
io_darwin
else if (target.isLinux())
io_linux
else
io_stub;
var javascript_core_real: std.build.Pkg = .{
.name = "javascript_core",
.source = pkgPath("src/jsc.zig"),
};
var javascript_core_stub: std.build.Pkg = .{
.name = "javascript_core",
.source = pkgPath("src/jsc_stub.zig"),
};
var javascript_core = if (target.getOsTag() == .freestanding)
javascript_core_stub
else
javascript_core_real;
javascript_core.dependencies = &[_]std.build.Pkg{};
step.addPackage(io);
step.addPackage(bun);
const paths_to_try = .{
"{s}/../lib/compiler_rt/stack_probe.zig",
"{s}/../../lib/compiler_rt/stack_probe.zig",
"{s}/../../../lib/compiler_rt/stack_probe.zig",
"{s}/../../../../lib/compiler_rt/stack_probe.zig",
"{s}/../lib/zig/compiler_rt/stack_probe.zig",
"{s}/../../lib/zig/compiler_rt/stack_probe.zig",
"{s}/../../../lib/zig/compiler_rt/stack_probe.zig",
"{s}/../../../../lib/zig/compiler_rt/stack_probe.zig",
};
var found = false;
if (compiler_rt_path.len > 0) {
const compiler_rt: std.build.Pkg = .{
.name = "compiler_rt",
.source = .{ .path = compiler_rt_path },
};
found = true;
step.addPackage(compiler_rt);
} else {
inline for (paths_to_try) |path_fmt| {
if (!found) brk: {
// workaround for https://github.com/ziglang/zig/issues/14099
const path = try std.fmt.allocPrint(allocator, path_fmt, .{zig_exe});
var target_path = std.os.realpath(
std.fs.path.resolve(allocator, &.{path}) catch break :brk,
&compiler_rt_path_buf,
) catch break :brk;
const compiler_rt: std.build.Pkg = .{
.name = "compiler_rt",
.source = .{ .path = target_path },
};
found = true;
step.addPackage(compiler_rt);
compiler_rt_path = target_path;
}
}
}
if (!found) {
std.io.getStdErr().writeAll("\nwarning: Could not find compiler_rt. This might cause a build error until https://github.com/ziglang/zig/issues/14099 is fixed.\n\n") catch {};
}
}
const BunBuildOptions = struct {
@@ -90,7 +154,7 @@ const BunBuildOptions = struct {
var output_dir: []const u8 = "";
fn panicIfNotFound(comptime filepath: []const u8) []const u8 {
var file = std.fs.cwd().openFile(filepath, .{ .optimize = .read_only }) catch |err| {
var file = std.fs.cwd().openFile(filepath, .{ .mode = .read_only }) catch |err| {
std.debug.panic("error: {s} opening {s}. Please ensure you've downloaded git submodules, and ran `make vendor`, `make jsc`.", .{ filepath, @errorName(err) });
};
file.close();
@@ -133,17 +197,14 @@ fn updateRuntime() anyerror!void {
}
var x64 = "x64";
var optimize: std.builtin.OptimizeMode = undefined;
var mode: std.builtin.Mode = undefined;
const Build = std.Build;
const Builder = std.build.Builder;
const CrossTarget = std.zig.CrossTarget;
const OptimizeMode = std.builtin.OptimizeMode;
const CompileStep = std.build.CompileStep;
const FileSource = std.build.FileSource;
const Module = std.build.Module;
const Mode = std.builtin.Mode;
const fs = std.fs;
pub fn build(b: *Build) !void {
pub fn build(b: *std.build.Builder) !void {
// Standard target options allows the person running `zig build` to choose
// what target to build for. Here we do not override the defaults, which
// means any target is allowed, and the default is native. Other options
@@ -151,10 +212,10 @@ pub fn build(b: *Build) !void {
var target = b.standardTargetOptions(.{});
// Standard release options allow the person running `zig build` to select
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
optimize = b.standardOptimizeOption(.{});
mode = b.standardReleaseOptions();
var output_dir_buf = std.mem.zeroes([4096]u8);
var bin_label = if (optimize == std.builtin.OptimizeMode.Debug) "packages/debug-bun-" else "packages/bun-";
var bin_label = if (mode == std.builtin.Mode.Debug) "packages/debug-bun-" else "packages/bun-";
var triplet_buf: [64]u8 = undefined;
var os_tagname = @tagName(target.getOs().tag);
@@ -163,7 +224,11 @@ pub fn build(b: *Build) !void {
if (std.mem.eql(u8, os_tagname, "macos")) {
os_tagname = "darwin";
target.os_version_min = std.zig.CrossTarget.OsVersion{ .semver = .{ .major = 11, .minor = 0, .patch = 0 } };
if (arch.isAARCH64()) {
target.os_version_min = std.zig.CrossTarget.OsVersion{ .semver = .{ .major = 11, .minor = 0, .patch = 0 } };
} else if (arch.isX86()) {
target.os_version_min = std.zig.CrossTarget.OsVersion{ .semver = .{ .major = 10, .minor = 14, .patch = 0 } };
}
} else if (target.isLinux()) {
target.setGnuLibCVersion(2, 27, 0);
}
@@ -193,7 +258,7 @@ pub fn build(b: *Build) !void {
}
std.fs.cwd().makePath(output_dir) catch {};
const bun_executable_name = if (optimize == std.builtin.OptimizeMode.Debug) "bun-debug" else "bun";
const bun_executable_name = if (mode == std.builtin.Mode.Debug) "bun-debug" else "bun";
const root_src = if (target.getOsTag() == std.Target.Os.Tag.freestanding)
"src/main_wasm.zig"
else
@@ -203,32 +268,26 @@ pub fn build(b: *Build) !void {
const min_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
target.getOsVersionMin().semver
else
.{ .major = 0, .minor = 0, .patch = 0 };
else .{ .major = 0, .minor = 0, .patch = 0 };
const max_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
target.getOsVersionMax().semver
else
.{ .major = 0, .minor = 0, .patch = 0 };
else .{ .major = 0, .minor = 0, .patch = 0 };
var obj_step = b.step("obj", "Build bun as a .o file");
var obj = b.addObject(.{
.name = bun_executable_name,
.root_source_file = FileSource.relative(root_src),
.target = target,
.optimize = optimize,
});
obj_step.dependOn(&b.addLog("Output: {s}/{s}\n", .{ output_dir, bun_executable_name }).step);
var obj = b.addObject(bun_executable_name, root_src);
var default_build_options: BunBuildOptions = brk: {
const is_baseline = arch.isX86() and (target.cpu_model == .baseline or
!std.Target.x86.featureSetHas(target.getCpuFeatures(), .avx2));
var git_sha: [:0]const u8 = "";
if (b.env_map.get("GITHUB_SHA") orelse b.env_map.get("GIT_SHA")) |sha| {
git_sha = b.allocator.dupeZ(u8, sha) catch unreachable;
if (std.os.getenvZ("GITHUB_SHA") orelse std.os.getenvZ("GIT_SHA")) |sha| {
git_sha = std.heap.page_allocator.dupeZ(u8, sha) catch unreachable;
} else {
sha: {
const result = std.ChildProcess.exec(.{
.allocator = b.allocator,
.allocator = std.heap.page_allocator,
.argv = &.{
"git",
"rev-parse",
@@ -237,9 +296,12 @@ pub fn build(b: *Build) !void {
},
.cwd = b.pathFromRoot("."),
.expand_arg0 = .expand,
}) catch break :sha;
}) catch {
std.debug.print("Warning: failed to get git HEAD", .{});
break :sha;
};
git_sha = b.allocator.dupeZ(u8, std.mem.trim(u8, result.stdout, "\n \t")) catch unreachable;
git_sha = std.heap.page_allocator.dupeZ(u8, std.mem.trim(u8, result.stdout, "\n \t")) catch unreachable;
}
}
@@ -254,11 +316,11 @@ pub fn build(b: *Build) !void {
};
{
obj.setTarget(target);
addPicoHTTP(obj, false);
obj.setMainPkgPath(b.pathFromRoot("."));
try addInternalPackages(
b,
obj,
b.allocator,
b.zig_exe,
@@ -275,17 +337,22 @@ pub fn build(b: *Build) !void {
obj.target.cpu_model = .{ .explicit = &std.Target.aarch64.cpu.generic };
}
// we have to dump to stderr because stdout is read by zls
std.io.getStdErr().writer().print("Build {s} v{} - v{} ({s})\n", .{
triplet,
min_version,
max_version,
obj.target.getCpuModel().name,
}) catch unreachable;
std.io.getStdErr().writer().print("Output: {s}/{s}\n\n", .{ output_dir, bun_executable_name }) catch unreachable;
{
obj_step.dependOn(&b.addLog(
"Build {s} v{} - v{} ({s})\n",
.{
triplet,
min_version,
max_version,
obj.target.getCpuModel().name,
},
).step);
}
defer obj_step.dependOn(&obj.step);
obj.setBuildMode(mode);
var actual_build_options = default_build_options;
if (b.option(bool, "generate-sizes", "Generate sizes of things") orelse false) {
actual_build_options.sizegen = true;
@@ -297,11 +364,8 @@ pub fn build(b: *Build) !void {
obj.linkLibC();
obj.strip = false;
obj.bundle_compiler_rt = false;
obj.omit_frame_pointer = optimize != .Debug;
// Disable stack probing on x86 so we don't need to include compiler_rt
if (target.getCpuArch().isX86()) obj.disable_stack_probing = true;
obj.bundle_compiler_rt = true;
obj.omit_frame_pointer = mode != .Debug;
if (b.option(bool, "for-editor", "Do not emit bin, just check for errors") orelse false) {
obj.emit_bin = .no_emit;
@@ -313,16 +377,14 @@ pub fn build(b: *Build) !void {
obj.link_eh_frame_hdr = true;
obj.link_function_sections = true;
}
var log_step = b.addLog("Destination: {s}/{s}\n", .{ output_dir, bun_executable_name });
log_step.step.dependOn(&obj.step);
}
{
const headers_step = b.step("headers-obj", "Build JavaScriptCore headers");
var headers_obj = b.addObject(.{
.name = "headers",
.root_source_file = FileSource.relative("src/bindgen.zig"),
.target = target,
.optimize = optimize,
});
var headers_obj: *std.build.LibExeObjStep = b.addObject("headers", "src/bindgen.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
var headers_build_options = default_build_options;
@@ -333,12 +395,7 @@ pub fn build(b: *Build) !void {
{
const wasm = b.step("bun-wasm", "Build WASM");
var wasm_step = b.addStaticLibrary(.{
.name = "bun-wasm",
.root_source_file = FileSource.relative("src/main_wasm.zig"),
.target = target,
.optimize = optimize,
});
var wasm_step: *std.build.LibExeObjStep = b.addStaticLibrary("bun-wasm", "src/main_wasm.zig");
defer wasm.dependOn(&wasm_step.step);
wasm_step.strip = false;
// wasm_step.link_function_sections = true;
@@ -349,12 +406,7 @@ pub fn build(b: *Build) !void {
{
const headers_step = b.step("httpbench-obj", "Build HTTPBench tool (object files)");
var headers_obj = b.addObject(.{
.name = "httpbench",
.root_source_file = FileSource.relative("misctools/http_bench.zig"),
.target = target,
.optimize = optimize,
});
var headers_obj: *std.build.LibExeObjStep = b.addObject("httpbench", "misctools/http_bench.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
@@ -362,12 +414,7 @@ pub fn build(b: *Build) !void {
{
const headers_step = b.step("machbench-obj", "Build Machbench tool (object files)");
var headers_obj = b.addObject(.{
.name = "machbench",
.root_source_file = FileSource.relative("misctools/machbench.zig"),
.target = target,
.optimize = optimize,
});
var headers_obj: *std.build.LibExeObjStep = b.addObject("machbench", "misctools/machbench.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
@@ -375,12 +422,7 @@ pub fn build(b: *Build) !void {
{
const headers_step = b.step("fetch-obj", "Build fetch (object files)");
var headers_obj = b.addObject(.{
.name = "fetch",
.root_source_file = FileSource.relative("misctools/fetch.zig"),
.target = target,
.optimize = optimize,
});
var headers_obj: *std.build.LibExeObjStep = b.addObject("fetch", "misctools/fetch.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
@@ -388,12 +430,7 @@ pub fn build(b: *Build) !void {
{
const headers_step = b.step("string-bench", "Build string bench");
var headers_obj = b.addExecutable(.{
.name = "string-bench",
.root_source_file = FileSource.relative("src/bench/string-handling.zig"),
.target = target,
.optimize = optimize,
});
var headers_obj: *std.build.LibExeObjStep = b.addExecutable("string-bench", "src/bench/string-handling.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
@@ -401,12 +438,7 @@ pub fn build(b: *Build) !void {
{
const headers_step = b.step("sha-bench-obj", "Build sha bench");
var headers_obj = b.addObject(.{
.name = "sha",
.root_source_file = FileSource.relative("src/sha.zig"),
.target = target,
.optimize = optimize,
});
var headers_obj: *std.build.LibExeObjStep = b.addObject("sha", "src/sha.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
@@ -414,12 +446,7 @@ pub fn build(b: *Build) !void {
{
const headers_step = b.step("vlq-bench", "Build vlq bench");
var headers_obj: *CompileStep = b.addExecutable(.{
.name = "vlq-bench",
.root_source_file = FileSource.relative("src/sourcemap/vlq_bench.zig"),
.target = target,
.optimize = optimize,
});
var headers_obj: *std.build.LibExeObjStep = b.addExecutable("vlq-bench", "src/sourcemap/vlq_bench.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
@@ -427,12 +454,7 @@ pub fn build(b: *Build) !void {
{
const headers_step = b.step("tgz-obj", "Build tgz (object files)");
var headers_obj: *CompileStep = b.addObject(.{
.name = "tgz",
.root_source_file = FileSource.relative("misctools/tgz.zig"),
.target = target,
.optimize = optimize,
});
var headers_obj: *std.build.LibExeObjStep = b.addObject("tgz", "misctools/tgz.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
@@ -445,10 +467,7 @@ pub fn build(b: *Build) !void {
var test_bin_ = b.option([]const u8, "test-bin", "Emit bin to");
var test_filter = b.option([]const u8, "test-filter", "Filter for test");
var headers_obj: *CompileStep = b.addTest(.{
.root_source_file = FileSource.relative(test_file orelse "src/main.zig"),
.target = target,
});
var headers_obj: *std.build.LibExeObjStep = b.addTest(test_file orelse "src/main.zig");
headers_obj.setFilter(test_filter);
if (test_bin_) |test_bin| {
headers_obj.name = std.fs.path.basename(test_bin);
@@ -458,40 +477,40 @@ pub fn build(b: *Build) !void {
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
try linkObjectFiles(b, headers_obj, target);
headers_step.dependOn(&headers_obj.step);
headers_obj.addOptions("build_options", default_build_options.step(b));
{
var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{s} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{"bun"});
var after = b.addLog("\x1b[" ++ color_map.get("d").? ++ "---\n\n" ++ "\x1b[0m", .{});
headers_step.dependOn(&before.step);
headers_step.dependOn(&headers_obj.step);
headers_step.dependOn(&after.step);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
// var iter = headers_obj.modules.iterator();
// while (iter.next()) |item| {
// const module = @ptrCast(*Module, item.value_ptr);
// }
// // while (headers_obj.modules.)
// for (headers_obj.packages.items) |pkg_| {
// const pkg: std.build.Pkg = pkg_;
// if (std.mem.eql(u8, pkg.name, "clap")) continue;
// var test_ = b.addTestSource(pkg.source);
for (headers_obj.packages.items) |pkg_| {
const pkg: std.build.Pkg = pkg_;
if (std.mem.eql(u8, pkg.name, "clap")) continue;
var test_ = b.addTestSource(pkg.source);
// b
// .test_.setMainPkgPath(obj.main_pkg_path.?);
// try configureObjectStep(b, test_, @TypeOf(target), target, obj.main_pkg_path.?);
// try linkObjectFiles(b, test_, target);
// test_.addOptions("build_options", default_build_options.step(b));
test_.setMainPkgPath(obj.main_pkg_path.?);
test_.setTarget(target);
try configureObjectStep(b, test_, @TypeOf(target), target, obj.main_pkg_path.?);
try linkObjectFiles(b, test_, target);
test_.addOptions("build_options", default_build_options.step(b));
// if (pkg.dependencies) |children| {
// test_.packages = std.ArrayList(std.build.Pkg).init(b.allocator);
// try test_.packages.appendSlice(children);
// }
if (pkg.dependencies) |children| {
test_.packages = std.ArrayList(std.build.Pkg).init(b.allocator);
try test_.packages.appendSlice(children);
}
// var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{s} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{pkg.name});
// var after = b.addLog("\x1b[" ++ color_map.get("d").? ++ "---\n\n" ++ "\x1b[0m", .{});
// headers_step.dependOn(&before.step);
// headers_step.dependOn(&test_.step);
// headers_step.dependOn(&after.step);
// }
var before = b.addLog("\x1b[" ++ color_map.get("magenta").? ++ "\x1b[" ++ color_map.get("b").? ++ "[{s} tests]" ++ "\x1b[" ++ color_map.get("d").? ++ " ----\n\n" ++ "\x1b[0m", .{pkg.name});
var after = b.addLog("\x1b[" ++ color_map.get("d").? ++ "---\n\n" ++ "\x1b[0m", .{});
headers_step.dependOn(&before.step);
headers_step.dependOn(&test_.step);
headers_step.dependOn(&after.step);
}
}
if (obj.emit_bin != .no_emit)
obj.setOutputDir(output_dir);
b.default_step.dependOn(obj_step);
}
@@ -501,7 +520,7 @@ pub var original_make_fn: ?*const fn (step: *std.build.Step) anyerror!void = nul
// we cannot use this with debugging
// so I am leaving this here for now, with the eventual intent to switch to std.build.Builder
// but it is dead code
pub fn linkObjectFiles(b: *Build, obj: *CompileStep, target: anytype) !void {
pub fn linkObjectFiles(b: *std.build.Builder, obj: *std.build.LibExeObjStep, target: anytype) !void {
if (target.getOsTag() == .freestanding)
return;
var dirs_to_search = std.BoundedArray([]const u8, 32).init(0) catch unreachable;
@@ -558,26 +577,22 @@ pub fn linkObjectFiles(b: *Build, obj: *CompileStep, target: anytype) !void {
}
}
pub fn configureObjectStep(b: *std.build.Builder, obj: *CompileStep, comptime Target: type, target: Target, main_pkg_path: []const u8) !void {
pub fn configureObjectStep(b: *std.build.Builder, obj: *std.build.LibExeObjStep, comptime Target: type, target: Target, main_pkg_path: []const u8) !void {
obj.setMainPkgPath(main_pkg_path);
// obj.setTarget(target);
try addInternalPackages(b, obj, std.heap.page_allocator, b.zig_exe, target);
obj.setTarget(target);
try addInternalPackages(obj, std.heap.page_allocator, b.zig_exe, target);
if (target.getOsTag() != .freestanding)
addPicoHTTP(obj, false);
obj.strip = false;
obj.setOutputDir(output_dir);
// obj.setBuildMode(optimize);
obj.bundle_compiler_rt = false;
obj.setBuildMode(mode);
obj.bundle_compiler_rt = true;
if (target.getOsTag() != .freestanding) obj.linkLibC();
if (target.getOsTag() != .freestanding) obj.bundle_compiler_rt = false;
// Disable stack probing on x86 so we don't need to include compiler_rt
// Needs to be disabled here too so headers object will build without the `__zig_probe_stack` symbol
if (target.getCpuArch().isX86()) obj.disable_stack_probing = true;
if (target.getOsTag() != .freestanding) obj.bundle_compiler_rt = true;
if (target.getOsTag() == .linux) {
// obj.want_lto = tar;

bun.lockb

Binary file not shown.

Some files were not shown because too many files have changed in this diff.