Compare commits

1 commit

Author         SHA1        Message                   Date
Dylan Conway   374bc3a83b  avoid calling toSlice()   2022-11-02 18:53:24 -07:00

991 changed files with 61109 additions and 190273 deletions

View File

@@ -1,8 +1,7 @@
#!/bin/bash
curl -L https://github.com/zigtools/zls-vscode/releases/download/1.1.6/zls-vscode-1.1.6.vsix >/home/ubuntu/vscode-zig.vsix
git clone https://github.com/zigtools/zls /home/ubuntu/zls
curl -L https://github.com/Jarred-Sumner/vscode-zig/releases/download/march18/zig-0.2.5.vsix >/home/ubuntu/vscode-zig.vsix
git clone https://github.com/oven-sh/zls /home/ubuntu/zls
cd /home/ubuntu/zls
git checkout aabdb0c6ecb3c9a47feff2c2bfb9be4e95adf723
git submodule update --init --recursive --progress --depth=1
zig build -Drelease-fast

.gitattributes vendored (2 lines changed)
View File

@@ -6,5 +6,3 @@ src/bun.js/bindings/sqlite/sqlite3.c linguist-vendored
src/bun.js/bindings/sqlite/sqlite3_local.h linguist-vendored
*.lockb binary diff=lockb
*.zig text eol=lf
src/bun.js/bindings/simdutf.cpp linguist-vendored
src/bun.js/bindings/simdutf.h linguist-vendored

View File

@@ -15,7 +15,7 @@ body:
attributes:
label: What platform is your computer?
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For MacOS and Linux: copy the output of `uname -mprsv`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
attributes:

View File

@@ -1,6 +1,6 @@
name: 🐛 Bug Report
description: Report an issue that should be fixed
labels: [bug]
labels: [bug, need repro]
body:
- type: markdown
attributes:
@@ -19,14 +19,15 @@ body:
attributes:
label: What platform is your computer?
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For MacOS and Linux: copy the output of `uname -mprsv`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
attributes:
label: What steps can reproduce the bug?
description: Explain the bug and provide a code snippet that can reproduce it.
validations:
required: true
description: Enter the details about your bug. If possible, please provide a small code snippet that can reproduce the issue.
- type: textarea
attributes:
label: How often does it reproduce? Is there a required condition?
- type: textarea
attributes:
label: What is the expected behavior?
@@ -35,6 +36,8 @@ body:
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
validations:
required: true
- type: textarea
attributes:
label: Additional information

View File

@@ -18,6 +18,8 @@ body:
- Documentation is confusing
- Example code is not working
- Something else
validations:
required: true
- type: textarea
attributes:
label: What is the issue?
@@ -26,4 +28,4 @@ body:
- type: textarea
attributes:
label: Where did you find it?
description: If possible, please provide the URL(s) where you found this issue.
description: If possible, please provide the URL(s) where you found this issue.

.github/labeler.yml vendored Normal file (52 lines changed)
View File

@@ -0,0 +1,52 @@
chore:
- any: ['*', '**/*']
all: ['!packages/*', '!packages/**/*', '!src/*', '!src/**/*', '!types/*', '!types/**/*', '!test/*', '!test/**/*', '!bench/*', '!bench/**/*', '!examples/*', '!examples/**/*']
'packages:bun':
- src/*
- src/**/*
'packages:bun-darwin-aarch64':
- packages/bun-darwin-aarch64/*
- packages/bun-darwin-aarch64/**/*
'packages:bun-error':
- packages/bun-error/*
- packages/bun-error/**/*
'packages:bun-framework-next':
- packages/bun-framework-next/*
- packages/bun-framework-next/**/*
'packages:bun-landing':
- packages/bun-landing/*
- packages/bun-landing/**/*
'packages:bun-linux-x64':
- packages/bun-linux-x64/*
- packages/bun-linux-x64/**/*
'packages:bun-macro-relay':
- packages/bun-macro-relay/*
- packages/bun-macro-relay/**/*
'packages:bun-types':
- packages/bun-types/*
- packages/bun-types/**/*
- types/bun/*
'packages:bun-wasm':
- packages/bun-wasm/*
- packages/bun-wasm/**/*
'tests':
- test/*
- test/**/*
'benchmarks':
- bench/*
- bench/**/*
'templates':
- examples/*
- examples/**/*

.github/labels.yml vendored Normal file (66 lines changed)
View File

@@ -0,0 +1,66 @@
- name: 'bug'
color: 'd73a4a'
- name: 'segfault'
color: 'b60205'
- name: 'documentation'
color: '0075ca'
- name: 'duplicate'
color: 'cfd3d7'
- name: 'wontfix'
color: 'cfd3d7'
- name: 'question'
color: 'd876e3'
- name: 'enhancement'
color: 'a2eeef'
- name: 'good first issue'
color: '7057ff'
- name: 'help wanted'
color: '008672'
- name: 'infrastructure'
color: '7c5f8a'
- name: 'invalid'
color: 'e4e669'
- name: 'need repro'
color: 'c66037'
- name: 'node.js'
color: '0E8A16'
- name: 'napi'
color: 'BE05D2'
- name: 'esm<>cjs'
color: '7dcde3'
- name: 'performance'
color: 'E99695'
- name: 'polyfill'
color: 'f9c5e6'
- name: 'tracking'
color: '5319E7'
- name: 'transpiler'
color: 'BFDADC'
- name: 'typescript'
color: '87511b'
- name: 'chore'
color: 'cfd3d7'
- name: 'templates'
color: 'FBCA04'
- name: 'benchmarks'
color: 'FBCA04'
- name: 'tests'
color: 'FBCA04'
- name: 'packages:bun'
color: 'FBCA04'
- name: 'packages:bun-darwin-aarch64'
color: 'FBCA04'
- name: 'packages:bun-error'
color: 'FBCA04'
- name: 'packages:bun-framework-next'
color: 'FBCA04'
- name: 'packages:bun-landing'
color: 'FBCA04'
- name: 'packages:bun-linux-x64'
color: 'FBCA04'
- name: 'packages:bun-macro-relay'
color: 'FBCA04'
- name: 'packages:bun-types'
color: 'FBCA04'
- name: 'packages:bun-wasm'
color: 'FBCA04'

.github/workflows/bun-landing.yml vendored Normal file (46 lines changed)
View File

@@ -0,0 +1,46 @@
name: bun-landing
on:
push:
paths:
- packages/bun-landing/**/*
branches: [main]
jobs:
build:
name: website build
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
strategy:
fail-fast: false
matrix:
node-version:
- 16
steps:
- name: Checkout repo
uses: actions/checkout@v2
- name: Use Node ${{ matrix.node }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node }}
- name: Install bun
uses: xhyrom/setup-bun@v0.1.2
with:
bun-version: latest
- name: Install global dependencies
run: bun install
- name: Install package dependencies
run: cd packages/bun-landing && bun install
- name: Build
run: cd packages/bun-landing && bun run build.tsx
- name: Commit changes
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_message: build:(landing) automated website build
token: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -6,8 +6,7 @@ env:
on:
push:
branches:
- main
branches: [main, bun-actions]
paths:
- "src/**/*"
- "test/**/*"
@@ -15,8 +14,7 @@ on:
- "Makefile"
- "Dockerfile"
pull_request:
branches:
- main
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
@@ -39,30 +37,27 @@ jobs:
arch: x86_64
build_arch: amd64
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: westmere
tag: linux-x64-baseline
arch: x86_64
build_arch: amd64
runner: linux-amd64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: native
tag: linux-aarch64
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Checkout submodules
run: git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j $(nproc)
- uses: docker/setup-buildx-action@v2
id: buildx
with:
@@ -76,19 +71,16 @@ jobs:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- run: |
mkdir -p /tmp/.buildx-cache-${{matrix.tag}}
- name: Build and push
uses: docker/build-push-action@v3
with:
context: .
push: false
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
cache-from: type=gha
cache-to: type=gha,mode=max
build-args: |
ARCH=${{matrix.arch}}
BUILDARCH=${{matrix.build_arch}}
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
CPU_TARGET=${{matrix.cpu}}
WEBKIT_URL=${{matrix.webkit_url}}
GIT_SHA=${{github.sha}}
@@ -130,19 +122,17 @@ jobs:
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
- name: Release
id: release
uses: ncipollo/release-action@v1
uses: softprops/action-gh-release@v1
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
generate_release_notes: true
body: "This is the canary release of Bun that corresponds to the commit [${{ github.sha }}]"
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
tag_name: "canary"
files: |
${{runner.temp}}/release/bun-${{matrix.tag}}.zip
${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: bun-obj-${{matrix.tag}}

View File

@@ -6,7 +6,7 @@ env:
on:
push:
branches: [main]
branches: [main, bun-actions]
paths:
- "src/**/*"
- "test/**/*"
@@ -42,8 +42,8 @@ jobs:
tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Checkout submodules
run: git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j $(nproc)
- uses: docker/setup-buildx-action@v2
id: buildx
with:
@@ -59,40 +59,21 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
tags: ghcr.io/oven-sh/bun-obj:${{github.sha}}-${{matrix.cpu}}-${{matrix.arch}}-macos
cache-from: type=gha
cache-to: type=gha,mode=min
cache-to: type=gha,mode=max
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=amd64
BUILD_MACHINE_ARCH=x86_64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/amd64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'ARM64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=arm64
BUILD_MACHINE_ARCH=aarch64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/arm64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
@@ -110,7 +91,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell
@@ -119,7 +100,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: westmere
@@ -128,7 +109,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell
@@ -137,7 +118,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native
@@ -145,7 +126,7 @@ jobs:
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true
@@ -165,16 +146,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
brew install rust llvm@13 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}
echo "export PATH=$(brew --prefix llvm@13)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@13
- name: Download WebKit
if: matrix.compile_obj
env:
@@ -234,6 +209,7 @@ jobs:
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
@@ -249,7 +225,7 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -257,14 +233,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -282,15 +258,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
brew install rust llvm@13 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
echo "export PATH=$(brew --prefix llvm@13)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@13
- name: Download WebKit
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -369,16 +340,14 @@ jobs:
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
- name: Release
id: release
uses: ncipollo/release-action@v1
uses: softprops/action-gh-release@v1
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
generate_release_notes: true
body: "This is the canary release of Bun that corresponds to the commit [${{ github.sha }}]"
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
tag_name: "canary"
files: |
${{runner.temp}}/release/${{matrix.tag}}.zip
${{runner.temp}}/release/${{matrix.tag}}-profile.zip

View File

@@ -6,7 +6,7 @@ env:
on:
push:
branches: [main]
branches: [main, bun-actions]
paths:
- "src/**/*"
- "test/**/*"
@@ -42,8 +42,8 @@ jobs:
# tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Checkout submodules
run: git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j $(nproc)
- uses: docker/setup-buildx-action@v2
id: buildx
with:
@@ -59,40 +59,21 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
tags: ghcr.io/oven-sh/bun-obj:${{github.sha}}-${{matrix.cpu}}-${{matrix.arch}}-macos-baseline
cache-from: type=gha
cache-to: type=gha,mode=min
cache-to: type=gha,mode=max
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=amd64
BUILD_MACHINE_ARCH=x86_64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/amd64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'ARM64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=arm64
BUILD_MACHINE_ARCH=aarch64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/arm64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
@@ -110,7 +91,7 @@ jobs:
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell
@@ -119,7 +100,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: westmere
@@ -128,7 +109,7 @@ jobs:
obj: bun-obj-darwin-x64-baseline
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell
@@ -137,7 +118,7 @@ jobs:
# obj: bun-obj-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native
@@ -145,7 +126,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -165,23 +146,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
brew install rust llvm@13 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache (dependencies)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.dependencies
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
- name: ccache (c++)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.compile_obj
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
echo "export PATH=$(brew --prefix llvm@13)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@13
- name: Download WebKit
if: matrix.compile_obj
env:
@@ -235,6 +203,7 @@ jobs:
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
@@ -250,7 +219,7 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
@@ -258,14 +227,14 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -283,15 +252,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
brew install rust llvm@13 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache (link)
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
echo "export PATH=$(brew --prefix llvm@13)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@13
- name: Download WebKit
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -373,16 +337,14 @@ jobs:
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
- name: Release
id: release
uses: ncipollo/release-action@v1
uses: softprops/action-gh-release@v1
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
generate_release_notes: true
body: "This is the canary release of Bun that corresponds to the commit [${{ github.sha }}]"
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
tag_name: "canary"
files: |
${{runner.temp}}/release/${{matrix.tag}}.zip
${{runner.temp}}/release/${{matrix.tag}}-profile.zip

View File

@@ -6,7 +6,7 @@ env:
on:
push:
branches: [main]
branches: [main, bun-actions]
paths:
- "src/**/*"
- "test/**/*"
@@ -42,8 +42,8 @@ jobs:
# tag: bun-obj-darwin-aarch64
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- name: Checkout submodules
run: git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress -j $(nproc)
- uses: docker/setup-buildx-action@v2
id: buildx
with:
@@ -59,7 +59,6 @@ jobs:
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
@@ -68,31 +67,12 @@ jobs:
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=amd64
BUILD_MACHINE_ARCH=x86_64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/amd64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- name: Build and push
uses: docker/build-push-action@v3
if: runner.arch == 'ARM64'
with:
context: .
push: false
cache-from: type=gha
cache-to: type=gha,mode=min
build-args: |
ARCH=${{ matrix.arch }}
BUILDARCH=arm64
BUILD_MACHINE_ARCH=aarch64
CPU_TARGET=${{ matrix.cpu }}
TRIPLET=${{matrix.arch}}-macos-none
GIT_SHA=${{github.sha}}
platforms: linux/arm64
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release
- uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}
@@ -110,7 +90,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell
@@ -119,7 +99,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: westmere
@@ -128,7 +108,7 @@ jobs:
# obj: bun-obj-darwin-x64-baseline
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell
@@ -137,7 +117,7 @@ jobs:
obj: bun-obj-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native
@@ -145,7 +125,7 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
@@ -165,10 +145,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
brew install rust llvm@13 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
echo "export PATH=$(brew --prefix llvm@13)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@13
- name: Download WebKit
if: matrix.compile_obj
env:
@@ -184,18 +164,6 @@ jobs:
rm -rf $JSC_BASE_DIR
mkdir -p $JSC_BASE_DIR
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
- name: ccache (dependencies)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.dependencies
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
- name: ccache (c++)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.compile_obj
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
- name: Compile dependencies
if: matrix.dependencies
env:
@@ -252,7 +220,7 @@ jobs:
# package: bun-darwin-x64
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
@@ -260,14 +228,14 @@ jobs:
package: bun-darwin-x64
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
@@ -285,10 +253,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
brew install rust llvm@13 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
echo "export PATH=$(brew --prefix llvm@13)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@13
- name: Download WebKit
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -319,11 +287,6 @@ jobs:
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release
- name: ccache (link)
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
- name: Link
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -375,16 +338,14 @@ jobs:
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
- name: Release
id: release
uses: ncipollo/release-action@v1
uses: softprops/action-gh-release@v1
if: github.ref == 'refs/heads/main'
with:
prerelease: true
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
token: ${{ secrets.GITHUB_TOKEN }}
generate_release_notes: true
body: "This is the canary release of Bun that corresponds to the commit [${{ github.sha }}]"
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
tag_name: "canary"
files: |
${{runner.temp}}/release/${{matrix.tag}}.zip
${{runner.temp}}/release/${{matrix.tag}}-profile.zip

View File

@@ -1,60 +0,0 @@
name: Release bun-types@canary
on:
push:
branches: [main]
paths:
- 'packages/bun-types/**'
jobs:
tests:
name: Build, test, publish canary
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Install Node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install Bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install dependencies
run: bun install
- name: Generate package
run: bun run build
- name: Tests
run: bun run test
- name: Set temp version
working-directory: packages/bun-types/dist
run: |
git_hash=$(git rev-parse --short "$GITHUB_SHA")
new_pkg_version="$(bun --version)-canary.${git_hash}"
echo "new_pkg_version"
echo "${new_pkg_version}"
npm version ${new_pkg_version} --no-git-tag-version
- name: Publish to NPM
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_BUN_TYPES_TOKEN }}
# dry-run: true
tag: canary
# - name: Publish on NPM
# working-directory: packages/bun-types/dist
# run: npm publish --access public --tag canary # --dry-run
# env:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# NODE_AUTH_TOKEN: ${{ secrets.NPM_BUN_TYPES_TOKEN }}

View File

@@ -1,135 +0,0 @@
name: Release bun-types
on:
workflow_dispatch:
jobs:
test-build:
name: Test & Build
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install dependencies
run: bun upgrade --canary; bun install
- name: Build package
run: bun run build
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist/*
if-no-files-found: error
publish-npm:
name: Publish to NPM
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install Node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: "https://registry.npmjs.org"
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist
- name: Publish to NPM
working-directory: packages/bun-types/dist
run: npm publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
publish-gpr:
name: Publish on GPR
runs-on: ubuntu-latest
needs: [test-build]
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- uses: actions/checkout@v3
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
registry-url: "https://npm.pkg.github.com/"
scope: "@oven-sh"
- name: Install bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Download all artifacts
uses: actions/download-artifact@v3
with:
name: bun-types
path: packages/bun-types/dist
- name: Add scope to name
run: bun scripts/gpr.ts
- name: Publish on GPR
run: cd dist/ && npm publish --access public
env:
NODE_AUTH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# no need for separate releases now
# create-release:
# name: Create Release
# runs-on: ubuntu-latest
# needs: [test-build]
# defaults:
# run:
# working-directory: packages/bun-types
# if: github.repository_owner == 'oven-sh'
# steps:
# - name: Download all artifacts
# uses: actions/download-artifact@v3
# with:
# name: bun-types
# path: packages/bun-types/dist
# - name: Set version
# run: echo "version=$(jq --raw-output '.version' dist/package.json)" >> $GITHUB_ENV
# - name: Create Release
# uses: softprops/action-gh-release@v0.1.14
# with:
# tag_name: "v${{ env.version }}"
# body: "This is the release of bun-types that corresponds to the commit [${{ github.sha }}]"
# token: ${{ secrets.GITHUB_TOKEN }}
# files: |
# dist/*

View File

@@ -1,41 +0,0 @@
name: Test bun-types
on:
push:
paths:
- 'packages/bun-types/**'
branches: [main]
pull_request:
paths:
- 'packages/bun-types/**'
jobs:
tests:
name: Build and test
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v0.1.8
with:
bun-version: canary
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install node
uses: actions/setup-node@v3
with:
node-version: latest
- name: Install dependencies
run: bun install
- name: Generate package
run: bun run build
- name: Tests
run: bun run test

.gitignore vendored (6 lines changed)
View File

@@ -104,9 +104,3 @@ src/runtime.version
*.database
*.db
misctools/machbench
*.big
.eslintcache
bun-webkit
src/deps/c-ares/build

.gitmodules vendored (19 lines changed)
View File

@@ -36,7 +36,14 @@ shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/boringssl"]
path = src/deps/boringssl
url = https://github.com/oven-sh/boringssl.git
url = https://github.com/google/boringssl.git
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/libbacktrace"]
path = src/deps/libbacktrace
url = https://github.com/ianlancetaylor/libbacktrace
ignore = dirty
depth = 1
shallow = true
@@ -62,6 +69,10 @@ ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/c-ares"]
path = src/deps/c-ares
url = https://github.com/c-ares/c-ares.git
[submodule "src/deps/oniguruma"]
path = src/deps/oniguruma
url = https://github.com/kkos/oniguruma
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false

View File

@@ -1,8 +0,0 @@
src/fallback.html
# src/test
test/bun.js/solid-dom-fixtures
test/bun.js/bundled
#src/bun.js/builtins
# src/api/demo
test/snapshots
test/snapshots-no-hmr

View File

@@ -1,7 +0,0 @@
{
"tabWidth": 2,
"useTabs": false,
"singleQuote": false,
"bracketSpacing": true,
"trailingComma": "all"
}

View File

@@ -1,9 +0,0 @@
#!/bin/bash
set -euxo pipefail
# if bun-webkit node_modules directory exists
if [ -d ./node_modules/bun-webkit ]; then
rm -f bun-webkit
# get the first matching bun-webkit-* directory name
ln -s ./node_modules/$(ls ./node_modules | grep bun-webkit- | head -n 1) ./bun-webkit
fi

View File

@@ -8,10 +8,6 @@ PICOHTTPPARSER_VERSION=$(git rev-parse HEAD:./src/deps/picohttpparser)
BORINGSSL_VERSION=$(git rev-parse HEAD:./src/deps/boringssl)
ZLIB_VERSION=$(git rev-parse HEAD:./src/deps/zlib)
UWS_VERSION=$(git rev-parse HEAD:./src/deps/uws)
LOLHTML=$(git rev-parse HEAD:./src/deps/lol-html)
TINYCC=$(git rev-parse HEAD:./src/deps/tinycc)
C_ARES=$(git rev-parse HEAD:./src/deps/c-ares)
USOCKETS=$(cd src/deps/uws/uSockets && git rev-parse HEAD)
rm -rf src/generated_versions_list.zig
echo "// AUTO-GENERATED FILE. Created via .scripts/write-versions.sh" >src/generated_versions_list.zig
@@ -24,9 +20,6 @@ echo "pub const uws = \"$UWS_VERSION\";" >>src/generated_versions_list.zig
echo "pub const webkit = \"$WEBKIT_VERSION\";" >>src/generated_versions_list.zig
echo "pub const zig = @import(\"std\").fmt.comptimePrint(\"{}\", .{@import(\"builtin\").zig_version});" >>src/generated_versions_list.zig
echo "pub const zlib = \"$ZLIB_VERSION\";" >>src/generated_versions_list.zig
echo "pub const tinycc = \"$TINYCC\";" >>src/generated_versions_list.zig
echo "pub const lolhtml = \"$LOLHTML\";" >>src/generated_versions_list.zig
echo "pub const c_ares = \"$C_ARES\";" >>src/generated_versions_list.zig
echo "" >>src/generated_versions_list.zig
zig fmt src/generated_versions_list.zig

View File

@@ -4,42 +4,47 @@
"name": "Mac",
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
"includePath": [
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/bun-webkit/include/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/*",
"${workspaceFolder}/src/bun.js/bindings/",
"${workspaceFolder}/src/bun.js/bindings/WebCore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/openssl/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/algorithms/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/parameters/",
"${workspaceFolder}/src/bun.js/builtins/",
"${workspaceFolder}/src/bun.js/builtins/cpp",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/bun.js/bindings/WebCore/",
"${workspaceFolder}/src/bun.js/WebKit/Source/bmalloc/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/uws/uSockets/src"
],
"browse": {
"path": [
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/bun-webkit/include/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/**",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/*",
"${workspaceFolder}/src/bun.js/bindings/**",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/builtins/**",
"${workspaceFolder}/src/bun.js/builtins/cpp/**",
"${workspaceFolder}/src/bun.js/modules/**",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/bun.js/builtins/**",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/builtins/cpp/**",
"${workspaceFolder}/src/bun.js/WebKit/Source/bmalloc/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/openssl/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/algorithms/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/parameters/",
"${workspaceFolder}/../webkit-build/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/uws/uSockets/src"
],
"limitSymbolsToIncludedHeaders": true,
@@ -59,7 +64,8 @@
"macFrameworkPath": [],
"compilerPath": "/opt/homebrew/opt/llvm/bin/clang++",
"cStandard": "c17",
"cppStandard": "c++20"
"cppStandard": "c++20",
"intelliSenseMode": "macos-clang-x64"
}
],
"version": 4

.vscode/launch.json generated vendored (72 lines changed)
View File

@@ -7,7 +7,7 @@
"name": "bun test",
"program": "bun-debug",
"args": ["wiptest", "${file}"],
"cwd": "${workspaceFolder}/test/bun.js",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1"
},
@@ -21,8 +21,7 @@
"args": ["wiptest"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
@@ -38,18 +37,6 @@
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run (hot)",
"program": "bun-debug",
"args": ["--hot", "${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
@@ -109,43 +96,6 @@
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun build debug",
"program": "bun-debug",
"args": ["build", "--platform=bun", "--outdir=/tmp/testout", "${file}"],
"cwd": "${file}/../../",
"console": "internalConsole",
"env": {
"BUN_CONFIG_MINIFY_WHITESPACE": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bunx debug",
"program": "bun-debug",
"args": ["--bun", "x", "tsc", "--help"],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1"
}
},
{
"type": "lldb",
"request": "launch",
"name": "bun install",
"program": "bun-debug",
"args": ["install"],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1"
}
},
{
"type": "lldb",
"request": "launch",
@@ -154,24 +104,6 @@
"args": ["https://example.com", "--verbose"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "Build zig unit test",
"program": "make",
"args": ["build-unit", "${file}"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "Run zig unit test",
"program": "${workspaceFolder}/zig-out/bin/test",
"args": ["abc"],
"cwd": "${workspaceFolder}",
"console": "internalConsole"
}
]
}

.vscode/settings.json vendored (14 lines changed)
View File

@@ -7,13 +7,10 @@
"search.followSymlinks": false,
"search.useIgnoreFiles": true,
"zig.buildOnSave": false,
"zig.buildArgs": ["obj", "-Dfor-editor"],
"zig.buildOption": "build",
"zig.buildFilePath": "${workspaceFolder}/build.zig",
"[zig]": {
"editor.tabSize": 4,
"editor.useTabStops": false,
"editor.defaultFormatter": "tiehuis.zig",
"editor.defaultFormatter": "AugusteRame.zls-vscode",
"editor.formatOnSave": true
},
"[ts]": {
@@ -34,6 +31,7 @@
},
"zig.beforeDebugCmd": "make build-unit ${file} ${filter} ${bin}",
"zig.testCmd": "make test ${file} ${filter} ${bin}",
"lldb.verboseLogging": false,
"files.exclude": {
"**/.git": true,
@@ -46,6 +44,8 @@
"**/*.xcscheme": true,
"**/*.pem": true,
"**/*.xcodeproj": true,
"packages/bun-types/*.d.ts": true,
"test/snapshots": true,
"test/snapshots-no-hmr": true,
"src/bun.js/WebKit": true,
@@ -57,8 +57,6 @@
"src/deps/uws": true,
"src/deps/zlib": true,
"src/deps/lol-html": true,
"src/deps/c-ares": true,
"src/deps/tinycc": true,
"test/snippets/package-json-exports/_node_modules_copy": true
},
"C_Cpp.files.exclude": {
@@ -184,7 +182,5 @@
"packet.h": "c",
"queue": "cpp"
},
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "Enabled",
"eslint.workingDirectories": ["packages/bun-types"]
"cmake.configureOnOpen": false
}

View File

@@ -7,20 +7,17 @@ ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
ARG CPU_TARGET=native
ARG ARCH=x86_64
ARG BUILD_MACHINE_ARCH=x86_64
ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=jul27-2
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.11.0-dev.1314+9856bea34"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
ARG WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/$WEBKIT_TAG/${WEBKIT_BASENAME}.tar.gz"
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
ARG ZIG_URL="https://github.com/oven-sh/zig/releases/download/$ZIG_TAG/zig-linux-$BUILDARCH.zip"
ARG GIT_SHA=""
ARG BUN_BASE_VERSION=0.5
ARG BUN_BASE_VERSION=0.2
FROM bitnami/minideb:bullseye as bun-base
@@ -28,7 +25,7 @@ RUN install_packages ca-certificates curl wget lsb-release software-properties-c
RUN wget https://apt.llvm.org/llvm.sh && \
chmod +x llvm.sh && \
./llvm.sh 15
./llvm.sh 13
RUN install_packages \
cmake \
@@ -46,11 +43,10 @@ RUN install_packages \
rsync \
ruby \
unzip \
xz-utils \
bash tar gzip ccache
bash tar gzip
ENV CXX=clang++-15
ENV CC=clang-15
ENV CXX=clang++-13
ENV CC=clang-13
RUN curl -fsSL https://deb.nodesource.com/setup_lts.x | bash - && \
install_packages nodejs && \
@@ -67,15 +63,13 @@ ARG BUILDARCH
ARG ZIG_PATH
ARG WEBKIT_URL
ARG ZIG_URL
ARG ZIG_FOLDERNAME
ARG ZIG_FILENAME
ENV WEBKIT_OUT_DIR=${WEBKIT_DIR}
ENV BUILDARCH=${BUILDARCH}
ENV AR=/usr/bin/llvm-ar-15
ENV AR=/usr/bin/llvm-ar-13
ENV ZIG "${ZIG_PATH}/zig"
ENV PATH="$ZIG/bin:$PATH"
ENV LD=lld-15
ENV LD=lld-13
RUN mkdir -p $BUN_DIR $BUN_DEPS_OUT_DIR
@@ -84,8 +78,8 @@ FROM bun-base as bun-base-with-zig-and-webkit
WORKDIR $GITHUB_WORKSPACE
ADD $ZIG_URL .
RUN tar xf ${ZIG_FILENAME} && \
rm ${ZIG_FILENAME} && mv ${ZIG_FOLDERNAME} zig;
RUN unzip -q zig-linux-$BUILDARCH.zip && \
rm zig-linux-$BUILDARCH.zip;
@@ -111,30 +105,6 @@ RUN mkdir -p ${WEBKIT_DIR} && cd ${GITHUB_WORKSPACE} && \
LABEL org.opencontainers.image.title="bun base image with zig & webkit ${BUILDARCH} (glibc)"
LABEL org.opencontainers.image.source=https://github.com/oven-sh/bun
FROM bun-base as c-ares
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/c-ares ${BUN_DIR}/src/deps/c-ares
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make c-ares && rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile
FROM bun-base as lolhtml
@@ -152,9 +122,7 @@ ARG BUN_DIR
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/lol-html ${BUN_DIR}/src/deps/lol-html
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache export PATH=$PATH:$HOME/.cargo/bin && export CC=$(which clang-15) && cd ${BUN_DIR} && \
RUN export PATH=$PATH:$HOME/.cargo/bin && export CC=$(which clang-13) && cd ${BUN_DIR} && \
make lolhtml && rm -rf src/deps/lol-html Makefile
FROM bun-base as mimalloc
@@ -173,9 +141,7 @@ COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd ${BUN_DIR} && \
RUN cd ${BUN_DIR} && \
make mimalloc && rm -rf src/deps/mimalloc Makefile
FROM bun-base as zlib
@@ -196,9 +162,7 @@ COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
WORKDIR $BUN_DIR
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && \
RUN cd $BUN_DIR && \
make zlib && rm -rf src/deps/zlib Makefile
FROM bun-base as libarchive
@@ -219,14 +183,29 @@ RUN install_packages autoconf automake libtool pkg-config
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/libarchive ${BUN_DIR}/src/deps/libarchive
ENV CCACHE_DIR=/ccache
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && \
RUN cd $BUN_DIR && \
make libarchive && rm -rf src/deps/libarchive Makefile
FROM bun-base as oniguruma
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
RUN install_packages autoconf automake libtool pkg-config
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/oniguruma ${BUN_DIR}/src/deps/oniguruma
WORKDIR $BUN_DIR
RUN make oniguruma && rm -rf src/deps/oniguruma Makefile
FROM bun-base as tinycc
@@ -243,6 +222,27 @@ ENV CPU_TARGET=${CPU_TARGET}
RUN install_packages libtcc-dev && cp /usr/lib/$(uname -m)-linux-gnu/libtcc.a ${BUN_DEPS_OUT_DIR}
FROM bun-base as libbacktrace
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/libbacktrace ${BUN_DIR}/src/deps/libbacktrace
WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make libbacktrace && rm -rf src/deps/libbacktrace Makefile
FROM bun-base as boringssl
RUN install_packages golang
@@ -263,9 +263,27 @@ COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl
WORKDIR $BUN_DIR
ENV CCACHE_DIR=/ccache
RUN make boringssl && rm -rf src/deps/boringssl Makefile
RUN --mount=type=cache,target=/ccache cd ${BUN_DIR} && make boringssl && rm -rf src/deps/boringssl Makefile
FROM bun-base as base64
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/base64 ${BUN_DIR}/src/base64
WORKDIR $BUN_DIR
RUN make base64 && rm -rf src/base64 Makefile
FROM bun-base as uws
@@ -303,8 +321,6 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
@@ -376,12 +392,11 @@ ENV CPU_TARGET=${CPU_TARGET}
WORKDIR $BUN_DIR
COPY ./root.zig ${BUN_DIR}/root.zig
COPY ./src ${BUN_DIR}/src
COPY ./build.zig ${BUN_DIR}/build.zig
COPY ./completions ${BUN_DIR}/completions
COPY ./packages ${BUN_DIR}/packages
COPY ./src/build-id ${BUN_DIR}/src/build-id
COPY ./build-id ${BUN_DIR}/build-id
COPY ./package.json ${BUN_DIR}/package.json
COPY ./misctools ${BUN_DIR}/misctools
COPY Makefile ${BUN_DIR}/Makefile
@@ -419,14 +434,12 @@ ENV GIT_SHA=${GIT_SHA}
COPY --from=identifier_cache ${BUN_DIR}/src/js_lexer/*.blob ${BUN_DIR}/src/js_lexer/
COPY --from=node_fallbacks ${BUN_DIR}/src/node-fallbacks/out ${BUN_DIR}/src/node-fallbacks/out
COPY ./src/build-id ${BUN_DIR}/src/build-id
COPY ./build-id ${BUN_DIR}/build-id
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && make prerelease && \
RUN cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && make prerelease && \
mkdir -p $BUN_RELEASE_DIR && \
OUTPUT_DIR=/tmp/bun-${TRIPLET}-${GIT_SHA} $ZIG_PATH/zig build obj -Doutput-dir=/tmp/bun-${TRIPLET}-${GIT_SHA} -Drelease-fast -Dtarget="${TRIPLET}" -Dcpu="${CPU_TARGET}" && \
cp /tmp/bun-${TRIPLET}-${GIT_SHA}/bun.o /tmp/bun-${TRIPLET}-${GIT_SHA}/bun-${BUN_BASE_VERSION}.$(cat ${BUN_DIR}/src/build-id).o && cd / && rm -rf $BUN_DIR
OUTPUT_DIR=/tmp $ZIG_PATH/zig build obj -Drelease-fast -Dtarget="${TRIPLET}" -Dcpu="${CPU_TARGET}" && \
cp /tmp/bun.o /tmp/bun-${BUN_BASE_VERSION}.$(cat ${BUN_DIR}/build-id).o && cd / && rm -rf $BUN_DIR
FROM scratch as build_release_obj
@@ -437,13 +450,12 @@ ARG ZIG_PATH
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG GIT_SHA
ARG TRIPLET
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY --from=compile_release_obj /tmp/bun-${TRIPLET}-${GIT_SHA}/*.o /
COPY --from=compile_release_obj /tmp/*.o /
FROM prepare_release as compile_cpp
@@ -465,15 +477,26 @@ WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=oniguruma ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=libbacktrace ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
# Required for `make webcrypto`
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && mkdir -p $BUN_RELEASE_DIR && make webcrypto && \
RUN cd $BUN_DIR && mkdir -p src/bun.js/bindings-obj && rm -rf $HOME/.cache zig-cache && mkdir -p $BUN_RELEASE_DIR && make webcrypto && \
make release-bindings -j10 && mv ${BUN_DEPS_OUT_DIR}/libwebcrypto.a /tmp && mv src/bun.js/bindings-obj/* /tmp
FROM bun-base as sqlite
FROM prepare_release as sqlite
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
@@ -484,17 +507,14 @@ ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile
COPY src/bun.js/bindings/sqlite ${BUN_DIR}/src/bun.js/bindings/sqlite
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make sqlite
RUN cd $BUN_DIR && make sqlite
FROM scratch as build_release_cpp
@@ -523,16 +543,17 @@ ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=libbacktrace ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=oniguruma ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=sqlite ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=build_release_obj /*.o /tmp
COPY --from=build_release_cpp /*.o ${BUN_DIR}/src/bun.js/bindings-obj/
COPY --from=build_release_cpp /*.a ${BUN_DEPS_OUT_DIR}/

View File

@@ -1,18 +1,12 @@
ARG DEBIAN_FRONTEND=noninteractive
ARG GITHUB_WORKSPACE=/build
ARG BUILDARCH=amd64
ARG ARCH=x86_64
ARG ZIG_PATH=${GITHUB_WORKSPACE}/zig
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
ARG BUN_PACKAGES_DIR=${BUN_DIR}/packages
ARG ZIG_VERSION="0.11.0-dev.1314+9856bea34"
ARG ZIG_FOLDERNAME=zig-linux-${ARCH}-${ZIG_VERSION}
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
FROM --platform=linux/${BUILDARCH} ubuntu:22.04 as bun.devcontainer
@@ -43,7 +37,7 @@ RUN apt-get update && \
add-apt-repository ppa:longsleep/golang-backports && \
wget https://apt.llvm.org/llvm.sh --no-check-certificate && \
chmod +x llvm.sh && \
./llvm.sh 15 && \
./llvm.sh 13 && \
curl -fsSL https://deb.nodesource.com/setup_16.x | bash - && \
apt-get update && \
apt-get install --no-install-recommends -y \
@@ -56,16 +50,16 @@ RUN apt-get update && \
git \
libssl-dev \
ruby \
liblld-15-dev \
libclang-15-dev \
liblld-13-dev \
libclang-13-dev \
nodejs \
gcc \
g++ \
clang-15 \
clang-format-15 \
libc++-15-dev \
libc++abi-15-dev \
lld-15 \
clang-13 \
clang-format-13 \
libc++-13-dev \
libc++abi-13-dev \
lld-13 \
libicu-dev \
wget \
rustc \
@@ -73,38 +67,35 @@ RUN apt-get update && \
unzip \
tar \
golang-go ninja-build pkg-config automake autoconf libtool curl && \
update-alternatives --install /usr/bin/cc cc /usr/bin/clang-15 90 && \
update-alternatives --install /usr/bin/cpp cpp /usr/bin/clang++-15 90 && \
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-15 90 && \
update-alternatives --install /usr/bin/cc cc /usr/bin/clang-13 90 && \
update-alternatives --install /usr/bin/cpp cpp /usr/bin/clang++-13 90 && \
update-alternatives --install /usr/bin/c++ c++ /usr/bin/clang++-13 90 && \
npm install -g esbuild
ENV CC=clang-15
ENV CXX=clang++-15
ENV CC=clang-13
ENV CXX=clang++-13
ENV ZIG "${ZIG_PATH}/zig"
WORKDIR $GITHUB_WORKSPACE
RUN cd / && mkdir -p $BUN_RELEASE_DIR $BUN_DEPS_OUT_DIR ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
WORKDIR $GITHUB_WORKSPACE
ARG ZIG_FOLDERNAME
ARG ZIG_FILENAME
ARG ZIG_URL
ADD $ZIG_URL .
RUN tar -xf ${ZIG_FILENAME} && \
rm ${ZIG_FILENAME} && \
mv ${ZIG_FOLDERNAME} ${ZIG_PATH};
RUN cd $GITHUB_WORKSPACE && \
curl -o zig-linux-$BUILDARCH.zip -L https://github.com/oven-sh/zig/releases/download/jul1/zig-linux-$BUILDARCH.zip && \
unzip -q zig-linux-$BUILDARCH.zip && \
rm zig-linux-$BUILDARCH.zip;
RUN cd $GITHUB_WORKSPACE && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/dec30/bun-webkit-linux-$BUILDARCH.tar.gz && \
curl -o bun-webkit-linux-$BUILDARCH.tar.gz -L https://github.com/oven-sh/WebKit/releases/download/oct28/bun-webkit-linux-$BUILDARCH.tar.gz && \
tar -xzf bun-webkit-linux-$BUILDARCH.tar.gz && \
rm bun-webkit-linux-$BUILDARCH.tar.gz && \
cat $WEBKIT_OUT_DIR/include/cmakeconfig.h > /dev/null
RUN apt-get -y update && update-alternatives --install /usr/bin/lldb lldb /usr/bin/lldb-15 90
RUN apt-get -y update && update-alternatives --install /usr/bin/lldb lldb /usr/bin/lldb-13 90
COPY .devcontainer/workspace.code-workspace $GITHUB_WORKSPACE/workspace.code-workspace
COPY .devcontainer/zls.json $GITHUB_WORKSPACE/workspace.code-workspace

267
Makefile
View File

@@ -4,10 +4,6 @@ OS_NAME := $(shell uname -s | tr '[:upper:]' '[:lower:]')
ARCH_NAME_RAW := $(shell uname -m)
BUN_AUTO_UPDATER_REPO = Jarred-Sumner/bun-releases-for-updater
CMAKE_CXX_COMPILER_LAUNCHER_FLAG :=
# 'make' command will trigger the help target
.DEFAULT_GOAL := help
@@ -39,7 +35,7 @@ NATIVE_OR_OLD_MARCH = -march=westmere
endif
MIN_MACOS_VERSION ?= $(DEFAULT_MIN_MACOS_VERSION)
BUN_BASE_VERSION = 0.5
BUN_BASE_VERSION = 0.2
AR=
@@ -54,12 +50,12 @@ DEBUG_PACKAGE_DIR = $(PACKAGES_REALPATH)/debug-$(PACKAGE_NAME)
RELEASE_BUN = $(PACKAGE_DIR)/bun
DEBUG_BIN = $(DEBUG_PACKAGE_DIR)/
DEBUG_BUN = $(DEBUG_BIN)/bun-debug
BUILD_ID = $(shell cat ./src/build-id)
BUILD_ID = $(shell cat ./build-id)
PACKAGE_JSON_VERSION = $(BUN_BASE_VERSION).$(BUILD_ID)
BUN_BUILD_TAG = bun-v$(PACKAGE_JSON_VERSION)
BUN_RELEASE_BIN = $(PACKAGE_DIR)/bun
PRETTIER ?= $(shell which prettier || echo "./node_modules/.bin/prettier")
DSYMUTIL ?= $(shell which dsymutil || which dsymutil-15)
DSYMUTIL ?= $(shell which dsymutil || which dsymutil-13)
WEBKIT_DIR ?= $(realpath src/bun.js/WebKit)
WEBKIT_RELEASE_DIR ?= $(WEBKIT_DIR)/WebKitBuild/Release
WEBKIT_DEBUG_DIR ?= $(WEBKIT_DIR)/WebKitBuild/Debug
@@ -74,39 +70,20 @@ ZIG ?= $(shell which zig || echo -e "error: Missing zig. Please make sure zig is
# This is easier to happen than you'd expect.
# Using realpath here causes issues because clang uses clang++ as a symlink
# so if that's resolved, it won't build for C++
REAL_CC = $(shell which clang-15 || which clang)
REAL_CXX = $(shell which clang++-15 || which clang++)
CC = $(REAL_CC)
CXX = $(REAL_CXX)
CCACHE_CC_OR_CC := $(REAL_CC)
CCACHE_PATH := $(shell which ccache 2>/dev/null)
CCACHE_CC_FLAG = CC=$(CCACHE_CC_OR_CC)
ifeq (,$(findstring,$(shell which ccache),ccache))
CMAKE_CXX_COMPILER_LAUNCHER_FLAG := -DCMAKE_CXX_COMPILER_LAUNCHER=$(CCACHE_PATH) -DCMAKE_C_COMPILER_LAUNCHER=$(CCACHE_PATH)
CCACHE_CC_OR_CC := "$(CCACHE_PATH) $(REAL_CC)"
export CCACHE_COMPILERTYPE = clang
CCACHE_CC_FLAG = CC=$(CCACHE_CC_OR_CC) CCACHE_COMPILER=$(REAL_CC)
CCACHE_CXX_FLAG = CXX=$(CCACHE_PATH) CCACHE_COMPILER=$(REAL_CXX)
endif
CXX_WITH_CCACHE = $(CCACHE_PATH) $(CXX)
CC_WITH_CCACHE = $(CCACHE_PATH) $(CC)
CC = $(shell which clang-13 || which clang)
CXX = $(shell which clang++-13 || which clang++)
ifeq ($(OS_NAME),darwin)
# Find LLVM
ifeq ($(wildcard $(LLVM_PREFIX)),)
LLVM_PREFIX = $(shell brew --prefix llvm@15)
LLVM_PREFIX = $(shell brew --prefix llvm@13)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
LLVM_PREFIX = $(shell brew --prefix llvm)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
# This is kinda ugly, but I can't find a better way to error :(
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@15' or set LLVM_PREFIX=/path/to/llvm")
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@13' or set LLVM_PREFIX=/path/to/llvm")
endif
LDFLAGS += -L$(LLVM_PREFIX)/lib
@@ -141,15 +118,7 @@ LIBICONV_PATH ?= $(BREW_PREFIX_PATH)/opt/libiconv/lib/libiconv.a
OPENSSL_LINUX_DIR = $(BUN_DEPS_DIR)/openssl/openssl-OpenSSL_1_1_1l
CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
-DCMAKE_CXX_COMPILER=$(CXX) \
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
-DCMAKE_AR=$(AR) \
-DCMAKE_RANLIB=$(which llvm-15-ranlib || which llvm-ranlib)
CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) -DCMAKE_CXX_COMPILER=$(CXX) -DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION)
CMAKE_FLAGS = $(CMAKE_FLAGS_WITHOUT_RELEASE) -DCMAKE_BUILD_TYPE=Release
# SQLite3 is dynamically linked on macOS
@@ -158,23 +127,24 @@ CMAKE_FLAGS = $(CMAKE_FLAGS_WITHOUT_RELEASE) -DCMAKE_BUILD_TYPE=Release
SQLITE_OBJECT =
BITCODE_OR_SECTIONS=
EMBED_OR_EMIT_BITCODE=
LIBTOOL=libtoolize
ifeq ($(OS_NAME),darwin)
LIBTOOL=glibtoolize
AR=$(LLVM_PREFIX)/bin/llvm-ar
BITCODE_OR_SECTIONS=
BITCODE_OR_SECTIONS=-fembed-bitcode
endif
ifeq ($(OS_NAME),linux)
LIBICONV_PATH =
AR = $(shell which llvm-ar-15 || which llvm-ar || which ar)
AR = $(shell which llvm-ar-13 || which llvm-ar || which ar)
endif
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE)
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(BITCODE_OR_SECTIONS) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(EMBED_OR_EMIT_BITCODE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
BUN_TMP_DIR := /tmp/make-bun
CFLAGS=$(CFLAGS_WITHOUT_MARCH) $(MARCH_NATIVE)
@@ -183,26 +153,18 @@ DEFAULT_USE_BMALLOC := 1
USE_BMALLOC ?= DEFAULT_USE_BMALLOC
# Set via postinstall
AUTO_JSX_BASE_DIR ?= $(realpath $(firstword $(wildcard bun-webkit)))
ifeq (,$(AUTO_JSX_BASE_DIR))
AUTO_JSX_BASE_DIR ?= $(HOME)/webkit-build
endif
JSC_BASE_DIR ?= $(AUTO_JSX_BASE_DIR)
JSC_BASE_DIR ?= ${HOME}/webkit-build
DEFAULT_JSC_LIB :=
DEFAULT_JSC_LIB_DEBUG :=
ifeq ($(OS_NAME),linux)
DEFAULT_JSC_LIB = $(JSC_BASE_DIR)/lib
DEFAULT_JSC_LIB_DEBUG = $(DEFAULT_JSC_LIB)
ifneq (,$(realpath $(WEBKIT_RELEASE_DIR_LTO)/lib))
DEFAULT_JSC_LIB = $(WEBKIT_RELEASE_DIR_LTO)/lib
endif
ifneq (,$(realpath $(WEBKIT_RELEASE_DIR)/lib))
ifeq ($(OS_NAME),darwin)
DEFAULT_JSC_LIB = $(WEBKIT_RELEASE_DIR_LTO)/lib
DEFAULT_JSC_LIB_DEBUG = $(WEBKIT_RELEASE_DIR)/lib
endif
@@ -266,7 +228,7 @@ STRIP=/usr/bin/strip
endif
ifeq ($(OS_NAME),linux)
STRIP=$(shell which llvm-strip || which llvm-strip-15 || which strip || echo "Missing strip")
STRIP=$(shell which llvm-strip || which llvm-strip-13 || which strip || echo "Missing strip")
endif
@@ -338,7 +300,7 @@ LINUX_INCLUDE_DIRS := $(ALL_JSC_INCLUDE_DIRS) \
UWS_INCLUDE_DIR := -I$(BUN_DEPS_DIR)/uws/uSockets/src -I$(BUN_DEPS_DIR)/uws/src -I$(BUN_DEPS_DIR)
INCLUDE_DIRS := $(UWS_INCLUDE_DIR) -I$(BUN_DEPS_DIR)/mimalloc/include -Isrc/napi -I$(BUN_DEPS_DIR)/boringssl/include -I$(BUN_DEPS_DIR)/c-ares/include
INCLUDE_DIRS := $(UWS_INCLUDE_DIR) -I$(BUN_DEPS_DIR)/mimalloc/include -Isrc/napi -I$(BUN_DEPS_DIR)/boringssl/include
ifeq ($(OS_NAME),linux)
@@ -405,17 +367,17 @@ ifeq ($(OS_NAME), darwin)
SYMBOLS=-exported_symbols_list $(realpath src/symbols.txt)
PLATFORM_LINKER_FLAGS += -DDU_DISABLE_RENAMING=1 \
-lstdc++ \
-fno-keep-static-consts -lresolv
-fno-keep-static-consts
endif
ifeq ($(OS_NAME),linux)
SYMBOLS=-Wl,--dynamic-list $(realpath src/symbols.dyn) -Wl,--version-script=$(realpath src/linker.lds)
SYMBOLS=-Wl,--dynamic-list $(realpath src/symbols.dyn)
endif
SHARED_LIB_EXTENSION = .so
JSC_BINDINGS = $(BINDINGS_OBJ) $(JSC_FILES)
JSC_BINDINGS_DEBUG = $(DEBUG_BINDINGS_OBJ) $(JSC_FILES_DEBUG)
JSC_BINDINGS_DEBUG = $(DEBUG_BINDINGS_OBJ) $(JSC_FILES_DEBUG)
RELEASE_FLAGS=
DEBUG_FLAGS=
@@ -432,38 +394,24 @@ MINIMUM_ARCHIVE_FILES = -L$(BUN_DEPS_OUT_DIR) \
-lz \
$(BUN_DEPS_OUT_DIR)/picohttpparser.o \
$(_MIMALLOC_LINK) \
-ldecrepit \
-lssl \
-lcrypto \
-llolhtml
-llolhtml \
-lonig \
$(BUN_DEPS_OUT_DIR)/libbacktrace.a \
ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MINIMUM_ARCHIVE_FILES) \
-larchive \
-lbase64 \
-ltcc \
-lusockets \
-lcares \
$(BUN_DEPS_OUT_DIR)/libuwsockets.o
ARCHIVE_FILES = $(ARCHIVE_FILES_WITHOUT_LIBCRYPTO)
ARCHIVE_FILES = $(ARCHIVE_FILES_WITHOUT_LIBCRYPTO)
STATIC_MUSL_FLAG ?=
WRAP_SYMBOLS_ON_LINUX =
ifeq ($(OS_NAME), linux)
WRAP_SYMBOLS_ON_LINUX = -Wl,--wrap=fcntl -Wl,--wrap=fcntl64 -Wl,--wrap=stat64 -Wl,--wrap=pow -Wl,--wrap=exp -Wl,--wrap=log -Wl,--wrap=log2 \
-Wl,--wrap=lstat \
-Wl,--wrap=stat \
-Wl,--wrap=fstat \
-Wl,--wrap=fstatat \
-Wl,--wrap=lstat64 \
-Wl,--wrap=stat64 \
-Wl,--wrap=fstat64 \
-Wl,--wrap=fstatat64 \
-Wl,--wrap=mknod \
-Wl,--wrap=mknodat \
-Wl,--wrap=statx
PLATFORM_LINKER_FLAGS = $(BUN_CFLAGS) \
-fuse-ld=lld \
-Wl,-z,now \
@@ -474,15 +422,15 @@ PLATFORM_LINKER_FLAGS = $(BUN_CFLAGS) \
-static-libgcc \
-fno-omit-frame-pointer \
-Wl,--compress-debug-sections,zlib \
-l:libatomic.a \
${STATIC_MUSL_FLAG} \
-Wl,-Bsymbolic-functions \
-fno-semantic-interposition \
-flto \
-Wl,--allow-multiple-definition \
-rdynamic
endif
@@ -495,9 +443,9 @@ BUN_LLD_FLAGS_WITHOUT_JSC = $(ARCHIVE_FILES) \
BUN_LLD_FLAGS = $(BUN_LLD_FLAGS_WITHOUT_JSC) $(WRAP_SYMBOLS_ON_LINUX) $(JSC_FILES) $(BINDINGS_OBJ) -lwebcrypto
BUN_LLD_FLAGS_DEBUG = $(BUN_LLD_FLAGS_WITHOUT_JSC) $(WRAP_SYMBOLS_ON_LINUX) $(JSC_FILES_DEBUG) $(DEBUG_BINDINGS_OBJ) -lwebcrypto-debug
BUN_LLD_FLAGS_FAST = $(BUN_LLD_FLAGS_WITHOUT_JSC) $(WRAP_SYMBOLS_ON_LINUX) $(JSC_FILES_DEBUG) $(BINDINGS_OBJ) -lwebcrypto-debug
BUN_LLD_FLAGS = $(BUN_LLD_FLAGS_WITHOUT_JSC) $(JSC_FILES) $(BINDINGS_OBJ) -lwebcrypto
BUN_LLD_FLAGS_DEBUG = $(BUN_LLD_FLAGS_WITHOUT_JSC) $(JSC_FILES_DEBUG) $(DEBUG_BINDINGS_OBJ) -lwebcrypto-debug
BUN_LLD_FLAGS_FAST = $(BUN_LLD_FLAGS_WITHOUT_JSC) $(JSC_FILES_DEBUG) $(BINDINGS_OBJ) -lwebcrypto-debug
CLANG_VERSION = $(shell $(CC) --version | awk '/version/ {for(i=1; i<=NF; i++){if($$i=="version"){split($$(i+1),v,".");print v[1]}}}')
@@ -506,12 +454,16 @@ CLANG_VERSION = $(shell $(CC) --version | awk '/version/ {for(i=1; i<=NF; i++){i
bun:
npm-install:
$(NPM_CLIENT) install --ignore-scripts --production
print-% : ; @echo $* = $($*)
$(NPM_CLIENT) install
.PHONY: base64
base64:
cd src/base64 && \
rm -rf src/base64/*.{o,ll,bc} && \
$(CC) $(EMIT_LLVM_FOR_RELEASE) $(BUN_CFLAGS) $(OPTIMIZATION_LEVEL) -g -fPIC -c *.c -I$(SRC_DIR)/base64 && \
$(CXX) $(EMIT_LLVM_FOR_RELEASE) $(CXXFLAGS) $(BUN_CFLAGS) -c neonbase64.cc -g -fPIC && \
$(AR) rcvs $(BUN_DEPS_OUT_DIR)/libbase64.a ./*.o
# Prevent dependency on libtcc1 so it doesn't do filesystem lookups
TINYCC_CFLAGS= -DTCC_LIBTCC1=\"\0\"
@@ -523,7 +475,7 @@ TINYCC_CFLAGS= -DTCC_LIBTCC1=\"\0\"
tinycc:
cd $(TINYCC_DIR) && \
make clean && \
AR=$(AR) $(CCACHE_CC_FLAG) CFLAGS='$(CFLAGS_WITHOUT_MARCH) $(NATIVE_OR_OLD_MARCH) -mtune=native $(TINYCC_CFLAGS)' ./configure --enable-static --cc=$(CCACHE_CC_OR_CC) --ar=$(AR) --config-predefs=yes && \
AR=$(AR) CC=$(CC) CFLAGS='$(CFLAGS_WITHOUT_MARCH) $(NATIVE_OR_OLD_MARCH) -mtune=native $(TINYCC_CFLAGS)' ./configure --enable-static --cc=$(CC) --ar=$(AR) --config-predefs=yes && \
make -j10 && \
cp $(TINYCC_DIR)/*.a $(BUN_DEPS_OUT_DIR)
@@ -549,25 +501,14 @@ builtins: ## to generate builtins
.PHONY: generate-builtins
generate-builtins: builtins
.PHONY: vendor-without-check
vendor-without-check: npm-install node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive libbacktrace lolhtml usockets uws base64 tinycc oniguruma
BUN_TYPES_REPO_PATH ?= $(realpath packages/bun-types)
ifeq ($(DEBUG),true)
BUN_RELEASE_BIN = bun
endif
.PHONY: c-ares
c-ares:
rm -rf $(BUN_DEPS_DIR)/c-ares/build && \
mkdir $(BUN_DEPS_DIR)/c-ares/build && \
cd $(BUN_DEPS_DIR)/c-ares/build && \
cmake $(CMAKE_FLAGS) -DCMAKE_C_FLAGS="$(CFLAGS) -flto=full" -DCMAKE_BUILD_TYPE=Release -DCARES_STATIC=ON -DCARES_STATIC_PIC=ON -DCARES_SHARED=OFF -G "Ninja" .. && \
ninja && cp lib/libcares.a $(BUN_DEPS_OUT_DIR)/libcares.a
BUN_TYPES_REPO_PATH ?= $(realpath ../bun-types)
.PHONY: prepare-types
prepare-types:
BUN_VERSION=$(PACKAGE_JSON_VERSION) $(BUN_RELEASE_BIN) $(BUN_TYPES_REPO_PATH)/scripts/bundle.ts $(BUN_TYPES_REPO_PATH)/dist
BUN_VERSION=$(PACKAGE_JSON_VERSION) $(BUN_RELEASE_BIN) $(BUN_TYPES_REPO_PATH)/bundle.ts $(BUN_TYPES_REPO_PATH)/dist
echo "Generated types for $(PACKAGE_JSON_VERSION) in $(BUN_TYPES_REPO_PATH)/dist"
cp $(BUN_TYPES_REPO_PATH)/dist/types.d.ts /tmp/bun-types.d.ts
cd /tmp && $(PACKAGE_DIR)/../../node_modules/.bin/tsc /tmp/bun-types.d.ts
@@ -575,13 +516,12 @@ prepare-types:
release-types:
# can be removed when/if "bun publish" is implemented
@npm --version >/dev/null 2>&1 || (echo -e "ERROR: npm is required."; exit 1)
cd $(BUN_TYPES_REPO_PATH)/dist && npm publish --dry-run
cd $(BUN_TYPES_REPO_PATH)/dist && npm publish
.PHONY: format
format: ## to format the code
-$(PRETTIER) --write 'test/bun.js/*.{js,jsx,ts,tsx}'
-$(PRETTIER) --write 'test/bun.js/solid-dom-fixtures/**/*.{js,jsx,ts,tsx}'
$(PRETTIER) --write test/bun.js/*.js
$(PRETTIER) --write test/bun.js/solid-dom-fixtures/**/*.js
.PHONY: lolhtml
lolhtml:
@@ -590,7 +530,7 @@ lolhtml:
# no asm is not worth it!!
.PHONY: boringssl-build
boringssl-build:
cd $(BUN_DEPS_DIR)/boringssl && mkdir -p build && cd build && CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS) -DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=lld" -GNinja .. && ninja libcrypto.a libssl.a libdecrepit.a
cd $(BUN_DEPS_DIR)/boringssl && mkdir -p build && cd build && CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS) -DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=lld" -GNinja .. && ninja
.PHONY: boringssl-build-debug
boringssl-build-debug:
@@ -599,7 +539,6 @@ boringssl-build-debug:
boringssl-copy:
cp $(BUN_DEPS_DIR)/boringssl/build/ssl/libssl.a $(BUN_DEPS_OUT_DIR)/libssl.a
cp $(BUN_DEPS_DIR)/boringssl/build/crypto/libcrypto.a $(BUN_DEPS_OUT_DIR)/libcrypto.a
cp $(BUN_DEPS_DIR)/boringssl/build/decrepit/libdecrepit.a $(BUN_DEPS_OUT_DIR)/libdecrepit.a
.PHONY: boringssl
boringssl: boringssl-build boringssl-copy
@@ -610,6 +549,21 @@ boringssl-debug: boringssl-build-debug boringssl-copy
compile-ffi-test:
clang $(OPTIMIZATION_LEVEL) -shared -undefined dynamic_lookup -o /tmp/bun-ffi-test.dylib -fPIC ./test/bun.js/ffi-test.c
.PHONY: libbacktrace
libbacktrace:
cd $(BUN_DEPS_DIR)/libbacktrace && \
CFLAGS="$(CFLAGS)" CC=$(CC) ./configure --disable-shared --enable-static --with-pic && \
make -j$(CPUS) && \
cp ./.libs/libbacktrace.a $(BUN_DEPS_OUT_DIR)/libbacktrace.a
.PHONY: oniguruma
oniguruma:
cd $(BUN_DEPS_DIR)/oniguruma && \
autoreconf -vfi && \
CFLAGS="$(CFLAGS)" CC=$(CC) ./configure && \
make -j${CPUS} && \
cp ./src/.libs/libonig.a $(BUN_DEPS_OUT_DIR)/libonig.a
sqlite:
@@ -619,7 +573,7 @@ libarchive:
(make clean || echo ""); \
(./build/clean.sh || echo ""); \
./build/autogen.sh; \
CFLAGS="$(CFLAGS)" $(CCACHE_CC_FLAG) ./configure --disable-shared --enable-static --with-pic --disable-bsdtar --disable-bsdcat --disable-rpath --enable-posix-regex-lib --without-xml2 --without-expat --without-openssl --without-iconv --without-zlib; \
CFLAGS="$(CFLAGS)" CC=$(CC) ./configure --disable-shared --enable-static --with-pic --disable-bsdtar --disable-bsdcat --disable-rpath --enable-posix-regex-lib --without-xml2 --without-expat --without-openssl --without-iconv --without-zlib; \
make -j${CPUS}; \
cp ./.libs/libarchive.a $(BUN_DEPS_OUT_DIR)/libarchive.a;
@@ -635,8 +589,11 @@ tgz-debug:
$(CXX) $(DEBUG_PACKAGE_DIR)/tgz.o -g -o ./misctools/tgz $(DEFAULT_LINKER_FLAGS) -lc $(ARCHIVE_FILES)
rm -rf $(DEBUG_PACKAGE_DIR)/tgz.o
vendor: require init-submodules vendor-without-check
zlib:
cd $(BUN_DEPS_DIR)/zlib; make clean; $(CCACHE_CC_FLAG) CFLAGS="$(CFLAGS)" ./configure --static && make -j${CPUS} && cp ./libz.a $(BUN_DEPS_OUT_DIR)/libz.a
cd $(BUN_DEPS_DIR)/zlib; CFLAGS="$(CFLAGS)" cmake $(CMAKE_FLAGS) .; CFLAGS="$(CFLAGS)" make;
cp $(BUN_DEPS_DIR)/zlib/libz.a $(BUN_DEPS_OUT_DIR)/libz.a
ifeq ($(POSIX_PKG_MANAGER), brew)
PKGNAME_NINJA := ninja
@@ -647,7 +604,7 @@ endif
.PHONY: require
require:
@echo "Checking if the required utilities are available..."
@if [ $(CLANG_VERSION) -lt "15" ]; then echo -e "ERROR: clang version >=15 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@15"; exit 1; fi
@if [ $(CLANG_VERSION) -lt "13" ]; then echo -e "ERROR: clang version >=13 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@13"; exit 1; fi
@cmake --version >/dev/null 2>&1 || (echo -e "ERROR: cmake is required."; exit 1)
@esbuild --version >/dev/null 2>&1 || (echo -e "ERROR: esbuild is required."; exit 1)
@$(NPM_CLIENT) --version >/dev/null 2>&1 || (echo -e "ERROR: NPM client (bun or npm) is required."; exit 1)
@@ -726,12 +683,12 @@ USOCKETS_SRC_DIR = $(BUN_DEPS_DIR)/uws/uSockets/src/
usockets:
rm -rf $(BUN_DEPS_DIR)/uws/uSockets/*.o $(BUN_DEPS_DIR)/uws/uSockets/**/*.o $(BUN_DEPS_DIR)/uws/uSockets/*.a $(BUN_DEPS_DIR)/uws/uSockets/*.bc
cd $(USOCKETS_DIR) && $(CC_WITH_CCACHE) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CFLAGS) $(UWS_CC_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -g -c $(wildcard $(USOCKETS_SRC_DIR)/*.c) $(wildcard $(USOCKETS_SRC_DIR)/**/*.c)
cd $(USOCKETS_DIR) && $(CXX_WITH_CCACHE) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CXXFLAGS) $(UWS_CXX_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -g -c $(wildcard $(USOCKETS_SRC_DIR)/*.cpp) $(wildcard $(USOCKETS_SRC_DIR)/**/*.cpp)
cd $(USOCKETS_DIR) && $(CC) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CFLAGS) $(UWS_CC_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -g -c $(wildcard $(USOCKETS_SRC_DIR)/*.c) $(wildcard $(USOCKETS_SRC_DIR)/**/*.c)
cd $(USOCKETS_DIR) && $(CXX) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CXXFLAGS) $(UWS_CXX_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -g -c $(wildcard $(USOCKETS_SRC_DIR)/*.cpp) $(wildcard $(USOCKETS_SRC_DIR)/**/*.cpp)
cd $(USOCKETS_DIR) && $(AR) rcvs $(BUN_DEPS_OUT_DIR)/libusockets.a $(USOCKETS_DIR)/*.{o,bc}
uws: usockets
$(CXX_WITH_CCACHE) $(EMIT_LLVM_FOR_RELEASE) -fPIC -I$(BUN_DEPS_DIR)/uws/uSockets/src $(CLANG_FLAGS) $(CFLAGS) $(UWS_CXX_FLAGS) $(UWS_LDFLAGS) $(PLATFORM_LINKER_FLAGS) -c -I$(BUN_DEPS_DIR) $(BUN_DEPS_OUT_DIR)/libusockets.a $(BUN_DEPS_DIR)/libuwsockets.cpp -o $(BUN_DEPS_OUT_DIR)/libuwsockets.o
$(CXX) $(BITCODE_OR_SECTIONS) $(EMIT_LLVM_FOR_RELEASE) -fPIC -I$(BUN_DEPS_DIR)/uws/uSockets/src $(CLANG_FLAGS) $(CFLAGS) $(UWS_CXX_FLAGS) $(UWS_LDFLAGS) $(PLATFORM_LINKER_FLAGS) -c -I$(BUN_DEPS_DIR) $(BUN_DEPS_OUT_DIR)/libusockets.a $(BUN_DEPS_DIR)/libuwsockets.cpp -o $(BUN_DEPS_OUT_DIR)/libuwsockets.o
.PHONY: sign-macos-x64
sign-macos-x64:
@@ -897,7 +854,7 @@ clone-submodules:
git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress
.PHONY: devcontainer
devcontainer: $(OBJ_DIR) $(DEBUG_OBJ_DIR) clone-submodules mimalloc zlib libarchive boringssl picohttp identifier-cache node-fallbacks npm-install api analytics bun_error fallback_decoder bindings uws lolhtml usockets tinycc c-ares runtime_js_dev sqlite webcrypto-debug webcrypto
devcontainer: $(OBJ_DIR) $(DEBUG_OBJ_DIR) clone-submodules libbacktrace mimalloc zlib libarchive boringssl picohttp identifier-cache node-fallbacks npm-install api analytics bun_error fallback_decoder bindings uws lolhtml usockets base64 tinycc runtime_js_dev sqlite oniguruma webcrypto-debug webcrypto
.PHONY: devcontainer-build
devcontainer-build:
@@ -935,7 +892,7 @@ MIMALLOC_OVERRIDE_FLAG ?=
bump:
expr 0.4.0 + 1 > build-id
expr 0.2.0 + 1 > build-id
.PHONY: identifier-cache
identifier-cache:
@@ -1243,11 +1200,6 @@ jsc-copy-headers:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/OperandsInlines.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/OperandsInlines.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/Operands.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/Operands.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/domjit/DOMJITHeapRange.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/DOMJITHeapRange.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GeneratorPrototype.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GeneratorPrototype.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GeneratorFunctionPrototype.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GeneratorFunctionPrototype.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/AsyncFunctionPrototype.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AsyncFunctionPrototype.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SymbolObject.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SymbolObject.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSGenerator.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSGenerator.h
find $(WEBKIT_RELEASE_DIR)/JavaScriptCore/Headers/JavaScriptCore/ -name "*.h" -exec cp {} $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ \;
# This is a workaround for a JSC bug that impacts aarch64
@@ -1276,7 +1228,7 @@ jsc-build-mac-compile:
-DUSE_PTHREAD_JIT_PERMISSIONS_API=ON \
$(WEBKIT_DIR) \
$(WEBKIT_RELEASE_DIR) && \
CFLAGS="$(CFLAGS) -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -ffat-lto-objects" \
CFLAGS="$(CFLAGS) $(BITCODE_OR_SECTIONS) -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) $(BITCODE_OR_SECTIONS) -ffat-lto-objects" \
cmake --build $(WEBKIT_RELEASE_DIR) --config Release --target jsc
.PHONY: jsc-build-mac-compile-lto
@@ -1378,7 +1330,6 @@ clean: clean-bindings
(cd $(BUN_DEPS_DIR)/boringssl && make clean) || echo "";
(cd $(BUN_DEPS_DIR)/picohttp && make clean) || echo "";
(cd $(BUN_DEPS_DIR)/zlib && make clean) || echo "";
(cd $(BUN_DEPS_DIR)/c-ares && rm -rf build && make clean) || echo "";
.PHONY: release-bindings
release-bindings: $(OBJ_DIR) $(OBJ_FILES) $(WEBCORE_OBJ_FILES) $(SQLITE_OBJ_FILES) $(NODE_OS_OBJ_FILES) $(BUILTINS_OBJ_FILES) $(IO_FILES) $(MODULES_OBJ_FILES)
@@ -1446,7 +1397,6 @@ bun-link-lld-debug:
$(DEBUG_BIN)/bun-debug.o \
-W \
-o $(DEBUG_BIN)/bun-debug
@rm -f $(DEBUG_BIN)/bun-debug.o.o 2> /dev/null # workaround for https://github.com/ziglang/zig/issues/14080
bun-link-lld-debug-no-jsc:
$(CXX) $(BUN_LLD_FLAGS_WITHOUT_JSC) $(SYMBOLS) \
@@ -1475,7 +1425,6 @@ bun-link-lld-release:
$(OPTIMIZATION_LEVEL) $(RELEASE_FLAGS)
rm -rf $(BUN_RELEASE_BIN).dSYM
cp $(BUN_RELEASE_BIN) $(BUN_RELEASE_BIN)-profile
@rm -f $(BUN_RELEASE_BIN).o.o # workaround for https://github.com/ziglang/zig/issues/14080
bun-release-copy-obj:
cp $(BUN_RELEASE_BIN).o $(BUN_DEPLOY_DIR).o
@@ -1517,11 +1466,9 @@ wasm-return1:
generate-classes:
bun src/bun.js/scripts/generate-classes.ts
$(ZIG) fmt src/bun.js/bindings/generated_classes.zig
clang-format -i src/bun.js/bindings/ZigGeneratedClasses.h src/bun.js/bindings/ZigGeneratedClasses.cpp
generate-sink:
bun src/bun.js/scripts/generate-jssink.js
clang-format -i src/bun.js/bindings/JSSink.cpp src/bun.js/bindings/JSSink.h
$(WEBKIT_DIR)/Source/JavaScriptCore/create_hash_table src/bun.js/bindings/JSSink.cpp > src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/#include "Lookup.h"//' src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/namespace JSC {//' src/bun.js/bindings/JSSinkLookupTable.h
@@ -1542,7 +1489,7 @@ $(DEBUG_OBJ_DIR):
mkdir -p $(DEBUG_OBJ_DIR)
$(OBJ_DIR)/%.o: $(SRC_DIR)/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(CXX) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
-fno-exceptions \
@@ -1552,7 +1499,7 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/%.cpp
-c -o $@ $<
$(OBJ_DIR)/%.o: src/bun.js/modules/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(CXX) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
-fno-exceptions \
@@ -1562,7 +1509,7 @@ $(OBJ_DIR)/%.o: src/bun.js/modules/%.cpp
-c -o $@ $<
$(OBJ_DIR)/%.o: $(SRC_DIR)/webcore/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(CXX) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
-fno-exceptions \
@@ -1572,7 +1519,7 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/webcore/%.cpp
-c -o $@ $<
$(OBJ_DIR)/%.o: $(SRC_DIR)/sqlite/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(CXX) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
-fno-exceptions \
@@ -1582,7 +1529,7 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/sqlite/%.cpp
-c -o $@ $<
$(OBJ_DIR)/%.o: src/io/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(CXX) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
-fno-exceptions \
@@ -1592,7 +1539,7 @@ $(OBJ_DIR)/%.o: src/io/%.cpp
-c -o $@ $<
$(OBJ_DIR)/%.o: $(SRC_DIR)/node_os/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(CXX) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
-fno-exceptions \
@@ -1602,7 +1549,7 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/node_os/%.cpp
-c -o $@ $<
$(OBJ_DIR)/%.o: src/bun.js/builtins/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(CXX) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
-fno-exceptions \
@@ -1613,103 +1560,89 @@ $(OBJ_DIR)/%.o: src/bun.js/builtins/%.cpp
# $(DEBUG_OBJ_DIR) is not included here because it breaks
# detecting if a file needs to be rebuilt
.PHONY: $(SRC_DIR)/%.cpp
$(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(CXX) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
-DBUN_DEBUG \
$(EMIT_LLVM_FOR_DEBUG) \
-g3 -c -o $@ $<
# $(DEBUG_OBJ_DIR) is not included here because it breaks
# detecting if a file needs to be rebuilt
.PHONY: $(SRC_DIR)/webcore/%.cpp
$(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/webcore/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(CXX) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
.PHONY: src/io/%.cpp
$(DEBUG_OBJ_DIR)/%.o: src/io/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(CXX) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
-DBUN_DEBUG \
$(EMIT_LLVM_FOR_DEBUG) \
-g3 -c -o $@ $<
# $(DEBUG_OBJ_DIR) is not included here because it breaks
# detecting if a file needs to be rebuilt
.PHONY: $(SRC_DIR)/sqlite/%.cpp
$(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/sqlite/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(CXX) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
# $(DEBUG_OBJ_DIR) is not included here because it breaks
# detecting if a file needs to be rebuilt
.PHONY: $(SRC_DIR)/node_os/%.cpp
$(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/node_os/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(CXX) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
# $(DEBUG_OBJ_DIR) is not included here because it breaks
# detecting if a file needs to be rebuilt
.PHONY: src/bun.js/builtins/%.cpp
$(DEBUG_OBJ_DIR)/%.o: src/bun.js/builtins/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(CXX) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
.PHONY: src/bun.js/modules/%.cpp
$(DEBUG_OBJ_DIR)/%.o: src/bun.js/modules/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(CXX) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
-fno-exceptions \
-fno-rtti \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
$(DEBUG_OBJ_DIR)/webcrypto/%.o: src/bun.js/bindings/webcrypto/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(CXX) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
-fno-exceptions \
@@ -1717,12 +1650,9 @@ $(DEBUG_OBJ_DIR)/webcrypto/%.o: src/bun.js/bindings/webcrypto/%.cpp
-fno-rtti \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
.PHONY: webcrypto-debug-obj
# Make all the .cpp files in the webcrypto directory into .o files using Makefile substitutions
webcrypto-debug-obj: $(patsubst src/bun.js/bindings/webcrypto/%.cpp, $(DEBUG_OBJ_DIR)/webcrypto/%.o, $(wildcard src/bun.js/bindings/webcrypto/*.cpp))
@@ -1736,7 +1666,7 @@ webcrypto-debug:
$(OBJ_DIR)/webcrypto/%.o: src/bun.js/bindings/webcrypto/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(CXX) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
-fno-exceptions \
@@ -1759,7 +1689,7 @@ webcrypto:
sizegen:
mkdir -p $(BUN_TMP_DIR)
$(CXX) src/bun.js/headergen/sizegen.cpp -Wl,-dead_strip -Wl,-dead_strip_dylibs -fuse-ld=lld -o $(BUN_TMP_DIR)/sizegen $(CLANG_FLAGS) -O1
$(CXX) src/bun.js/headergen/sizegen.cpp -Wl,-dead_strip -Wl,-dead_strip_dylibs -fuse-ld=lld -o $(BUN_TMP_DIR)/sizegen $(CLANG_FLAGS) -O1
$(BUN_TMP_DIR)/sizegen > src/bun.js/bindings/sizes.zig
@@ -1920,14 +1850,7 @@ copy-to-bun-release-dir-bin:
cp -r $(PACKAGE_DIR)/bun $(BUN_RELEASE_DIR)/bun
cp -r $(PACKAGE_DIR)/bun-profile $(BUN_RELEASE_DIR)/bun-profile
PACKAGE_MAP = --pkg-begin async_io $(BUN_DIR)/src/io/io_darwin.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin javascript_core $(BUN_DIR)/src/jsc.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end
.PHONY: vendor-without-check
vendor-without-check: npm-install node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares
.PHONY: vendor
vendor: require init-submodules vendor-without-check
PACKAGE_MAP = --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-begin io $(BUN_DIR)/src/io/io_$(OS_NAME).zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin boringssl $(BUN_DIR)/src/boringssl.zig --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin boringssl $(BUN_DIR)/src/boringssl.zig --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-end --pkg-end --pkg-end --pkg-end --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin clap $(BUN_DIR)/src/deps/zig-clap/clap.zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin boringssl $(BUN_DIR)/src/boringssl.zig --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-begin io $(BUN_DIR)/src/io/io_darwin.zig --pkg-end --pkg-begin boringssl $(BUN_DIR)/src/boringssl.zig --pkg-end --pkg-begin thread_pool $(BUN_DIR)/src/thread_pool.zig --pkg-end --pkg-end --pkg-end --pkg-end --pkg-begin boringssl $(BUN_DIR)/src/boringssl.zig --pkg-end --pkg-begin javascript_core $(BUN_DIR)/src/jsc.zig --pkg-begin http $(BUN_DIR)/src/http_client_async.zig --pkg-end --pkg-begin strings $(BUN_DIR)/src/string_immutable.zig --pkg-end --pkg-begin picohttp $(BUN_DIR)/src/deps/picohttp.zig --pkg-end --pkg-end
.PHONY: bun
bun: vendor identifier-cache build-obj bun-link-lld-release bun-codesign-release-local

815
README.md

File diff suppressed because it is too large

Binary file not shown.

View File

@@ -1,169 +0,0 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

View File

@@ -1,43 +0,0 @@
# expect-to-equal
To install dependencies:
```bash
bun install
```
To run in Bun:
```bash
# so it doesn't run the vitest one
bun wiptest expect-to-equal.test.js
```
To run in Jest:
```bash
# If you remove the import the performance doesn't change much
NODE_OPTIONS="--experimental-vm-modules" ./node_modules/.bin/jest expect-to-equal.test.js
```
To run in Vitest:
```bash
./node_modules/.bin/vitest --run expect-to-equal.vitest.test.js
```
Output on my machine (M1):
bun:test (bun v0.3.0):
> [36.40ms] expect().toEqual() x 10000
jest (node v18.11.0)
> expect().toEqual() x 10000: 5053 ms
vitest (node v18.11.0)
> expect().toEqual() x 10000: 401.08ms
This project was created using `bun init` in bun v0.3.0. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime.

Binary file not shown.

View File

@@ -1,42 +0,0 @@
// bun automatically rewrites this import to bun:test when run in bun
import { test, expect } from "@jest/globals";
const N = parseInt(process.env.RUN_COUNT || "10000", 10);
if (!Number.isSafeInteger(N)) {
throw new Error("Invalid RUN_COUNT");
}
const label = "expect().toEqual() x " + N;
test(label, () => {
console.time(label);
for (let runsLeft = N; runsLeft > 0; runsLeft--) {
expect("hello").toEqual("hello");
expect(123).toEqual(123);
expect({ a: 1, b: 2 }).toEqual({ b: 2, a: 1 });
expect([1, 2, 3]).toEqual([1, 2, 3]);
expect({ a: 1, b: 2 }).not.toEqual({ b: 2, a: 1, c: 3 });
expect([1, 2, 3]).not.toEqual([1, 2, 3, 4]);
expect({ a: 1, b: 2, c: 3 }).not.toEqual({ a: 1, b: 2 });
expect([1, 2, 3, 4]).not.toEqual([1, 2, 3]);
let a = [{ a: 1 }, { b: 2, c: 3, d: 4 }, { e: 5, f: 6 }];
let b = [{ a: 1 }, { b: 2, c: 3, d: 4 }, { e: 5, f: 6 }];
expect(a).toEqual(b);
expect(b).toEqual(a);
a[0].a = 2;
expect(a).not.toEqual(b);
expect(b).not.toEqual(a);
let c = { [Symbol("test")]: 1 };
let d = { [Symbol("test")]: 1 };
expect(c).not.toEqual(d);
expect(d).not.toEqual(c);
a = { a: 1, b: 2, c: 3 };
b = { a: 1, b: 2 };
expect(a).not.toEqual(b);
}
console.timeEnd(label);
});
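The `Symbol` cases in the test above hinge on symbol identity rather than on the matcher itself. A minimal sketch of the underlying JavaScript behavior (plain language semantics, not part of this diff):

```js
// Each Symbol("test") call produces a distinct symbol, so c and d end up with
// different keys and expect(c).not.toEqual(d) is the expected outcome above.
const c = { [Symbol("test")]: 1 };
const d = { [Symbol("test")]: 1 };
console.log(Symbol("test") === Symbol("test")); // false
console.log(
  Object.getOwnPropertySymbols(c)[0] === Object.getOwnPropertySymbols(d)[0],
); // false
```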

View File

@@ -1,41 +0,0 @@
import { test, expect } from "vitest";
const N = parseInt(process.env.RUN_COUNT || "10000", 10);
if (!Number.isSafeInteger(N)) {
throw new Error("Invalid RUN_COUNT");
}
const label = "expect().toEqual() x " + N;
test(label, () => {
console.time(label);
for (let runsLeft = N; runsLeft > 0; runsLeft--) {
expect("hello").toEqual("hello");
expect(123).toEqual(123);
expect({ a: 1, b: 2 }).toEqual({ b: 2, a: 1 });
expect([1, 2, 3]).toEqual([1, 2, 3]);
expect({ a: 1, b: 2 }).not.toEqual({ b: 2, a: 1, c: 3 });
expect([1, 2, 3]).not.toEqual([1, 2, 3, 4]);
expect({ a: 1, b: 2, c: 3 }).not.toEqual({ a: 1, b: 2 });
expect([1, 2, 3, 4]).not.toEqual([1, 2, 3]);
let a = [{ a: 1 }, { b: 2, c: 3, d: 4 }, { e: 5, f: 6 }];
let b = [{ a: 1 }, { b: 2, c: 3, d: 4 }, { e: 5, f: 6 }];
expect(a).toEqual(b);
expect(b).toEqual(a);
a[0].a = 2;
expect(a).not.toEqual(b);
expect(b).not.toEqual(a);
let c = { [Symbol("test")]: 1 };
let d = { [Symbol("test")]: 1 };
expect(c).not.toEqual(d);
expect(d).not.toEqual(c);
a = { a: 1, b: 2, c: 3 };
b = { a: 1, b: 2 };
expect(a).not.toEqual(b);
}
console.timeEnd(label);
});

View File

@@ -1 +0,0 @@
console.log("Hello via Bun!");

View File

@@ -1,9 +0,0 @@
{
"name": "expect-to-equal",
"module": "index.ts",
"type": "module",
"devDependencies": {
"jest": "^29.3.1",
"vitest": "^0.25.3"
}
}

View File

@@ -1,20 +0,0 @@
{
"compilerOptions": {
"lib": [
"ESNext"
],
"module": "esnext",
"target": "esnext",
"moduleResolution": "nodenext",
"strict": true,
"downlevelIteration": true,
"skipLibCheck": true,
"jsx": "preserve",
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"types": [
"bun-types" // add Bun global
]
}
}

View File

@@ -7,7 +7,7 @@ bench(`fetch(https://example.com) x ${count}`, async () => {
for (let i = 0; i < requests.length; i++) {
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then((r) =>
r.text(),
r.text()
);
}

View File

@@ -7,7 +7,7 @@ bench(`fetch(https://example.com) x ${count}`, async () => {
for (let i = 0; i < requests.length; i++) {
requests[i] = fetch(`https://www.example.com/?cachebust=${i}`).then((r) =>
r.text(),
r.text()
);
}

View File

@@ -3,7 +3,7 @@ import { run, bench, group } from "../node_modules/mitata/src/cli.mjs";
const extension = "darwin" !== Deno.build.os ? "so" : "dylib";
const path = new URL(
"src/target/release/libffi_napi_bench." + extension,
import.meta.url,
import.meta.url
).pathname;
const {

Binary file not shown.

View File

@@ -43,7 +43,7 @@ if (process.env.PROJECT === "bun") {
},
shell: false,
},
}
);
}
@@ -114,5 +114,5 @@ async function main() {
main().catch((error) =>
setTimeout(() => {
throw error;
}),
})
);

View File

@@ -1,4 +1,3 @@
// @ts-nocheck
import "../src/index.css";
import App from "next/app";

View File

@@ -3,7 +3,6 @@ export function IndexPage() {
return (
<Main
productName={
// @ts-ignore
typeof location !== "undefined" ? location.search.substring(1) : ""
}
/>

View File

@@ -70,9 +70,9 @@ fs.writeFileSync(
`${process.platform}-${
process.arch === "arm64" ? "aarch64" : process.arch
}` +
".json",
".json"
),
JSON.stringify(report, null, 2),
JSON.stringify(report, null, 2)
);
console.log(
@@ -101,7 +101,7 @@ console.log(
TOTAL_FRAMES,
"(" +
Math.round(
Math.max(Math.min(1.0, timings.length / TOTAL_FRAMES), 0) * 100,
Math.max(Math.min(1.0, timings.length / TOTAL_FRAMES), 0) * 100
) +
"%)",
"%)"
);

View File

@@ -1,11 +1,11 @@
export const Main = (props: { productName: string; cssInJS?: string }) => {
export const Main = ({ productName, cssInJS }) => {
return (
<>
<header>
<div className="Title">CSS HMR Stress Test!</div>
<p className="Description">
This page visually tests how quickly a bundler can update{" "}
{props.cssInJS ? "CSS-in-JS" : "CSS"} over Hot Module Reloading.
{cssInJS ? "CSS-in-JS" : "CSS"} over Hot Module Reloading.
</p>
</header>
<main className="main">
@@ -53,9 +53,9 @@ export const Main = (props: { productName: string; cssInJS?: string }) => {
</div>
<div className="Bundler-container">
<div className="Bundler">{props.productName}</div>
<div className="Bundler">{productName}</div>
<div className="Bundler-updateRate">
{props.cssInJS ? "CSS-in-JS framework: " + props.cssInJS : ""}
{cssInJS ? "CSS-in-JS framework: " + cssInJS : ""}
</div>
</div>
</footer>

View File

@@ -1,8 +1,6 @@
{
"extends": "../../../tsconfig.base.json",
"compilerOptions": {
"baseUrl": ".",
"jsx": "react-jsx",
"paths": {}
}
}
}

View File

@@ -1,12 +1,12 @@
import { bench, run } from "mitata";
bench("JSON.stringify({hello: 'world'})", () =>
JSON.stringify({ hello: "world" }),
JSON.stringify({ hello: "world" })
);
const otherUint8Array = new Uint8Array(1024);
bench("Uint8Array.from(otherUint8Array)", () =>
Uint8Array.from(otherUint8Array),
Uint8Array.from(otherUint8Array)
);
run();

View File

@@ -1,7 +1,5 @@
import { bench, run } from "mitata";
bench("console.log('hello')", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () =>
console.log({ hello: "object" }),
);
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));
await run();

View File

@@ -23,7 +23,7 @@ export const hello${i} = "hello${i}";
${saveStack ? `globalThis.evaluationOrder.push("${file}");` : ""}
globalThis.counter++;
`,
"utf8",
"utf8"
);
var file2 = output + "/file" + i + ".js";
@@ -39,7 +39,7 @@ module.exports.hello${i} = "hello${i}";
${saveStack ? `globalThis.evaluationOrder.push("${file2}");` : ""}
globalThis.counter++;
`,
"utf8",
"utf8"
);
}
@@ -53,7 +53,7 @@ fs.writeFileSync(
: ""
}
`,
"utf8",
"utf8"
);
fs.writeFileSync(
@@ -66,7 +66,7 @@ fs.writeFileSync(
: ""
}
`,
"utf8",
"utf8"
);
fs.writeFileSync(
@@ -79,7 +79,7 @@ fs.writeFileSync(
console.timeEnd("import");
${saveStack ? `console.log(globalThis.evaluationOrder.join("\\n"));` : ""}
console.log("Loaded", globalThis.counter, "files", "totaling", new Intl.NumberFormat().format(globalThis.exportCounter), 'exports');`,
"utf8",
"utf8"
);
fs.writeFileSync(
@@ -92,7 +92,7 @@ export const THE_END = Foo.THE_END;
console.timeEnd("import.meta.require");
${saveStack ? `console.log(globalThis.evaluationOrder.join("\\n"));` : ""}
console.log("Loaded", globalThis.counter, "files", "totaling", new Intl.NumberFormat().format(globalThis.exportCounter), 'exports');`,
"utf8",
"utf8"
);
fs.writeFileSync(
@@ -106,7 +106,7 @@ fs.writeFileSync(
console.timeEnd("import.meta.require");
${saveStack ? `console.log(globalThis.evaluationOrder.join("\\n"));` : ""}
console.log("Loaded", globalThis.counter, "files", "totaling", new Intl.NumberFormat().format(globalThis.exportCounter), 'exports');`,
"utf8",
"utf8"
);
fs.writeFileSync(
@@ -120,7 +120,7 @@ fs.writeFileSync(
${saveStack ? `console.log(globalThis.evaluationOrder.join("\\n"));` : ""}
console.log("Loaded", globalThis.counter, "files", "totaling", new Intl.NumberFormat().format(globalThis.exportCounter), 'exports');
`,
"utf8",
"utf8"
);
console.log(`

View File

@@ -14,8 +14,5 @@
"async": "cd async && bun run deps && bun run build && bun run bench",
"sqlite": "cd sqlite && bun run deps && bun run build && bun run bench",
"modules:node_os": "cd modules/node_os && bun run deps &&bun run build && bun run bench"
},
"devDependencies": {
"fast-deep-equal": "^3.1.3"
}
}

Binary file not shown.

View File

@@ -1,12 +0,0 @@
import { bench, run } from "mitata";
function doIt(...args) {
// we use .at() to prevent constant folding optimizations
return args.slice().at(0);
}
bench("Array.prototype.slice.call(arguments)", () => {
return doIt(1, 2, 3, 4, 5, 6);
});
await run();
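For contrast with the `.at()` indirection used above, a hypothetical naive variant (not in the diff) that an optimizing JIT could in principle reduce to a constant, which is exactly what the comment guards against:

```js
// Hypothetical naive benchmark body: the argument list and index are
// compile-time constants, so a JIT may fold the whole expression to 1 and the
// benchmark would stop measuring Array.prototype.slice at all.
function doItNaive() {
  return [1, 2, 3, 4, 5, 6].slice()[0];
}
console.log(doItNaive()); // 1
```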

View File

@@ -1,30 +0,0 @@
import { ArrayBufferSink } from "bun";
import { bench, run } from "mitata";
var short = "Hello World!";
var shortUTF16 = "Hello World 💕💕💕";
var long = "Hello World!".repeat(1024);
var longUTF16 = "Hello World 💕💕💕".repeat(1024);
var encoder = new ArrayBufferSink({ stream: true, highWaterMark: 512 });
bench(`${short.length} ascii`, () => {
encoder.write(short);
encoder.start();
});
bench(`${short.length} utf8`, () => {
encoder.write(shortUTF16);
encoder.start();
});
bench(`${long.length} ascii`, () => {
encoder.write(long);
encoder.start();
});
bench(`${longUTF16.length} utf8`, () => {
encoder.write(longUTF16);
encoder.start();
});
await run();

View File

@@ -1,15 +0,0 @@
import { bench, group, run } from "mitata";
import * as assert from "assert";
bench("deepEqual", () => {
assert.deepEqual({ foo: "123", bar: "baz" }, { foo: "123", bar: "baz" });
});
bench("deepStrictEqual", () => {
assert.deepStrictEqual(
{ foo: "123", beep: "boop" },
{ foo: "123", beep: "boop" },
);
});
await run();
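The two assertions benchmarked above also differ in comparison semantics, which matters when reading their timings side by side. A small illustration using standard Node `assert` behavior (not part of this diff):

```js
import * as assert from "assert";

// deepEqual compares primitives loosely (==); deepStrictEqual compares strictly (===),
// so the same pair of objects can pass one and fail the other.
assert.deepEqual({ a: 1 }, { a: "1" }); // passes: 1 == "1"
assert.throws(() => assert.deepStrictEqual({ a: 1 }, { a: "1" })); // 1 !== "1"
```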

View File

@@ -5,19 +5,6 @@ bench("async function(){}", async function () {});
bench("await 1", async function () {
return await 1;
});
function callnextTick(resolve) {
process.nextTick(resolve);
}
function awaitNextTick() {
return new Promise(callnextTick);
}
bench("promise.nextTick", async function () {
return awaitNextTick();
});
bench("await new Promise(resolve => resolve())", async function () {
await new Promise((resolve) => resolve());
});
@@ -25,7 +12,7 @@ bench(
"Promise.all(Array.from({length: 100}, () => new Promise((resolve) => resolve())))",
async function () {
return Promise.all(Array.from({ length: 100 }, () => Promise.resolve(1)));
},
}
);
await run();

View File

@@ -1,29 +0,0 @@
import { bench, run } from "mitata";
const N = parseInt(process.argv.slice(2).at(0) || "10", 10);
bench("new Buffer(0)", () => {
return new Buffer(0);
});
bench(`new Buffer(${N})`, () => {
return new Buffer(N);
});
bench(`Buffer.alloc(${N})`, () => {
return Buffer.alloc(N);
});
bench(`Buffer.allocUnsafe(${N})`, () => {
return Buffer.allocUnsafe(N);
});
bench("Buffer.allocUnsafe(24_000)", () => {
return Buffer.allocUnsafe(24_000);
});
bench("Buffer.alloc(24_000)", () => {
return Buffer.alloc(24_000);
});
await run();
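A note on why the allocation benchmarks above are expected to diverge, based on standard Node `Buffer` semantics rather than anything introduced by this diff:

```js
// Buffer.alloc(n) zero-fills the new memory, while Buffer.allocUnsafe(n) returns
// uninitialized memory from Buffer's internal pool, so allocUnsafe is typically
// faster but must be fully written before it is read.
const zeroed = Buffer.alloc(8); // <Buffer 00 00 00 00 00 00 00 00>
const pooled = Buffer.allocUnsafe(8); // contents are arbitrary until written
pooled.fill(0); // make it safe to read
```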

View File

@@ -1,53 +0,0 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const json = {
login: "wongmjane",
id: 1332975,
node_id: "MDQ6VXNlcjEzMzI5NzU=",
avatar_url: "https://avatars.githubusercontent.com/u/1332975?v=4",
gravatar_id: "",
url: "https://api.github.com/users/wongmjane",
html_url: "https://github.com/wongmjane",
followers_url: "https://api.github.com/users/wongmjane/followers",
following_url:
"https://api.github.com/users/wongmjane/following{/other_user}",
gists_url: "https://api.github.com/users/wongmjane/gists{/gist_id}",
starred_url: "https://api.github.com/users/wongmjane/starred{/owner}{/repo}",
subscriptions_url: "https://api.github.com/users/wongmjane/subscriptions",
organizations_url: "https://api.github.com/users/wongmjane/orgs",
repos_url: "https://api.github.com/users/wongmjane/repos",
events_url: "https://api.github.com/users/wongmjane/events{/privacy}",
received_events_url: "https://api.github.com/users/wongmjane/received_events",
type: "User",
site_admin: false,
name: null,
company: null,
blog: "https://wongmjane.com",
location: null,
email: null,
hireable: null,
bio: null,
twitter_username: "wongmjane",
public_repos: 0,
public_gists: 8,
followers: 1197,
following: 135,
created_at: "2012-01-16T07:01:22Z",
updated_at: "2022-11-23T16:12:24Z",
};
const inspect =
"Bun" in globalThis
? Bun.inspect
: "Deno" in globalThis
? Deno.inspect
: (await import("util")).inspect;
bench("big json object", () => {
console.error(json);
});
bench("inspect big json object", () => {
console.error(inspect(json));
});
await run();

View File

@@ -1,36 +0,0 @@
// so it can run in environments without node module resolution
import { bench, run } from "mitata";
import crypto from "node:crypto";
var foo = Buffer.allocUnsafe(16384);
foo.fill(123);
// if ("Bun" in globalThis) {
// const { CryptoHasher } = Bun;
// bench("CryptoHasher Blake2b256", () => {
// var hasher = new CryptoHasher("blake2b256");
// hasher.update(foo);
// hasher.digest();
// });
// }
bench('crypto.createHash("sha512")', () => {
var hasher = crypto.createHash("sha512");
hasher.update(foo);
hasher.digest();
});
bench('crypto.createHash("sha256")', () => {
var hasher = crypto.createHash("sha256");
hasher.update(foo);
hasher.digest();
});
bench('crypto.createHash("sha1")', () => {
var hasher = crypto.createHash("sha1");
hasher.update(foo);
hasher.digest();
});
await run();

View File

@@ -1,509 +0,0 @@
import { bench, group, run } from "mitata";
import fastDeepEquals from "fast-deep-equal/es6/index";
// const Date = globalThis.Date;
function func1() {}
function func2() {}
const s = Symbol("foo");
const a1 = [1, 2, 3, 4];
a1[s] = "f00";
const a2 = [1, 2, 3, 4];
a2[s] = "f00";
const e1 = new Set();
e1.add([1, 2, 3]);
e1.add("test1");
e1.add(498);
e1.add({ a: 1, b: 2 });
e1.add({ a: 1, b: 434221 });
e1.add({ a: 1, b: 25 });
e1.add({ a: 1, b: 4 });
e1.add({ a: 1, b: 2667 });
e1.add({ a: 1, b: 2 });
e1.add({ a: 1, b: 23426 });
e1.add({ a: 1, b: 672 });
e1.add({ a: 1, b: 28465 });
const e2 = new Set();
e2.add([1, 2, 3]);
e2.add("test1");
e2.add(498);
e1.add({ a: 1, b: 2 });
e1.add({ a: 1, b: 434221 });
e1.add({ a: 1, b: 25 });
e1.add({ a: 1, b: 4 });
e1.add({ a: 1, b: 2667 });
e1.add({ a: 1, b: 2 });
e1.add({ a: 1, b: 23426 });
e1.add({ a: 1, b: 672 });
e1.add({ a: 1, b: 28465 });
const d1 = new Set();
d1.add({ a: 1, b: 2 });
const d2 = new Set();
d2.add({ a: 1, b: 2 });
const fixture = [
{
description: "scalars",
tests: [
{
description: "equal numbers",
value1: 1,
value2: 1,
equal: true,
},
{
description: "not equal numbers",
value1: 1,
value2: 2,
equal: false,
},
{
description: "number and array are not equal",
value1: 1,
value2: [],
equal: false,
},
{
description: "0 and null are not equal",
value1: 0,
value2: null,
equal: false,
},
{
description: "equal strings",
value1: "azzzz",
value2: "azzzz",
equal: true,
},
{
description: "not equal strings",
value1: "azzzz",
value2: "bzzzz",
equal: false,
},
{
description: "empty string and null are not equal",
value1: "",
value2: null,
equal: false,
},
{
description: "null is equal to null",
value1: null,
value2: null,
equal: true,
},
{
description: "equal booleans (true)",
value1: true,
value2: true,
equal: true,
},
{
description: "equal booleans (false)",
value1: false,
value2: false,
equal: true,
},
{
description: "not equal booleans",
value1: true,
value2: false,
equal: false,
},
{
description: "1 and true are not equal",
value1: 1,
value2: true,
equal: false,
},
{
description: "0 and false are not equal",
value1: 0,
value2: false,
equal: false,
},
{
description: "NaN and NaN are equal",
value1: NaN,
value2: NaN,
equal: true,
},
{
description: "0 and -0 are equal",
value1: 0,
value2: -0,
equal: true,
},
{
description: "Infinity and Infinity are equal",
value1: Infinity,
value2: Infinity,
equal: true,
},
{
description: "Infinity and -Infinity are not equal",
value1: Infinity,
value2: -Infinity,
equal: false,
},
],
},
{
description: "objects",
tests: [
{
description: "empty objects are equal",
value1: {},
value2: {},
equal: true,
},
{
description: 'equal objects (same properties "order")',
value1: { a: 1, b: "2" },
value2: { a: 1, b: "2" },
equal: true,
},
{
description: 'equal objects (different properties "order")',
value1: { a: 1, b: "2" },
value2: { b: "2", a: 1 },
equal: true,
},
{
description: "not equal objects (extra property)",
value1: { a: 1, b: "2" },
value2: { a: 1, b: "2", c: [] },
equal: false,
},
{
description: "not equal objects (different property values)",
value1: { a: 1, b: "2", c: 3 },
value2: { a: 1, b: "2", c: 4 },
equal: false,
},
{
description: "not equal objects (different properties)",
value1: { a: 1, b: "2", c: 3 },
value2: { a: 1, b: "2", d: 3 },
equal: false,
},
{
description: "equal objects (same sub-properties)",
value1: { a: [{ b: "c" }] },
value2: { a: [{ b: "c" }] },
equal: true,
},
{
description: "not equal objects (different sub-property value)",
value1: { a: [{ b: "c" }] },
value2: { a: [{ b: "d" }] },
equal: false,
},
{
description: "not equal objects (different sub-property)",
value1: { a: [{ b: "c" }] },
value2: { a: [{ c: "c" }] },
equal: false,
},
{
description: "empty array and empty object are not equal",
value1: {},
value2: [],
equal: false,
},
{
description: "object with extra undefined properties are not equal #1",
value1: {},
value2: { foo: undefined },
equal: false,
},
{
description: "object with extra undefined properties are not equal #2",
value1: { foo: undefined },
value2: {},
equal: false,
},
{
description: "object with extra undefined properties are not equal #3",
value1: { foo: undefined },
value2: { bar: undefined },
equal: false,
},
{
description: "nulls are equal",
value1: null,
value2: null,
equal: true,
},
{
description: "null and undefined are not equal",
value1: null,
value2: undefined,
equal: false,
},
{
description: "null and empty object are not equal",
value1: null,
value2: {},
equal: false,
},
{
description: "undefined and empty object are not equal",
value1: undefined,
value2: {},
equal: false,
},
{
description:
"objects with different `toString` functions returning same values are equal",
value1: { toString: () => "Hello world!" },
value2: { toString: () => "Hello world!" },
equal: true,
},
{
description:
"objects with `toString` functions returning different values are not equal",
value1: { toString: () => "Hello world!" },
value2: { toString: () => "Hi!" },
equal: false,
},
],
},
{
description: "arrays",
tests: [
{
description: "two empty arrays are equal",
value1: [],
value2: [],
equal: true,
},
{
description: "equal arrays",
value1: [1, 2, 3],
value2: [1, 2, 3],
equal: true,
},
{
description: "equal arrays with symbols",
value1: a1,
value2: a2,
equal: true,
},
// {
// description: "not equal arrays (different item)",
// value1: [1, 2, 3],
// value2: [1, 2, 4],
// equal: false,
// },
// {
// description: "not equal arrays (different length)",
// value1: [1, 2, 3],
// value2: [1, 2],
// equal: false,
// },
{
description: "equal arrays of objects",
value1: [
...Array.from({ length: 200000 }, (i) => ({
a: 1,
b: 2,
})),
],
value2: [
...Array.from({ length: 200000 }, (i) => ({
a: 1,
b: 2,
})),
],
equal: true,
},
{
description: "equal objects",
value1: {
a: 1,
b: 2,
c: 3,
d: 4,
// get foo() {
// return 1;
// },
},
value2: {
a: 1,
b: 2,
c: 3,
d: 4,
// get foo() {
// return 1;
// },
},
equal: true,
},
{
description: "equal sets",
value1: d1,
value2: d2,
equal: true,
},
// {
// description: "not equal arrays of objects",
// value1: [{ a: "a" }, { b: "b" }],
// value2: [{ a: "a" }, { b: "c" }],
// equal: false,
// },
// {
// description: "pseudo array and equivalent array are not equal",
// value1: { 0: 0, 1: 1, length: 2 },
// value2: [0, 1],
// equal: false,
// },
],
},
{
description: "Date objects",
tests: [
{
description: "equal date objects",
value1: new Date("2017-06-16T21:36:48.362Z"),
value2: new Date("2017-06-16T21:36:48.362Z"),
equal: true,
},
{
description: "not equal date objects",
value1: new Date("2017-06-16T21:36:48.362Z"),
value2: new Date("2017-01-01T00:00:00.000Z"),
equal: false,
},
{
description: "date and string are not equal",
value1: new Date("2017-06-16T21:36:48.362Z"),
value2: "2017-06-16T21:36:48.362Z",
equal: false,
},
{
description: "date and object are not equal",
value1: new Date("2017-06-16T21:36:48.362Z"),
value2: {},
equal: false,
},
],
},
{
description: "RegExp objects",
tests: [
{
description: "equal RegExp objects",
value1: /foo/,
value2: /foo/,
equal: true,
},
{
description: "not equal RegExp objects (different pattern)",
value1: /foo/,
value2: /bar/,
equal: false,
},
{
description: "not equal RegExp objects (different flags)",
value1: /foo/,
value2: /foo/i,
equal: false,
},
{
description: "RegExp and string are not equal",
value1: /foo/,
value2: "foo",
equal: false,
},
{
description: "RegExp and object are not equal",
value1: /foo/,
value2: {},
equal: false,
},
],
},
{
description: "functions",
tests: [
{
description: "same function is equal",
value1: func1,
value2: func1,
equal: true,
},
{
description: "different functions are not equal",
value1: func1,
value2: func2,
equal: false,
},
],
},
{
description: "sample objects",
tests: [
{
description: "big object",
value1: {
prop1: "value1",
prop2: "value2",
prop3: "value3",
prop4: {
subProp1: "sub value1",
subProp2: {
subSubProp1: "sub sub value1",
subSubProp2: [1, 2, { prop2: 1, prop: 2 }, 4, 5],
},
},
prop5: 1000,
// prop6: new Date(2016, 2, 10),
},
value2: {
prop5: 1000,
prop3: "value3",
prop1: "value1",
prop2: "value2",
// prop6: new Date(2016, 2, 10),
prop4: {
subProp2: {
subSubProp1: "sub sub value1",
subSubProp2: [1, 2, { prop2: 1, prop: 2 }, 4, 5],
},
subProp1: "sub value1",
},
},
equal: true,
},
],
},
];
for (let { tests, description } of fixture) {
// if (description === "sample objects") {
for (let { description: describe, value1, value2, equal } of tests) {
var expected;
group(describe, () => {
for (let equalsFn of [Bun.deepEquals, fastDeepEquals]) {
bench(equalsFn.name, () => {
expected = equalsFn(value1, value2);
if (expected !== equal) {
throw new Error(
`Expected ${expected} to be ${equal} for ${description}`,
);
}
});
}
});
// }
}
}
await run();
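
For reference, a minimal sketch of the API this benchmark exercises, assuming a Bun runtime for `Bun.deepEquals` (the values mirror fixture cases above, so the expected results come from the fixture itself; this snippet is not part of the benchmark file):

console.log(Bun.deepEquals({ a: 1, b: "2" }, { b: "2", a: 1 })); // true  -- property order does not matter
console.log(Bun.deepEquals(NaN, NaN));                           // true  -- matches the "NaN and NaN are equal" case
console.log(Bun.deepEquals({}, []));                             // false -- empty object and empty array differ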

View File

@@ -1,145 +0,0 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const properties = {
closed: {
get() {
return this._writableState ? this._writableState.closed : false;
},
},
destroyed: {
get() {
return this._writableState ? this._writableState.destroyed : false;
},
set(value) {
if (this._writableState) {
this._writableState.destroyed = value;
}
},
},
writable: {
get() {
const w = this._writableState;
return (
!!w &&
w.writable !== false &&
!w.destroyed &&
!w.errored &&
!w.ending &&
!w.ended
);
},
set(val) {
if (this._writableState) {
this._writableState.writable = !!val;
}
},
},
writableFinished: {
get() {
return this._writableState ? this._writableState.finished : false;
},
},
writableObjectMode: {
get() {
return this._writableState ? this._writableState.objectMode : false;
},
},
writableBuffer: {
get() {
return this._writableState && this._writableState.getBuffer();
},
},
writableEnded: {
get() {
return this._writableState ? this._writableState.ending : false;
},
},
writableNeedDrain: {
get() {
const wState = this._writableState;
if (!wState) return false;
return !wState.destroyed && !wState.ending && wState.needDrain;
},
},
writableHighWaterMark: {
get() {
return this._writableState && this._writableState.highWaterMark;
},
},
writableCorked: {
get() {
return this._writableState ? this._writableState.corked : 0;
},
},
writableLength: {
get() {
return this._writableState && this._writableState.length;
},
},
errored: {
enumerable: false,
get() {
return this._writableState ? this._writableState.errored : null;
},
},
writableAborted: {
enumerable: false,
get: function () {
return !!(
this._writableState.writable !== false &&
(this._writableState.destroyed || this._writableState.errored) &&
!this._writableState.finished
);
},
},
};
var count = 10_000;
bench("Object.defineProperty x " + count, () => {
const prop = {
enumerable: false,
get: function () {
return !!(
this._writableState.writable !== false &&
(this._writableState.destroyed || this._writableState.errored) &&
!this._writableState.finished
);
},
};
for (let i = 0; i < count; i++) {
function Hey() {
return this;
}
Object.defineProperty(Hey.prototype, "writableAborted", prop);
}
});
bench("Object.defineProperties x " + count, () => {
for (let i = 0; i < count; i++) {
function Hey() {
return this;
}
Object.defineProperties(Hey.prototype, properties);
}
});
bench("(all the keys) Object.defineProperties x " + count, () => {
var first;
{
function Hey() {
return this;
}
Object.defineProperties(Hey.prototype, properties);
first = Object.getOwnPropertyDescriptors(Hey.prototype);
}
for (let i = 0; i < count; i++) {
function Hey() {
return this;
}
Object.defineProperties(Hey.prototype, first);
}
});
await run();
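
A short sketch of the equivalence being timed here (plain ECMAScript; the constructor names are made up for illustration): a loop of Object.defineProperty calls and a single Object.defineProperties call install the same non-enumerable accessors.

function A() {}
Object.defineProperty(A.prototype, "closed", {
  get() {
    return this._writableState ? this._writableState.closed : false;
  },
});
function B() {}
Object.defineProperties(B.prototype, {
  closed: {
    get() {
      return this._writableState ? this._writableState.closed : false;
    },
  },
});
// Object.getOwnPropertyDescriptor(A.prototype, "closed") and
// Object.getOwnPropertyDescriptor(B.prototype, "closed") describe the same accessor.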

View File

@@ -1,76 +0,0 @@
import { lookup, resolve } from "node:dns/promises";
import { bench, run } from "mitata";
bench("(cached) dns.lookup remote x 50", async () => {
var tld = "example.com";
const run = () => lookup(tld).catch(() => {});
const total = 50;
var remain = total;
var done;
await new Promise((resolve) => {
for (var i = 0; i < total; i++)
run().finally(() => {
remain--;
if (remain === 0) {
done();
}
});
done = resolve;
});
});
bench("(cached in batch) dns.lookup remote x 50", async () => {
var tld = Math.random().toString(16) + ".example.com";
const run = () => lookup(tld).catch(() => {});
const total = 50;
var remain = total;
var done;
await new Promise((resolve) => {
for (var i = 0; i < total; i++)
run().finally(() => {
remain--;
if (remain === 0) {
done();
}
});
done = resolve;
});
});
bench("dns.lookup remote x 50", async () => {
var remain = 50;
var done;
const run = () =>
lookup(Math.random().toString() + ".example.com").catch(() => {});
await new Promise((resolve) => {
for (var i = 0; i < 50; i++)
run().finally(() => {
remain--;
if (remain === 0) {
done();
}
});
done = resolve;
});
});
bench("dns.resolve remote x 50", async () => {
var remain = 50;
var done;
const run = () =>
resolve(Math.random().toString() + ".example.com").catch(() => {});
await new Promise((resolve) => {
for (var i = 0; i < 50; i++)
run().finally(() => {
remain--;
if (remain === 0) {
done();
}
});
done = resolve;
});
});
await run();
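
For context, a sketch of the result shapes of the two node:dns/promises calls being compared (standard Node.js behavior; "example.com" is only a placeholder hostname):

import { lookup, resolve } from "node:dns/promises";
// lookup() goes through the system resolver (getaddrinfo) and yields one result:
const { address, family } = await lookup("example.com"); // { address: "<ip string>", family: 4 or 6 }
// resolve() queries DNS servers directly; with the default rrtype "A" it yields
// an array of IPv4 address strings:
const records = await resolve("example.com");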

View File

@@ -1,76 +0,0 @@
import { dns } from "bun";
import { bench, run, group } from "mitata";
async function forEachBackend(name, fn) {
group(name, () => {
for (let backend of [
"libc",
"c-ares",
process.platform === "darwin" ? "system" : "",
].filter(Boolean))
bench(backend, fn(backend));
});
}
forEachBackend("dns.lookup remote x 50", (backend) => async () => {
const run = () =>
dns
.lookup(Math.random().toString(16) + ".example.com", { backend })
.catch(() => {});
var remain = 16;
var done;
await new Promise((resolve) => {
for (var i = 0; i < 16; i++)
run().finally(() => {
remain--;
if (remain === 0) {
done();
}
});
done = resolve;
});
});
forEachBackend("(cached) dns.lookup remote x 50", (backend) => {
var tld = "example.com";
const run = () => dns.lookup(tld, { backend }).catch(() => {});
return async () => {
const total = 50;
var remain = total;
var done;
await new Promise((resolve) => {
for (var i = 0; i < total; i++)
run().finally(() => {
remain--;
if (remain === 0) {
done();
}
});
done = resolve;
});
};
});
forEachBackend(
"(cached in batch) dns.lookup remote x 50",
(backend) => async () => {
var tld = Math.random().toString(16) + ".example.com";
const run = () => dns.lookup(tld, { backend }).catch(() => {});
const total = 50;
var remain = total;
var done;
await new Promise((resolve) => {
for (var i = 0; i < total; i++)
run().finally(() => {
remain--;
if (remain === 0) {
done();
}
});
done = resolve;
});
},
);
await run();

View File

@@ -16,55 +16,18 @@ bench("EventEmitter.emit", () => {
});
});
var monkey = Object.assign({}, EventEmitter.prototype);
monkey.on("hello", (event) => {
event.preventDefault();
});
bench("[monkey] EventEmitter.emit", () => {
var called = false;
monkey.emit("hello", {
preventDefault() {
id++;
called = true;
},
});
if (!called) {
throw new Error("monkey failed");
}
});
bench("EventEmitter.on x 10_000 (handler)", () => {
var cb = (event) => {
var cb = () => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
emitter.off("hey", cb);
if (!called) throw new Error("not called");
});
bench("[monkey] EventEmitter.on x 10_000 (handler)", () => {
var cb = () => {
event.preventDefault();
};
monkey.on("hey", cb);
for (let i = 0; i < 10_000; i++)
monkey.emit("hey", {
preventDefault() {
id++;
},
});
monkey.off("hey", cb);
});
var target = new EventTarget();

View File

@@ -1,10 +0,0 @@
import { run, bench } from "../node_modules/mitata/src/cli.mjs";
const encoder = new TextEncoder();
const buffer = new Uint8Array(1024);
bench("encodeInto", () => {
encoder.encodeInto("Hello World!", buffer);
});
await run();
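
As a reminder of what the benched call returns (standard TextEncoder API, shown here outside the benchmark):

const enc = new TextEncoder();
const out = new Uint8Array(1024);
const { read, written } = enc.encodeInto("Hello World!", out);
// read === 12    (UTF-16 code units consumed from the source string)
// written === 12 (bytes written into `out`; ASCII encodes 1:1 in UTF-8)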

View File

@@ -1,45 +0,0 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
// the pure JS implementation will optimize this out
// bench("new Headers", function () {
// return new Headers();
// });
var big = new Headers({
"Content-Type": "text/plain",
"Content-Length": "123",
hello: "there",
"X-Custom-Header": "Hello World",
"X-Another-Custom-Header": "Hello World",
"X-Yet-Another-Custom-ader": "Hello World",
"X-Yet-Another-Custom-Heder": "Hello World",
"X-Yet-Another-Custom-Heade": "Hello World",
"X-Yet-Another-Custom-Headz": "Hello Worlda",
});
// bench("Header.get", function () {
// return big.get("Content-Type");
// });
// bench("Header.set (standard)", function () {
// return big.set("Content-Type", "text/html");
// });
// bench("Header.set (non-standard)", function () {
// return big.set("X-My-Custom", "text/html123");
// });
if (big.toJSON)
bench("Headers.toJSON", function () {
return big.toJSON();
});
bench("Object.fromEntries(headers.entries())", function () {
return Object.fromEntries(big.entries());
});
bench("Object.fromEntries(headers)", function () {
return Object.fromEntries(big);
});
run();
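
Both conversions above produce the same plain object, because Headers is itself iterable over [name, value] pairs with lowercased names; toJSON is a non-standard extension, which is why the benchmark guards it with `if (big.toJSON)`. A quick sketch:

const h = new Headers({ "Content-Type": "text/plain", hello: "there" });
Object.fromEntries(h.entries()); // { "content-type": "text/plain", hello: "there" }
Object.fromEntries(h);           // same result -- the two benches differ only in the explicit .entries() call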

View File

@@ -1,8 +0,0 @@
var i = 0;
Deno.serve({
port: parseInt(Deno.env.get("PORT") || "3000", 10),
handler(req) {
if (i++ === 200_000 - 1) queueMicrotask(() => Deno.exit(0));
return new Response("Hello, World!" + i);
},
});

View File

@@ -1,7 +0,0 @@
var i = 0;
export default {
fetch(req) {
if (i++ === 200_000 - 1) queueMicrotask(() => process.exit(0));
return new Response("Hello, World!" + i);
},
};

View File

@@ -1,8 +0,0 @@
import { createServer } from "node:http";
var i = 0;
const server = createServer((req, res) => {
res.writeHead(200);
res.end("Hello, World!" + i);
if (i++ === 200_000 - 1) queueMicrotask(() => process.exit(0));
}).listen(parseInt(process.env.PORT || "3000", 10));

View File

@@ -1,47 +0,0 @@
const obj = {
a: 1,
b: 2,
c: 3,
d: 4,
e: 5,
f: 6,
g: 7,
h: 8,
i: 9,
j: 10,
k: 11,
l: 12,
m: 13,
n: 14,
o: 15,
p: 16,
q: 17,
r: 18,
s: 19,
t: 20,
u: 21,
v: 22,
w: 23,
};
import { bench, group, run } from "mitata";
var val = 0;
bench("Object.values(literal)", () => {
obj.a = val++;
Object.values(obj);
});
const objWithMethods = {
...obj,
toString() {},
valueOf() {},
[Symbol.iterator]() {},
[Symbol.toPrimitive]() {},
};
var val = 0;
bench("Object.values(literal with methods)", () => {
objWithMethods.a = val++;
Object.values(objWithMethods);
});
await run();

View File

@@ -1,21 +0,0 @@
import { bench, run } from "mitata";
bench("process.stderr.write('hey')", () => {
process.stderr.write("hey");
});
const long = "hey".repeat(10000);
bench("process.stderr.write('hey'.repeat(10_000))", () => {
process.stderr.write(long);
});
const longUTF16 = "🥟🐰".repeat(10000);
bench("process.stderr.write('🥟🐰')", () => {
process.stderr.write("🥟🐰");
});
bench("process.stderr.write('🥟🐰'.repeat(10_000))", () => {
process.stderr.write(longUTF16);
});
await run();

View File

@@ -1,45 +0,0 @@
import { readFileSync, writeFileSync } from "node:fs";
import { bench, run } from "mitata";
var short = (function () {
const text = "Hello World!";
const path = "/tmp/bun-bench-short.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
var shortUTF16 = (function () {
const text = "Hello World 💕💕💕";
const path = "/tmp/bun-bench-shortUTF16.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
var long = (function () {
const text = "Hello World!".repeat(1024);
const path = "/tmp/bun-bench-long.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
var longUTF16 = (function () {
const text = "Hello World 💕💕💕".repeat(1024);
const path = "/tmp/bun-bench-longUTF16.text";
writeFileSync(path, text, "utf8");
return { path, length: text.length };
})();
bench(`${short.length} ascii`, () => {
readFileSync(short.path, "utf-8");
});
bench(`${short.length} utf8`, () => {
readFileSync(shortUTF16.path, "utf-8");
});
bench(`${long.length} ascii`, () => {
readFileSync(long.path, "utf-8");
});
bench(`${longUTF16.length} utf8`, () => {
readFileSync(longUTF16.path, "utf-8");
});
await run();

View File

@@ -1,9 +0,0 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
bench("spawnSync echo hi", () => {
Deno.spawnSync("echo", {
args: ["hi"],
});
});
await run();

View File

@@ -1,24 +0,0 @@
import { run, bench } from "mitata";
var writer = globalThis.Bun ? Bun.stderr.writer() : undefined;
if (writer)
bench('Bun.stderr.write("Hello World")', () => {
writer.write("Hello World\n");
writer.flush();
});
if (process.stderr) {
bench("process.stderr.write", () => {
process.stderr.write("Hello World\n");
});
}
bench("console.error('Hello World')", () => {
console.error("Hello World");
});
bench("console.error('Hello World', 'wat')", () => {
console.error("Hello World", "wat");
});
await run({ percentiles: false });

View File

@@ -34,18 +34,16 @@ setInterval(() => {
counter = 0;
}, 1000);
if (process.env.IS_SERVER)
listen({
socket: handlers,
hostname: "0.0.0.0",
port: 8000,
data: {
isServer: true,
},
});
else
await connect({
socket: handlers,
hostname: "localhost",
port: 8000,
});
const server = listen({
socket: handlers,
hostname: "localhost",
port: 8080,
data: {
isServer: true,
},
});
const connection = await connect({
socket: handlers,
hostname: "localhost",
port: 8080,
});

View File

@@ -29,68 +29,24 @@ const handlers = {
},
};
if (process.env.IS_SERVER) {
if (net.createServer) {
const server = net.createServer(function (socket) {
socket.data = { isServer: true };
socket.on("connection", handlers.open.bind(socket));
socket.on("data", handlers.data.bind(socket));
socket.on("drain", handlers.drain.bind(socket));
socket.setEncoding("binary");
});
setInterval(() => {
console.log("Wrote", counter, "messages");
counter = 0;
}, 1000);
server.listen(8000);
} else {
const handlers = {
open(socket) {
if (!socket.data?.isServer) {
if (!socket.write(msg)) {
socket.data = { pending: msg };
}
}
},
data(socket, buffer) {
if (!socket.write(buffer)) {
socket.data = { pending: buffer };
return;
}
counter++;
},
drain(socket) {
const pending = socket.data?.pending;
if (!pending) return;
if (socket.write(pending)) {
socket.data = undefined;
counter++;
return;
}
},
};
setInterval(() => {
console.log("Wrote", counter, "messages");
counter = 0;
}, 1000);
const server = Bun.listen({
socket: handlers,
hostname: "0.0.0.0",
port: 8000,
data: {
isServer: true,
},
});
}
} else {
const socket = net.connect({ host: "0.0.0.0", port: 8000 }, () => {});
const server = net.createServer(function (socket) {
socket.data = { isServer: true };
socket.on("connection", handlers.open.bind(socket));
socket.on("data", handlers.data.bind(socket));
socket.on("drain", handlers.drain.bind(socket));
socket.setEncoding("binary");
socket.write(buffer);
}
});
setInterval(() => {
console.log("Wrote", counter, "messages");
counter = 0;
}, 1000);
server.listen(8000);
const socket = net.connect({ host: "localhost", port: 8000 }, () => {});
socket.on("connection", handlers.open.bind(socket));
socket.on("data", handlers.data.bind(socket));
socket.on("drain", handlers.drain.bind(socket));
socket.setEncoding("binary");
socket.write(buffer);

View File

@@ -1,49 +0,0 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
var short = new TextEncoder().encode("Hello World!");
var shortUTF16 = new TextEncoder().encode("Hello World 💕💕💕");
var long = new TextEncoder().encode("Hello World!".repeat(1024));
var longUTF16 = new TextEncoder().encode("Hello World 💕💕💕".repeat(1024));
bench(`${short.length} ascii`, () => {
var decoder = new TextDecoder();
decoder.decode(short);
});
bench(`${short.length} utf8`, () => {
var decoder = new TextDecoder();
decoder.decode(shortUTF16);
});
bench(`${long.length} ascii`, () => {
var decoder = new TextDecoder();
decoder.decode(long);
});
bench(`${longUTF16.length} utf8`, () => {
var decoder = new TextDecoder();
decoder.decode(longUTF16);
});
if ("Buffer" in globalThis) {
const buffer_short = Buffer.from(short);
bench(`Buffer ${buffer_short.length} ascii`, () => {
buffer_short.toString("ascii");
});
const buffer_shortUTF16 = Buffer.from(shortUTF16);
bench(`Buffer ${buffer_shortUTF16.length} utf8`, () => {
buffer_shortUTF16.toString("utf8");
});
const buffer_long = Buffer.from(long);
bench(`Buffer ${buffer_long.length} ascii`, () => {
buffer_long.toString("ascii");
});
const buffer_longUTF16 = Buffer.from(longUTF16);
bench(`Buffer ${buffer_longUTF16.length} utf8`, () => {
buffer_longUTF16.toString("utf8");
});
}
await run();

View File

@@ -1,33 +0,0 @@
import { bench, run } from "mitata";
var short = "Hello World!";
var shortUTF16 = "Hello World 💕💕💕";
var long = "Hello World!".repeat(1024);
var longUTF16 = "Hello World 💕💕💕".repeat(1024);
var encoder = new TextEncoder();
bench(`4 ascii`, () => {
encoder.encode("heyo");
});
bench(`4 utf8`, () => {
encoder.encode("💕💕");
});
bench(`${short.length} ascii`, () => {
encoder.encode(short);
});
bench(`${short.length} utf8`, () => {
encoder.encode(shortUTF16);
});
bench(`${long.length} ascii`, () => {
encoder.encode(long);
});
bench(`${longUTF16.length} utf8`, () => {
encoder.encode(longUTF16);
});
await run();

View File

@@ -1,25 +0,0 @@
import { readFileSync, writeFileSync } from "node:fs";
import { bench, run } from "mitata";
var short = "Hello World!";
var shortUTF16 = "Hello World 💕💕💕";
var long = "Hello World!".repeat(1024);
var longUTF16 = "Hello World 💕💕💕".repeat(1024);
bench(`${short.length} ascii`, () => {
writeFileSync("/tmp/bun.bench-out.txt", short);
});
bench(`${short.length} utf8`, () => {
writeFileSync("/tmp/bun.bench-out.txt", shortUTF16);
});
bench(`${long.length} ascii`, () => {
writeFileSync("/tmp/bun.bench-out.txt", long);
});
bench(`${longUTF16.length} utf8`, () => {
writeFileSync("/tmp/bun.bench-out.txt", longUTF16);
});
await run();

View File

@@ -1,4 +1,4 @@
import { Database } from "https://deno.land/x/sqlite3@0.7.2/mod.ts";
import { Database } from "https://deno.land/x/sqlite3@0.6.1/mod.ts";
import { run, bench } from "../node_modules/mitata/src/cli.mjs";
const db = new Database("./src/northwind.sqlite");

View File

@@ -6,7 +6,7 @@
"": {
"name": "bench",
"dependencies": {
"better-sqlite3": "^8.0.1"
"better-sqlite3": "^7.6.0"
}
},
"node_modules/base64-js": {
@@ -29,9 +29,9 @@
]
},
"node_modules/better-sqlite3": {
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/better-sqlite3/-/better-sqlite3-8.0.1.tgz",
"integrity": "sha512-JhTZjpyapA1icCEjIZB4TSSgkGdFgpWZA2Wszg7Cf4JwJwKQmbvuNnJBeR+EYG/Z29OXvR4G//Rbg31BW/Z7Yg==",
"version": "7.6.0",
"resolved": "https://registry.npmjs.org/better-sqlite3/-/better-sqlite3-7.6.0.tgz",
"integrity": "sha512-wYckL8S8RHP+KKNsZuJGZ7z/6FFmVgwd0U8jSv6t997C+EFR1yvi8p2WIpTb10jiV5rRA5VtMdgtAZFcAnK3Iw==",
"hasInstallScript": true,
"dependencies": {
"bindings": "^1.5.0",
@@ -443,9 +443,9 @@
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="
},
"better-sqlite3": {
"version": "8.0.1",
"resolved": "https://registry.npmjs.org/better-sqlite3/-/better-sqlite3-8.0.1.tgz",
"integrity": "sha512-JhTZjpyapA1icCEjIZB4TSSgkGdFgpWZA2Wszg7Cf4JwJwKQmbvuNnJBeR+EYG/Z29OXvR4G//Rbg31BW/Z7Yg==",
"version": "7.6.0",
"resolved": "https://registry.npmjs.org/better-sqlite3/-/better-sqlite3-7.6.0.tgz",
"integrity": "sha512-wYckL8S8RHP+KKNsZuJGZ7z/6FFmVgwd0U8jSv6t997C+EFR1yvi8p2WIpTb10jiV5rRA5VtMdgtAZFcAnK3Iw==",
"requires": {
"bindings": "^1.5.0",
"prebuild-install": "^7.1.0"

View File

@@ -1,7 +1,7 @@
{
"name": "bench",
"dependencies": {
"better-sqlite3": "^8.0.1"
"better-sqlite3": "^7.6.0"
},
"scripts": {
"build": "exit 0",

Binary file not shown.

View File

@@ -2,6 +2,9 @@
"name": "websocket-server",
"module": "index.ts",
"type": "module",
"devDependencies": {
"bun-types": "^0.2.0"
},
"dependencies": {
"bufferutil": "^4.0.7",
"utf-8-validate": "^5.0.10",

View File

@@ -1,4 +1,14 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {}
}
"compilerOptions": {
"lib": ["ESNext"],
"module": "esnext",
"target": "esnext",
"moduleResolution": "node",
// so that if your project isn't using TypeScript, it still has autocomplete
"allowJs": true,
// "bun-types" is the important part
"types": ["bun-types"]
}
}

1
build-id Normal file
View File

@@ -0,0 +1 @@
2

260
build.zig
View File

@@ -1,21 +1,18 @@
const std = @import("std");
const resolve_path = @import("./src/resolver/resolve_path.zig");
fn pkgPath(comptime out: []const u8) std.build.FileSource {
if (comptime std.fs.path.dirname(@src().file)) |base| {
const outpath = comptime base ++ std.fs.path.sep_str ++ out;
return .{ .path = outpath };
} else {
return .{ .path = out };
}
const outpath = comptime std.fs.path.dirname(@src().file).? ++ std.fs.path.sep_str ++ out;
return .{ .path = outpath };
}
pub fn addPicoHTTP(step: *std.build.LibExeObjStep, comptime with_obj: bool) void {
step.addIncludePath("src/deps");
step.addIncludeDir("src/deps");
if (with_obj) {
step.addObjectFile("src/deps/picohttpparser.o");
}
step.addIncludePath("src/deps");
step.addIncludeDir("src/deps");
if (with_obj) {
step.addObjectFile(panicIfNotFound("src/deps/picohttpparser.o"));
@@ -43,27 +40,50 @@ const color_map = std.ComptimeStringMap([]const u8, .{
&.{ "yellow", "33m" },
});
var compiler_rt_path: []const u8 = "";
var compiler_rt_path_buf: [std.fs.MAX_PATH_BYTES]u8 = undefined;
fn addInternalPackages(step: *std.build.LibExeObjStep, allocator: std.mem.Allocator, zig_exe: []const u8, target: anytype) !void {
var bun = std.build.Pkg{
.name = "bun",
.source = pkgPath("src/bun_redirect.zig"),
fn addInternalPackages(step: *std.build.LibExeObjStep, _: std.mem.Allocator, target: anytype) !void {
var boringssl: std.build.Pkg = .{
.name = "boringssl",
.source = pkgPath("src/boringssl.zig"),
};
var datetime: std.build.Pkg = .{
.name = "datetime",
.source = pkgPath("src/deps/zig-datetime/src/datetime.zig"),
};
var thread_pool: std.build.Pkg = .{
.name = "thread_pool",
.source = pkgPath("src/thread_pool.zig"),
};
var crash_reporter: std.build.Pkg = .{
.name = "crash_reporter",
.source = pkgPath("src/deps/backtrace.zig"),
};
var picohttp: std.build.Pkg = .{
.name = "picohttp",
.source = pkgPath("src/deps/picohttp.zig"),
};
var io_darwin: std.build.Pkg = .{
.name = "async_io",
.name = "io",
.source = pkgPath("src/io/io_darwin.zig"),
};
var io_linux: std.build.Pkg = .{
.name = "async_io",
.name = "io",
.source = pkgPath("src/io/io_linux.zig"),
};
var io_stub: std.build.Pkg = .{
.name = "async_io",
.name = "io",
.source = pkgPath("src/io/io_stub.zig"),
};
var lol_html: std.build.Pkg = .{
.name = "lolhtml",
.source = pkgPath("src/deps/lol-html.zig"),
};
var io = if (target.isDarwin())
io_darwin
else if (target.isLinux())
@@ -71,6 +91,21 @@ fn addInternalPackages(step: *std.build.LibExeObjStep, allocator: std.mem.Alloca
else
io_stub;
var strings: std.build.Pkg = .{
.name = "strings",
.source = pkgPath("src/string_immutable.zig"),
};
var clap: std.build.Pkg = .{
.name = "clap",
.source = pkgPath("src/deps/zig-clap/clap.zig"),
};
var http: std.build.Pkg = .{
.name = "http",
.source = pkgPath("src/http_client_async.zig"),
};
var javascript_core_real: std.build.Pkg = .{
.name = "javascript_core",
.source = pkgPath("src/jsc.zig"),
@@ -81,55 +116,60 @@ fn addInternalPackages(step: *std.build.LibExeObjStep, allocator: std.mem.Alloca
.source = pkgPath("src/jsc_stub.zig"),
};
var uws: std.build.Pkg = .{
.name = "uws",
.source = pkgPath("src/deps/uws.zig"),
};
var javascript_core = if (target.getOsTag() == .freestanding)
javascript_core_stub
else
javascript_core_real;
javascript_core.dependencies = &[_]std.build.Pkg{};
step.addPackage(io);
step.addPackage(bun);
const paths_to_try = .{
"{s}/../lib/compiler_rt/stack_probe.zig",
"{s}/../../lib/compiler_rt/stack_probe.zig",
"{s}/../../../lib/compiler_rt/stack_probe.zig",
"{s}/../../../../lib/compiler_rt/stack_probe.zig",
"{s}/../lib/zig/compiler_rt/stack_probe.zig",
"{s}/../../lib/zig/compiler_rt/stack_probe.zig",
"{s}/../../../lib/zig/compiler_rt/stack_probe.zig",
"{s}/../../../../lib/zig/compiler_rt/stack_probe.zig",
var analytics: std.build.Pkg = .{
.name = "analytics",
.source = pkgPath("src/analytics.zig"),
};
var found = false;
if (compiler_rt_path.len > 0) {
const compiler_rt: std.build.Pkg = .{
.name = "compiler_rt",
.source = .{ .path = compiler_rt_path },
};
found = true;
step.addPackage(compiler_rt);
} else {
inline for (paths_to_try) |path_fmt| {
if (!found) brk: {
// workaround for https://github.com/ziglang/zig/issues/14099
const path = try std.fmt.allocPrint(allocator, path_fmt, .{zig_exe});
var target_path = std.os.realpath(
std.fs.path.resolve(allocator, &.{path}) catch break :brk,
&compiler_rt_path_buf,
) catch break :brk;
const compiler_rt: std.build.Pkg = .{
.name = "compiler_rt",
.source = .{ .path = target_path },
};
found = true;
step.addPackage(compiler_rt);
compiler_rt_path = target_path;
}
}
}
if (!found) {
std.io.getStdErr().writeAll("\nwarning: Could not find compiler_rt. This might cause a build error until https://github.com/ziglang/zig/issues/14099 is fixed.\n\n") catch {};
}
io.dependencies = &.{analytics};
uws.dependencies = &.{boringssl};
javascript_core.dependencies = &.{ http, strings, picohttp, io, uws };
http.dependencies = &.{
strings,
picohttp,
io,
boringssl,
thread_pool,
uws,
};
thread_pool.dependencies = &.{ io, http };
http.dependencies = &.{
strings,
picohttp,
io,
boringssl,
thread_pool,
uws,
};
thread_pool.dependencies = &.{ io, http };
thread_pool.dependencies = &.{
io,
http,
};
step.addPackage(thread_pool);
step.addPackage(picohttp);
step.addPackage(io);
step.addPackage(strings);
step.addPackage(clap);
step.addPackage(http);
step.addPackage(boringssl);
step.addPackage(javascript_core);
step.addPackage(crash_reporter);
step.addPackage(datetime);
step.addPackage(lol_html);
step.addPackage(uws);
}
const BunBuildOptions = struct {
@@ -138,7 +178,6 @@ const BunBuildOptions = struct {
baseline: bool = false,
bindgen: bool = false,
sizegen: bool = false,
base_path: [:0]const u8 = "",
pub fn step(this: BunBuildOptions, b: anytype) *std.build.OptionsStep {
var opts = b.addOptions();
@@ -147,7 +186,6 @@ const BunBuildOptions = struct {
opts.addOption(@TypeOf(this.baseline), "baseline", this.baseline);
opts.addOption(@TypeOf(this.bindgen), "bindgen", this.bindgen);
opts.addOption(@TypeOf(this.sizegen), "sizegen", this.sizegen);
opts.addOption(@TypeOf(this.base_path), "base_path", this.base_path);
return opts;
}
};
@@ -162,18 +200,6 @@ fn panicIfNotFound(comptime filepath: []const u8) []const u8 {
return filepath;
}
const fmt = struct {
pub usingnamespace @import("std").fmt;
pub fn hexInt(value: anytype) @TypeOf(std.fmt.fmtSliceHexLower("")) {
return std.fmt.fmtSliceHexLower(std.mem.asBytes(&value));
}
pub fn hexIntUp(value: anytype) @TypeOf(std.fmt.fmtSliceHexUpper("")) {
return std.fmt.fmtSliceHexUpper(std.mem.asBytes(&value));
}
};
fn updateRuntime() anyerror!void {
var runtime_out_file = try std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only });
const runtime_hash = std.hash.Wyhash.hash(
@@ -182,7 +208,7 @@ fn updateRuntime() anyerror!void {
);
const runtime_version_file = std.fs.cwd().createFile("src/runtime.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/runtime.version", .{});
defer runtime_version_file.close();
runtime_version_file.writer().print("{any}", .{fmt.hexInt(runtime_hash)}) catch unreachable;
runtime_version_file.writer().print("{x}", .{runtime_hash}) catch unreachable;
var fallback_out_file = try std.fs.cwd().openFile("src/fallback.out.js", .{ .mode = .read_only });
const fallback_hash = std.hash.Wyhash.hash(
0,
@@ -191,7 +217,7 @@ fn updateRuntime() anyerror!void {
const fallback_version_file = std.fs.cwd().createFile("src/fallback.version", .{ .truncate = true }) catch std.debug.panic("Failed to create src/fallback.version", .{});
fallback_version_file.writer().print("{any}", .{fmt.hexInt(fallback_hash)}) catch unreachable;
fallback_version_file.writer().print("{x}", .{fallback_hash}) catch unreachable;
fallback_version_file.close();
}
@@ -214,6 +240,7 @@ pub fn build(b: *std.build.Builder) !void {
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
mode = b.standardReleaseOptions();
var exe: *std.build.LibExeObjStep = undefined;
var output_dir_buf = std.mem.zeroes([4096]u8);
var bin_label = if (mode == std.builtin.Mode.Debug) "packages/debug-bun-" else "packages/bun-";
@@ -250,8 +277,8 @@ pub fn build(b: *std.build.Builder) !void {
var triplet = triplet_buf[0 .. osname.len + cpuArchName.len + 1];
if (b.option([]const u8, "output-dir", "target to install to") orelse std.os.getenv("OUTPUT_DIR")) |output_dir_| {
output_dir = b.pathFromRoot(output_dir_);
if (std.os.getenv("OUTPUT_DIR")) |output_dir_| {
output_dir = output_dir_;
} else {
const output_dir_base = try std.fmt.bufPrint(&output_dir_buf, "{s}{s}", .{ bin_label, triplet });
output_dir = b.pathFromRoot(output_dir_base);
@@ -259,13 +286,21 @@ pub fn build(b: *std.build.Builder) !void {
std.fs.cwd().makePath(output_dir) catch {};
const bun_executable_name = if (mode == std.builtin.Mode.Debug) "bun-debug" else "bun";
const root_src = if (target.getOsTag() == std.Target.Os.Tag.freestanding)
exe = b.addExecutable(bun_executable_name, if (target.getOsTag() == std.Target.Os.Tag.freestanding)
"src/main_wasm.zig"
else
"root.zig";
"src/main.zig");
// exe.setLibCFile("libc.txt");
exe.linkLibC();
// exe.linkLibCpp();
exe.setOutputDir(output_dir);
updateRuntime() catch {};
exe.setTarget(target);
exe.setBuildMode(mode);
b.install_path = output_dir;
const min_version: std.builtin.Version = if (target.getOsTag() != .freestanding)
target.getOsVersionMin().semver
else .{ .major = 0, .minor = 0, .patch = 0 };
@@ -274,9 +309,20 @@ pub fn build(b: *std.build.Builder) !void {
target.getOsVersionMax().semver
else .{ .major = 0, .minor = 0, .patch = 0 };
// exe.want_lto = true;
defer b.default_step.dependOn(&b.addLog("Output: {s}/{s}\n", .{ output_dir, bun_executable_name }).step);
defer b.default_step.dependOn(&b.addLog(
"Build {s} v{} - v{}\n",
.{
triplet,
min_version,
max_version,
},
).step);
var obj_step = b.step("obj", "Build bun as a .o file");
obj_step.dependOn(&b.addLog("Output: {s}/{s}\n", .{ output_dir, bun_executable_name }).step);
var obj = b.addObject(bun_executable_name, root_src);
var obj = b.addObject(bun_executable_name, exe.root_src.?.path);
var default_build_options: BunBuildOptions = brk: {
const is_baseline = arch.isX86() and (target.cpu_model == .baseline or
!std.Target.x86.featureSetHas(target.getCpuFeatures(), .avx2));
@@ -311,7 +357,6 @@ pub fn build(b: *std.build.Builder) !void {
.sha = git_sha,
.baseline = is_baseline,
.bindgen = false,
.base_path = try b.allocator.dupeZ(u8, b.pathFromRoot(".")),
};
};
@@ -323,7 +368,6 @@ pub fn build(b: *std.build.Builder) !void {
try addInternalPackages(
obj,
b.allocator,
b.zig_exe,
target,
);
@@ -349,8 +393,9 @@ pub fn build(b: *std.build.Builder) !void {
).step);
}
defer obj_step.dependOn(&obj.step);
obj_step.dependOn(&obj.step);
obj.setOutputDir(output_dir);
obj.setBuildMode(mode);
var actual_build_options = default_build_options;
@@ -367,9 +412,7 @@ pub fn build(b: *std.build.Builder) !void {
obj.bundle_compiler_rt = true;
obj.omit_frame_pointer = mode != .Debug;
if (b.option(bool, "for-editor", "Do not emit bin, just check for errors") orelse false) {
obj.emit_bin = .no_emit;
}
b.default_step.dependOn(&obj.step);
if (target.getOsTag() == .linux) {
// obj.want_lto = tar;
@@ -386,7 +429,7 @@ pub fn build(b: *std.build.Builder) !void {
const headers_step = b.step("headers-obj", "Build JavaScriptCore headers");
var headers_obj: *std.build.LibExeObjStep = b.addObject("headers", "src/bindgen.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
var headers_build_options = default_build_options;
headers_build_options.bindgen = true;
headers_obj.addOptions("build_options", default_build_options.step(b));
@@ -401,14 +444,14 @@ pub fn build(b: *std.build.Builder) !void {
// wasm_step.link_function_sections = true;
// wasm_step.link_emit_relocs = true;
// wasm_step.single_threaded = true;
try configureObjectStep(b, wasm_step, @TypeOf(target), target, obj.main_pkg_path.?);
try configureObjectStep(b, wasm_step, target, obj.main_pkg_path.?);
}
{
const headers_step = b.step("httpbench-obj", "Build HTTPBench tool (object files)");
var headers_obj: *std.build.LibExeObjStep = b.addObject("httpbench", "misctools/http_bench.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -416,7 +459,7 @@ pub fn build(b: *std.build.Builder) !void {
const headers_step = b.step("machbench-obj", "Build Machbench tool (object files)");
var headers_obj: *std.build.LibExeObjStep = b.addObject("machbench", "misctools/machbench.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -424,7 +467,7 @@ pub fn build(b: *std.build.Builder) !void {
const headers_step = b.step("fetch-obj", "Build fetch (object files)");
var headers_obj: *std.build.LibExeObjStep = b.addObject("fetch", "misctools/fetch.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -432,7 +475,7 @@ pub fn build(b: *std.build.Builder) !void {
const headers_step = b.step("string-bench", "Build string bench");
var headers_obj: *std.build.LibExeObjStep = b.addExecutable("string-bench", "src/bench/string-handling.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -440,7 +483,7 @@ pub fn build(b: *std.build.Builder) !void {
const headers_step = b.step("sha-bench-obj", "Build sha bench");
var headers_obj: *std.build.LibExeObjStep = b.addObject("sha", "src/sha.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -448,7 +491,7 @@ pub fn build(b: *std.build.Builder) !void {
const headers_step = b.step("vlq-bench", "Build vlq bench");
var headers_obj: *std.build.LibExeObjStep = b.addExecutable("vlq-bench", "src/sourcemap/vlq_bench.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -456,7 +499,7 @@ pub fn build(b: *std.build.Builder) !void {
const headers_step = b.step("tgz-obj", "Build tgz (object files)");
var headers_obj: *std.build.LibExeObjStep = b.addObject("tgz", "misctools/tgz.zig");
defer headers_step.dependOn(&headers_obj.step);
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
headers_obj.addOptions("build_options", default_build_options.step(b));
}
@@ -474,7 +517,7 @@ pub fn build(b: *std.build.Builder) !void {
if (std.fs.path.dirname(test_bin)) |dir| headers_obj.setOutputDir(dir);
}
try configureObjectStep(b, headers_obj, @TypeOf(target), target, obj.main_pkg_path.?);
try configureObjectStep(b, headers_obj, target, obj.main_pkg_path.?);
try linkObjectFiles(b, headers_obj, target);
{
@@ -493,7 +536,7 @@ pub fn build(b: *std.build.Builder) !void {
test_.setMainPkgPath(obj.main_pkg_path.?);
test_.setTarget(target);
try configureObjectStep(b, test_, @TypeOf(target), target, obj.main_pkg_path.?);
try configureObjectStep(b, test_, target, obj.main_pkg_path.?);
try linkObjectFiles(b, test_, target);
test_.addOptions("build_options", default_build_options.step(b));
@@ -509,12 +552,9 @@ pub fn build(b: *std.build.Builder) !void {
headers_step.dependOn(&after.step);
}
}
if (obj.emit_bin != .no_emit)
obj.setOutputDir(output_dir);
b.default_step.dependOn(obj_step);
}
pub var original_make_fn: ?*const fn (step: *std.build.Step) anyerror!void = null;
pub var original_make_fn: ?fn (step: *std.build.Step) anyerror!void = null;
// Due to limitations in std.build.Builder
// we cannot use this with debugging
@@ -554,18 +594,19 @@ pub fn linkObjectFiles(b: *std.build.Builder, obj: *std.build.LibExeObjStep, tar
.{ "libJavaScriptCore.a", "libJavaScriptCore.a" },
.{ "libWTF.a", "libWTF.a" },
.{ "libbmalloc.a", "libbmalloc.a" },
.{ "libbacktrace.a", "libbacktrace.a" },
.{ "liblolhtml.a", "liblolhtml.a" },
.{ "uSockets.a", "uSockets.a" },
});
for (dirs_to_search.slice()) |deps_path| {
var deps_dir = std.fs.cwd().openIterableDir(deps_path, .{}) catch continue;
var deps_dir = std.fs.cwd().openDir(deps_path, .{ .iterate = true }) catch continue;
var iterator = deps_dir.iterate();
obj.addIncludePath(deps_path);
obj.addLibraryPath(deps_path);
obj.addIncludeDir(deps_path);
obj.addLibPath(deps_path);
while (iterator.next() catch null) |entr| {
const entry: std.fs.IterableDir.Entry = entr;
const entry: std.fs.Dir.Entry = entr;
if (files_we_care_about.get(entry.name)) |obj_name| {
var has_added = try added.getOrPut(std.hash.Wyhash.hash(0, obj_name));
if (!has_added.found_existing) {
@@ -577,20 +618,17 @@ pub fn linkObjectFiles(b: *std.build.Builder, obj: *std.build.LibExeObjStep, tar
}
}
pub fn configureObjectStep(b: *std.build.Builder, obj: *std.build.LibExeObjStep, comptime Target: type, target: Target, main_pkg_path: []const u8) !void {
pub fn configureObjectStep(_: *std.build.Builder, obj: *std.build.LibExeObjStep, target: anytype, main_pkg_path: []const u8) !void {
obj.setMainPkgPath(main_pkg_path);
obj.setTarget(target);
try addInternalPackages(obj, std.heap.page_allocator, b.zig_exe, target);
try addInternalPackages(obj, std.heap.page_allocator, target);
if (target.getOsTag() != .freestanding)
addPicoHTTP(obj, false);
obj.strip = false;
obj.setOutputDir(output_dir);
obj.setBuildMode(mode);
obj.bundle_compiler_rt = true;
if (target.getOsTag() != .freestanding) obj.linkLibC();
if (target.getOsTag() != .freestanding) obj.bundle_compiler_rt = true;

BIN
bun.lockb

Binary file not shown.

View File

@@ -1,8 +0,0 @@
[test]
# Large monorepos (like Bun) may want to specify the test directory more specifically
# By default, `bun wiptest` scans every single folder recursively, which means
# that if you have a gigantic submodule (like WebKit) it has to do lots of
# directory traversals
#
# Instead, we can just make it scan only the test directory for Bun's runtime tests
root = "test/bun.js"

View File

@@ -1,5 +1,6 @@
#!/usr/bin/env bash
_file_arguments() {
shopt -s extglob globstar
local extensions="${1}";
@@ -80,9 +81,8 @@ _subcommand_comp_reply() {
_bun_completions() {
declare -A GLOBAL_OPTIONS;
declare -A PACKAGE_OPTIONS;
declare -A PM_OPTIONS;
local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init pm x";
local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init";
GLOBAL_OPTIONS[LONG_OPTIONS]="--use --cwd --bunfile --server-bunfile --config --disable-react-fast-refresh --disable-hmr --extension-order --jsx-factory --jsx-fragment --extension-order --jsx-factory --jsx-fragment --jsx-import-source --jsx-production --jsx-runtime --main-fields --no-summary --version --platform --public-dir --tsconfig-override --define --external --help --inject --loader --origin --port --dump-environment-variables --dump-limits --disable-bun-js";
GLOBAL_OPTIONS[SHORT_OPTIONS]="-c -v -d -e -h -i -l -u -p";
@@ -95,9 +95,6 @@ _bun_completions() {
PACKAGE_OPTIONS[SHARED_OPTIONS_LONG]="--config --yarn --production --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --global --cwd --backend --link-native-bins --help";
PACKAGE_OPTIONS[SHARED_OPTIONS_SHORT]="-c -y -p -f -g";
PM_OPTIONS[LONG_OPTIONS]="--config --yarn --production --no-save --dry-run --lockfile --force --cache-dir --no-cache --silent --verbose --no-progress --no-summary --no-verify --ignore-scripts --global --cwd --backend --link-native-bins --help"
PM_OPTIONS[SHORT_OPTIONS]="-c -y -p -f -g"
local cur_word="${COMP_WORDS[${COMP_CWORD}]}";
local prev="${COMP_WORDS[$(( COMP_CWORD - 1 ))]}";
@@ -153,11 +150,6 @@ _bun_completions() {
COMPREPLY+=( $(compgen -W "--version --cwd --help --silent -v -h" -- "${cur_word}" ) );
_read_scripts_in_package_json;
return;;
pm)
_long_short_completion \
"${PM_OPTIONS[LONG_OPTIONS]} ${PM_OPTIONS[SHORT_OPTIONS]}";
COMPREPLY+=( $(compgen -W "bin ls cache hash hash-print hash-string" -- "${cur_word}") );
return;;
*)
local replaced_script;
_long_short_completion \

View File

@@ -53,17 +53,14 @@ end
set -l bun_install_boolean_flags yarn production optional development no-save dry-run force no-cache silent verbose global
set -l bun_install_boolean_flags_descriptions "Write a yarn.lock file (yarn v1)" "Don't install devDependencies" "Add dependency to optionalDependencies" "Add dependency to devDependencies" "Don't install devDependencies" "Don't install anything" "Always request the latest versions from the registry & reinstall all dependencies" "Ignore manifest cache entirely" "Don't output anything" "Excessively verbose logging" "Use global folder"
set -l bun_builtin_cmds dev create help bun upgrade discord run install remove add init link unlink pm x
set -l bun_builtin_cmds_without_run dev create help bun upgrade discord install remove add init pm x
set -l bun_builtin_cmds_without_bun dev create help upgrade run discord install remove add init pm x
set -l bun_builtin_cmds_without_create dev help bun upgrade discord run install remove add init pm x
set -l bun_builtin_cmds_without_install create dev help bun upgrade discord run remove add init pm x
set -l bun_builtin_cmds_without_remove create dev help bun upgrade discord run install add init pm x
set -l bun_builtin_cmds_without_add create dev help bun upgrade discord run remove install init pm x
set -l bun_builtin_cmds_without_pm create dev help bun upgrade discord run init pm x
# clear
complete -e -c bun
set -l bun_builtin_cmds dev create help bun upgrade discord run install remove add init link unlink
set -l bun_builtin_cmds_without_run dev create help bun upgrade discord install remove add init
set -l bun_builtin_cmds_without_bun dev create help upgrade run discord install remove add init
set -l bun_builtin_cmds_without_create dev help bun upgrade discord run install remove add init
set -l bun_builtin_cmds_without_install create dev help bun upgrade discord run remove add init
set -l bun_builtin_cmds_without_remove create dev help bun upgrade discord run install add init
set -l bun_builtin_cmds_without_add create dev help bun upgrade discord run remove install init
set -l bun_builtin_cmds_without_pm create dev help bun upgrade discord run init
complete -c bun \
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_run; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a '(__fish__get_bun_scripts)' -d 'script'
@@ -105,7 +102,7 @@ complete -c bun \
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a '--help' -d 'See all commands and flags' -x
complete -c bun \
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -l "version" -s "v" -a '--version' -d 'Bun\'s version' -x
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -l "version" -s "v" -a '--version' -d 'bun\'s version' -x
complete -c bun \
-n "not __fish_seen_subcommand_from $bun_builtin_cmds; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_use_subcommand" -a 'discord' -d 'Open bun\'s Discord server' -x
@@ -153,11 +150,5 @@ complete -c bun \
complete -c bun \
-n "not __fish_seen_subcommand_from $bun_builtin_cmds_without_pm; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts); and __fish_seen_subcommand_from add;" -d 'History' -a '(__history_completions)'
complete -c bun \
-n "__fish_seen_subcommand_from pm; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts) cache;" -a 'bin ls cache hash hash-print hash-string' -f
complete -c bun \
-n "__fish_seen_subcommand_from pm; and __fish_seen_subcommand_from cache; and not __fish_seen_subcommand_from (__fish__get_bun_bins) (__fish__get_bun_scripts);" -a 'rm' -f
complete -c bun -n "not __fish_seen_subcommand_from $bun_builtin_cmds (__fish__get_bun_bins) (__fish__get_bun_scripts)" -a "$bun_builtin_cmds" -f
complete -c bun --no-files

View File

@@ -1,4 +1,3 @@
#compdef bun
_bun() {
zstyle ':completion:*:*:bun:*' group-name ''
zstyle ':completion:*:*:bun-grouped:*' group-name ''
@@ -21,11 +20,10 @@ _bun() {
local -a scripts_list
IFS=$'\n' scripts_list=($(SHELL=zsh bun getcompletes i))
scripts="scripts:scripts:(($scripts_list))"
IFS=$'\n' files_list=($(SHELL=zsh bun getcompletes j))
main_commands=('add\:"Add a dependency to package.json" bun\:"Generate a bundle" create\:"Create a new project" dev\:"Start a dev server" help\:"Show command help" install\:"Install packages from package.json" x\:"Run a command from a local or remote NPM package" pm\:"Manage local packages" remove\:"Remove a dependency from package.json" run\:"Run a script or package bin" upgrade\:"Upgrade to the latest version of bun"')
main_commands=('add\:"Add a dependency to package.json" bun\:"Generate a bundle" create\:"Create a new project" dev\:"Start a dev server" help\:"Show command help" install\:"Install packages from package.json" remove\:"Remove a dependency from package.json" run\:"Run a script or package bin" upgrade\:"Upgrade to the latest version of bun"')
main_commands=($main_commands)
_alternative "$scripts" "args:command:(($main_commands))" "files:files:(($files_list))"
_alternative "$scripts" "args:bun:(($main_commands))"
;;
args)
case $line[1] in
@@ -263,83 +261,7 @@ _bun() {
;;
x)
_arguments -s -C \
'1: :->cmd' \
'2: :->cmd2' \
'*: :->args' &&
ret=0
;;
pm)
pmargs=('-c[Load config (bunfig.toml)]'
'--config[Load config (bunfig.toml)]'
'-y[Write a yarn.lock file (yarn v1)]'
'--yarn[Write a yarn.lock file (yarn v1)]'
'-p[Do not install devDependencies]'
'--production[Do not install devDependencies]'
'--no-save[Do not save a lockfile]'
'--dry-run[Do not install anything]'
'--lockfile[Store & load a lockfile at a specific filepath]'
'-f[Always request the latest versions from the registry & reinstall all dependencies]'
'--force[Always request the latest versions from the registry & reinstall all dependencies]'
'--cache-dir[Store & load cached data from a specific directory path]'
'--no-cache[Ignore manifest cache entirely]'
'--silent[Do not log anything]'
'--verbose[Excessively verbose logging]'
'--no-progress[Disable the progress bar]'
'--no-summary[Do not print a summary]'
'--no-verify[Skip verifying integrity of newly downloaded packages]'
'--ignore-scripts[Skip lifecycle scripts in the package.json (dependency scripts are never run)]'
'-g[Install globally]'
'--global[Install globally]'
'--cwd[Set a specific cwd]'
'--backend[Platform-specific optimizations for installing dependencies. Possible values: "clonefile" (default), "hardlink", "symlink", "copyfile"]'
'--link-native-bins[Link "bin" from a matching platform-specific "optionalDependencies" instead. Default: esbuild, turbo]'
'--help[Print this help menu]'
)
# ---- Command: help
_arguments -s -C \
'1: :->cmd' \
'2: :->cmd2' \
'*: :->args' &&
ret=0
case $state in
cmd2)
_alternative 'args:cmd3:((bin ls cache hash hash-print hash-string))'
;;
args)
case $line[2] in
cache)
_arguments -s -C \
'1: :->cmd' \
'2: :->cmd2' \
':::(rm)' \
$pmargs &&
ret=0
;;
*)
_arguments -s -C \
'1: :->cmd' \
'2: :->cmd2' \
$pmargs &&
ret=0
;;
esac
;;
esac
;;
\
\
help)
help)
# ---- Command: help
_arguments -s -C \
@@ -705,8 +627,5 @@ __bun_dynamic_comp() {
return $comp
}
if ! command -v compinit >/dev/null; then
autoload -U compinit && compinit
fi
autoload -U compinit && compinit
compdef _bun bun

View File

@@ -83,7 +83,7 @@ export default {
headers: {
"Content-Type": "text/html; charset=utf-8",
},
},
}
);
},
};

View File

@@ -1,6 +1,6 @@
import { resolve } from "path";
const { write, stdout, file } = Bun;
import { argv } from "process";
const { argv } = process;
const path = resolve(argv.at(-1)!);
const path = resolve(argv.at(-1));
await write(stdout, file(path));

View File

@@ -1,5 +1,4 @@
// Start a fast HTTP server from a function
Bun.serve({
async fetch(req) {
const { pathname } = new URL(req.url);
@@ -10,21 +9,21 @@ Bun.serve({
"Enter a path that starts with https:// or http://\n",
{
status: 400,
},
}
);
}
const response = await fetch(
req.url.substring("http://localhost:3000/".length),
req.clone(),
req.clone()
);
return new HTMLRewriter()
.on("a[href]", {
element(element) {
element(element: Element) {
element.setAttribute(
"href",
"https://www.youtube.com/watch?v=dQw4w9WgXcQ",
"https://www.youtube.com/watch?v=dQw4w9WgXcQ"
);
},
})

View File

@@ -1,4 +1,4 @@
import { file, serve } from "bun";
import { file } from "bun";
serve({
fetch(req: Request) {
@@ -6,7 +6,7 @@ serve({
// If the URL is empty, display this file.
if (pathname === "") {
return new Response(file(import.meta.url.replace("file://", "")));
return new Response(file(import.meta.url));
}
return new Response(file(pathname));

View File

@@ -6,12 +6,11 @@ serve({
const body = req.body;
const writer = Bun.file(`upload.${Date.now()}.txt`).writer();
for await (const chunk of body!) {
for await (const chunk of body) {
writer.write(chunk);
}
const wrote = await writer.end();
// @ts-ignore
return Response.json({ wrote, type: req.headers.get("Content-Type") });
},
});
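
For reference, a minimal sketch of the incremental writer used in this example, assuming Bun's Bun.file(...).writer() FileSink API (the path below is made up for illustration):

const sink = Bun.file("/tmp/filesink-demo.txt").writer();
sink.write("chunk one\n");
sink.write("chunk two\n");
const wrote = await sink.end(); // the handler above awaits end() and reports the result as `wrote`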

View File

@@ -35,7 +35,7 @@ var Handler;
try {
Handler = await import(sourcefile);
} catch (e: any) {
} catch (e) {
console.error("Error loading sourcefile:", e);
try {
await fetch(
@@ -51,7 +51,7 @@ try {
errorType: e.name,
stackTrace: e?.stack?.split("\n") ?? [],
}),
},
}
);
} catch (e2) {
console.error("Error sending error to runtime:", e2);
@@ -92,7 +92,7 @@ export default {
errorType: e.name,
stackTrace: e?.stack?.split("\n") ?? [],
}),
},
}
);
} catch (e2) {
console.error("Error sending error to runtime:", e2);
@@ -109,7 +109,7 @@ if ("baseURI" in Handler.default) {
var baseURL;
try {
baseURL = new URL(baseURLString);
} catch (e: any) {
} catch (e) {
console.error("Error parsing baseURI:", e);
try {
await fetch(
@@ -125,7 +125,7 @@ try {
errorType: e.name,
stackTrace: e?.stack?.split("\n") || [],
}),
},
}
);
} catch (e2) {
console.error("Error sending error to runtime:", e2);
@@ -147,7 +147,7 @@ async function runHandler(response: Response) {
});
// we are done with the Response object here
// allow it to be GC'd
(response as any) = undefined;
response = undefined;
var result: Response;
try {
@@ -155,10 +155,10 @@ async function runHandler(response: Response) {
console.time(`[${traceID}] Run ${request.url}`);
}
result = handlerFunction(request, {});
if (result && (result as any).then) {
if (result && result.then) {
await result;
}
} catch (e1: any) {
} catch (e1) {
if (typeof process.env.VERBOSE !== "undefined") {
console.error(`[${traceID}] Error running handler:`, e1);
}
@@ -172,7 +172,7 @@ async function runHandler(response: Response) {
errorType: e1.name,
stackTrace: e1?.stack?.split("\n") ?? [],
}),
},
}
).finally(noop);
return;
} finally {
@@ -191,7 +191,7 @@ async function runHandler(response: Response) {
errorType: "ExpectedResponseObject",
stackTrace: [],
}),
},
}
);
return;
}
@@ -202,9 +202,9 @@ async function runHandler(response: Response) {
method: "POST",
headers: result.headers,
body: await result.blob(),
},
}
);
(result as any) = undefined;
result = undefined;
}
while (true) {

Binary file not shown.

Some files were not shown because too many files have changed in this diff.