Compare commits

..

4 Commits

Author SHA1 Message Date
Jarred Sumner 97e2b133e3 Merge branch 'main' into jarred/request-finalizer 2023-04-26 01:31:09 -07:00
Jarred Sumner b957087fe6 Make it clearer this refers to JS request object 2023-04-25 22:09:56 -07:00
Jarred Sumner 88d47cceb4 safer 2023-04-25 21:59:19 -07:00
Jarred Sumner 6259dfdfd1 Fix crash 2023-04-25 21:47:42 -07:00
31259 changed files with 187013 additions and 620719 deletions

View File

@@ -1,8 +0,0 @@
# https://EditorConfig.org
root = true
[*]
charset = utf-8
insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf

.gitattributes vendored (45 lines changed)
View File

@@ -1,22 +1,3 @@
*.css text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.js text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.jsx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.tsx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.ts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.c text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.cpp text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.cc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.yml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.zig text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.rs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.h text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.json text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.lock text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.map text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.md text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
*.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
.vscode/launch.json linguist-generated
src/api/schema.d.ts linguist-generated
fixture.*.c linguist-generated
@@ -24,30 +5,6 @@ src/api/schema.js linguist-generated
src/bun.js/bindings/sqlite/sqlite3.c linguist-vendored
src/bun.js/bindings/sqlite/sqlite3_local.h linguist-vendored
*.lockb binary diff=lockb
*.zig text eol=lf
src/bun.js/bindings/simdutf.cpp linguist-vendored
src/bun.js/bindings/simdutf.h linguist-vendored
src/js/out/WebCoreJSBuiltins.cpp linguist-generated
src/js/out/WebCoreJSBuiltins.h linguist-generated
src/js/out/WebCoreJSBuiltins.d.ts linguist-generated
src/bun.js/bindings/ZigGeneratedClasses.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses.cpp linguist-generated
src/bun.js/bindings/ZigGeneratedCode.h linguist-generated
src/bun.js/bindings/ZigGeneratedCode.cpp linguist-generated
src/bun.js/bindings/headers.h linguist-generated
src/bun.js/bindings/headers.zig linguist-generated
src/bun.js/bindings/JSSink.h linguist-generated
src/bun.js/bindings/JSSink.zig linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+DOMClientIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+DOMIsoSubspaces.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureHeader.h linguist-generated
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureImpl.h linguist-generated
docs/**/* linguist-documentation
packages/bun-uws/fuzzing/seed-corpus/**/* linguist-generated

View File

@@ -0,0 +1,35 @@
name: 📥 Install Problem
description: Report an issue during install or upgrade
labels: [bug, install]
body:
- type: markdown
attributes:
value: |
Thank you for submitting a bug report. It helps make Bun better.
If you need help or support using Bun, and are not reporting an issue, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
Please try to include as much information as possible.
- type: input
attributes:
label: What platform is your computer?
description: |
For MacOS and Linux: copy the output of `uname -mprs`
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
- type: textarea
attributes:
label: How did you attempt to install or upgrade?
description: Please provide the commands you ran to install or upgrade.
validations:
required: true
- type: textarea
attributes:
label: What do you see instead?
description: If possible, please provide text instead of a screenshot.
validations:
required: true
- type: textarea
attributes:
label: Additional information
description: Is there anything else you think we should know?

View File

@@ -10,15 +10,11 @@ body:
If you need help or support using Bun, and are not reporting a bug, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
Make sure you are running the [latest](https://bun.sh/docs/installation#upgrading) version of Bun.
The bug you are experiencing may already have been fixed.
Please try to include as much information as possible.
- type: input
attributes:
label: What version of Bun is running?
description: Copy the output of `bun --revision`
description: Copy the output of `bun -v`
- type: input
attributes:
label: What platform is your computer?

View File

@@ -8,7 +8,7 @@ body:
Thank you for submitting an idea. It helps make Bun better.
If you want to discuss Bun, or learn how others are using Bun, please
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can share in the [`#feedback`](https://discord.gg/unwUnHBNqy) channel.
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can share in the [`#feedback-ideas`](https://discord.gg/unwUnHBNqy) channel.
- type: textarea
attributes:
label: What is the problem this feature would solve?

View File

@@ -1,62 +0,0 @@
### What does this PR do?
<!-- **Please explain what your changes do**, example: -->
<!--
This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.
-->
- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
- [ ] Code changes
### How did you verify your code works?
<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->
<!-- I wrote automated tests -->
<!-- If JavaScript/TypeScript modules or builtins changed:
- [ ] I ran `make js` and committed the transpiled changes
- [ ] I or my editor ran Prettier on the changed files (or I ran `bun fmt`)
- [ ] I included a test for the new code, or an existing test covers it
-->
<!-- If Zig files changed:
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
- [ ] I or my editor ran `zig fmt` on the changed files
- [ ] I included a test for the new code, or an existing test covers it
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
-->
<!-- If new methods, getters, or setters were added to a publicly exposed class:
- [ ] I added TypeScript types for the new methods, getters, or setters
-->
<!-- If dependencies in tests changed:
- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
-->
<!-- If functions were added to exports.zig or bindings.zig
- [ ] I ran `make headers` to regenerate the C header file
-->
<!-- If \*.classes.ts files were added or changed:
- [ ] I ran `make codegen` to regenerate the C++ and Zig code
-->
<!-- If a new builtin ESM/CJS module was added:
- [ ] I updated Aliases in `module_loader.zig` to include the new module
- [ ] I added a test that imports the module
- [ ] I added a test that require() the module
-->

View File

@@ -12,7 +12,6 @@ jobs:
deploy:
name: Deploy site
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
steps:
- name: Trigger Vercel build
run: curl ${{ secrets.VERCEL_DEPLOY_HOOK }}
run: curl ${{ secrets.VERCEL_DEPLOY_HOOK }}

View File

@@ -0,0 +1,41 @@
name: bun-framework-next
on:
push:
paths:
- packages/bun-framework-next/**/*
branches: [main, bun-framework-next-actions]
pull_request:
paths:
- packages/bun-framework-next/**/*
branches: [main]
jobs:
build:
name: lint, test and build on Node ${{ matrix.node }} and ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
node: ["14.x"]
os: [macOS-latest]
steps:
- name: Checkout repo
uses: actions/checkout@v2
- name: Use Node ${{ matrix.node }}
uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node }}
- name: Install PNPM
uses: pnpm/action-setup@v2.0.1
with:
version: 6.21.0
- name: Install dependencies
run: cd packages/bun-framework-next && pnpm install
- name: Type check bun-framework-next
run: cd packages/bun-framework-next && pnpm check

View File

@@ -1,9 +1,4 @@
name: bun-linux
concurrency:
group: bun-linux-aarch64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -26,9 +21,7 @@ jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
permissions: write-all
strategy:
matrix:
include:
@@ -37,7 +30,7 @@ jobs:
arch: aarch64
build_arch: arm64
runner: linux-arm64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-linux-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-linux-arm64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-arm64-lto"
build_machine_arch: aarch64

View File

@@ -1,9 +1,4 @@
name: bun-linux
concurrency:
group: bun-linux-build-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -35,9 +30,7 @@ jobs:
linux:
name: ${{matrix.tag}}
runs-on: ${{matrix.runner}}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
permissions: write-all
strategy:
fail-fast: false
matrix:
@@ -47,7 +40,7 @@ jobs:
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
- cpu: nehalem
@@ -55,7 +48,7 @@ jobs:
arch: x86_64
build_arch: amd64
runner: big-ubuntu
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-linux-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-linux-amd64-lto.tar.gz"
webkit_basename: "bun-webkit-linux-amd64-lto"
build_machine_arch: x86_64
@@ -153,33 +146,13 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
Pull Request
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}
Build failed on ${{ matrix.tag }}:
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
linux-test:
name: Tests ${{matrix.tag}}
runs-on: ubuntu-latest
needs: [linux]
if: github.event_name == 'pull_request'
timeout-minutes: 20
permissions:
pull-requests: write
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
@@ -201,48 +174,23 @@ jobs:
with:
name: bun-${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install-bun
name: Install Bun
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip bun-${{matrix.tag}}.zip
cd bun-${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
- id: install-dependnecies
name: Install dependencies
run: |
sudo apt-get update && sudo apt-get install -y openssl
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2

View File

@@ -1,9 +1,4 @@
name: bun-macOS-aarch64
concurrency:
group: bun-macOS-aarch64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -33,7 +28,6 @@ jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
@@ -106,7 +100,6 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
@@ -115,36 +108,36 @@ jobs:
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
# - cpu: nehalem
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: native
@@ -152,14 +145,14 @@ jobs:
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
dependencies: true
compile_obj: true
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -172,11 +165,11 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@16
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
@@ -244,10 +237,8 @@ jobs:
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
permissions: write-all
strategy:
matrix:
include:
@@ -256,29 +247,29 @@ jobs:
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# package: bun-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: native
arch: aarch64
tag: bun-darwin-aarch64
obj: bun-obj-darwin-aarch64
package: bun-darwin-aarch64
artifact: bun-obj-darwin-aarch64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-arm64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-arm64-lto.tar.gz"
runner: macos-arm64
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -291,10 +282,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@16
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2
with:
@@ -393,33 +384,12 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
Pull Request
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}
Build failed on ${{ matrix.tag }}:
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
permissions:
pull-requests: write
timeout-minutes: 30
if: github.event_name == 'pull_request'
timeout-minutes: 10
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
@@ -441,47 +411,23 @@ jobs:
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install-bun
name: Install Bun
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
- id: install
name: Install dependencies
run: |
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2

View File

@@ -1,9 +1,4 @@
name: bun-macOS-x64-baseline
concurrency:
group: bun-macOS-x64-baseline-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -33,7 +28,6 @@ jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
@@ -106,7 +100,6 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
@@ -115,36 +108,36 @@ jobs:
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-12
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: nehalem
arch: x86_64
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
runner: macos-12
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
# - cpu: native
@@ -152,14 +145,14 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -172,11 +165,11 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@16
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix ccache)/bin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache (dependencies)
uses: hendrikmuhs/ccache-action@v1.2
if: matrix.dependencies
@@ -245,10 +238,8 @@ jobs:
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
permissions: write-all
strategy:
matrix:
include:
@@ -257,29 +248,29 @@ jobs:
tag: bun-darwin-x64-baseline
obj: bun-obj-darwin-x64-baseline
package: bun-darwin-x64
runner: macos-12
runner: macos-11
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# package: bun-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -292,10 +283,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache rust llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@16
brew install ccache rust llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: ccache (link)
uses: hendrikmuhs/ccache-action@v1.2
with:
@@ -397,32 +388,11 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
Pull Request
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}
Build failed on ${{ matrix.tag }}:
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
permissions:
pull-requests: write
if: github.event_name == 'pull_request'
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
@@ -432,7 +402,7 @@ jobs:
matrix:
include:
- tag: bun-darwin-x64-baseline
runner: macos-12
runner: macos-11
steps:
- id: checkout
name: Checkout
@@ -445,50 +415,23 @@ jobs:
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install-bun
name: Install Bun
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
- id: install
name: Install dependencies
run: |
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
Hey @${{ github.actor }},
${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2

View File

@@ -1,9 +1,4 @@
name: bun-macOS-x64
concurrency:
group: bun-macOS-x64-${{ github.ref }}
cancel-in-progress: true
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
@@ -33,7 +28,6 @@ jobs:
macos-object-files:
name: macOS Object
runs-on: med-ubuntu
if: github.repository_owner == 'oven-sh'
strategy:
matrix:
include:
@@ -106,7 +100,6 @@ jobs:
macOS-cpp:
name: macOS C++
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
timeout-minutes: 90
strategy:
matrix:
@@ -115,36 +108,36 @@ jobs:
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: true
# compile_obj: false
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
runner: macos-12
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: true
compile_obj: false
# - cpu: nehalem
# arch: x86_64
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# dependencies: false
# compile_obj: true
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
runner: macos-12
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
dependencies: false
compile_obj: true
# - cpu: native
@@ -152,14 +145,14 @@ jobs:
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
# dependencies: true
# compile_obj: true
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -172,10 +165,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@16
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: Download WebKit
if: matrix.compile_obj
env:
@@ -247,10 +240,8 @@ jobs:
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
permissions: write-all
strategy:
matrix:
include:
@@ -259,29 +250,29 @@ jobs:
# tag: bun-darwin-x64-baseline
# obj: bun-obj-darwin-x64-baseline
# package: bun-darwin-x64
# runner: macos-12
# runner: macos-11
# artifact: bun-obj-darwin-x64-baseline
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
- cpu: haswell
arch: x86_64
tag: bun-darwin-x64
obj: bun-obj-darwin-x64
package: bun-darwin-x64
runner: macos-12
runner: macos-11
artifact: bun-obj-darwin-x64
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-amd64-lto.tar.gz"
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3/bun-webkit-macos-arm64-lto.tar.gz"
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/apr12-23/bun-webkit-macos-arm64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu)
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -294,10 +285,10 @@ jobs:
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install rust ccache llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@16
brew install rust ccache llvm@15 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "export PATH=$(brew --prefix coreutils)/libexec/gnubin:\$PATH" >> $GITHUB_ENV
echo "export PATH=$(brew --prefix llvm@15)/bin:\$PATH" >> $GITHUB_ENV
brew link --overwrite llvm@15
- name: Download WebKit
env:
CPU_TARGET: ${{ matrix.cpu }}
@@ -399,32 +390,11 @@ jobs:
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
Pull Request
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}
Build failed on ${{ matrix.tag }}:
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
macOS-test:
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
permissions:
pull-requests: write
if: github.event_name == 'pull_request'
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
@@ -434,7 +404,7 @@ jobs:
matrix:
include:
- tag: bun-darwin-x64
runner: macos-12
runner: macos-11
steps:
- id: checkout
name: Checkout
@@ -447,47 +417,23 @@ jobs:
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install-bun
name: Install Bun
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
cd ${{matrix.tag}}
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
- id: install
name: Install dependencies
run: |
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
sudo mv bun /usr/local/bin/bun
bun --version
- id: test
name: Test (node runner)
env:
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
${{ steps.test.outputs.failing_tests }}
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2

.github/workflows/bun-release-canary.yml vendored Normal file (175 lines changed)
View File

@@ -0,0 +1,175 @@
name: bun-release-canary
concurrency: release-canary
on:
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "canary"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- canary publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
# npm-types:
# name: Release types to NPM
# runs-on: ubuntu-latest
# defaults:
# run:
# working-directory: packages/bun-types
# steps:
# - id: checkout
# name: Checkout
# uses: actions/checkout@v3
# - id: setup-node
# name: Setup Node.js
# uses: actions/setup-node@v3
# with:
# node-version: latest
# - id: setup-bun
# name: Setup Bun
# uses: oven-sh/setup-bun@v1
# with:
# bun-version: canary
# - id: bun-install
# name: Install Dependencies
# run: bun install
# - id: setup-env
# name: Setup Environment
# run: |
# SHA=$(git rev-parse --short "$GITHUB_SHA")
# VERSION=$(bun --version)
# TAG="${VERSION}-canary.$(date '+%Y%m%d').1+${SHA}"
# echo "Setup tag: ${TAG}"
# echo "TAG=${TAG}" >> ${GITHUB_ENV}
# - id: bun-run
# name: Build
# run: bun run build
# env:
# BUN_VERSION: ${{ env.TAG }}
# - id: npm-publish
# name: Release
# uses: JS-DevTools/npm-publish@v1
# with:
# package: packages/bun-types/dist/package.json
# token: ${{ secrets.NPM_TOKEN }}
# tag: canary
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: canary
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=canary
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-s3 -- canary
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun

View File

@@ -0,0 +1,53 @@
name: bun-release-canary
concurrency: release-canary
on:
push:
branches:
- main
paths:
- "packages/bun-types/**"
workflow_dispatch:
jobs:
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: setup-env
name: Setup Environment
run: |
SHA=$(git rev-parse --short "$GITHUB_SHA")
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary

View File

@@ -1,71 +1,51 @@
name: bun-release
concurrency: release
env:
BUN_VERSION: ${{ github.event.inputs.tag || github.event.release.tag_name || 'canary' }}
BUN_LATEST: ${{ github.event.inputs.is-latest || github.event.release.prerelease == 'false' }}
on:
release:
types:
- published
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
inputs:
is-latest:
description: Is this the latest release?
type: boolean
default: false
tag:
type: string
description: What is the release tag? (e.g. "1.0.2", "canary")
description: The tag to publish
required: true
use-docker:
description: Should Docker images be released?
type: boolean
default: false
use-npm:
description: Should npm packages be published?
type: boolean
default: false
use-homebrew:
description: Should binaries be released to Homebrew?
type: boolean
default: false
use-s3:
description: Should binaries be uploaded to S3?
type: boolean
default: false
use-types:
description: Should types be released to npm?
type: boolean
default: false
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'oven-sh' }}
permissions:
contents: write
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup GPG
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: Setup Bun
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Install Dependencies
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- name: Sign Release
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.BUN_VERSION }}"
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
@@ -73,23 +53,31 @@ jobs:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-npm == 'true' }}
permissions:
contents: read
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup Bun
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Install Dependencies
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- name: Release
run: bun upload-npm -- "${{ env.BUN_VERSION }}" publish
- id: bun-run
name: Release
run: bun upload-npm -- "${{ env.TAG }}" publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
@@ -97,45 +85,40 @@ jobs:
name: Release types to NPM
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-types == 'true' }}
permissions:
contents: read
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup Node.js
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- name: Setup Bun
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Install Dependencies
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- name: Setup Tag
if: ${{ env.BUN_VERSION == 'canary' }}
run: |
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- name: Build
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG || env.BUN_VERSION }}
- name: Release (canary)
if: ${{ env.BUN_VERSION == 'canary' }}
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary
- name: Release (latest)
if: ${{ env.BUN_LATEST == 'true' }}
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
@@ -144,28 +127,19 @@ jobs:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-docker == 'true' }}
permissions:
contents: read
strategy:
fail-fast: false
matrix:
include:
- variant: debian
suffix: ''
- variant: debian
suffix: -debian
- variant: slim
suffix: -slim
dir: debian-slim
- variant: alpine
suffix: -alpine
- variant: distroless
suffix: -distroless
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup Docker emulator
- id: environment
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
@@ -177,60 +151,66 @@ jobs:
uses: docker/metadata-action@v4
with:
images: oven/bun
flavor: |
latest=false
tags: |
type=raw,value=latest,enable=${{ env.BUN_LATEST == 'true' && matrix.suffix == '' }}
type=raw,value=${{ matrix.variant }},enable=${{ env.BUN_LATEST == 'true' }}
type=match,pattern=(bun-v)?(canary|\d+.\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
- name: Login to Docker
type=match,pattern=(bun-v)?(\d.\d.\d),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d.\d),group=2,value=${{ env.TAG }}
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push to Docker
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=${{ env.BUN_VERSION }}
BUN_VERSION=${{ env.TAG }}
homebrew:
name: Release to Homebrew
runs-on: ubuntu-latest
needs: sign
permissions:
contents: read
if: ${{ github.event_name == 'release' || github.event.inputs.use-homebrew == 'true' }}
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
- id: gpg
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: Setup Ruby
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: "2.6"
- name: Update Tap
run: ruby scripts/release.rb "${{ env.BUN_VERSION }}"
- name: Commit Tap
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ env.TAG }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_options: --gpg-sign=${{ steps.gpg.outputs.keyid }}
commit_message: Release ${{ env.BUN_VERSION }}
commit_options: --gpg-sign=${{ steps.setup-gpg.outputs.keyid }}
commit_message: Release ${{ env.TAG }}
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>
@@ -238,23 +218,31 @@ jobs:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-s3 == 'true' }}
permissions:
contents: read
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup Bun
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Install Dependencies
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- name: Release
run: bun upload-s3 -- "${{ env.BUN_VERSION }}"
- id: bun-run
name: Release
run: bun upload-s3 -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}

.github/workflows/bun-typecheck.yml vendored Normal file (34 lines changed)
View File

@@ -0,0 +1,34 @@
name: typecheck
on:
push:
branches: [main]
paths:
- "packages/bun-types/**"
- "test/**"
pull_request:
branches:
- main
paths:
- "packages/bun-types/**"
- "test/**"
jobs:
tests:
name: check-tests
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Install bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- name: Install dependencies
run: bun install
- name: Install dependencies
run: bun install
working-directory: test
- name: Typecheck tests
run: bun run typecheck

View File

@@ -12,8 +12,6 @@ jobs:
prettier-fmt:
name: prettier
runs-on: ubuntu-latest
permissions:
pull-requests: write
outputs:
prettier_fmt_errs: ${{ steps.fmt.outputs.prettier_fmt_errs }}
steps:
.github/workflows/run-test-manually.yml vendored Normal file
@@ -0,0 +1,59 @@
name: Run Tests Manually
on:
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
inputs:
version:
description: "Version"
required: true
default: "canary"
type: string
use_bun:
description: "Use Bun?"
required: true
default: true
type: boolean
jobs:
linux-test:
name: Tests ${{matrix.tag}} ${{github.event.inputs.version}}
runs-on: ubuntu-latest
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
include:
- tag: linux-x64
- tag: linux-x64-baseline
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
submodules: false
- id: install-npm
name: Install (npm)
run: |
npm install @oven/bun-${{matrix.tag}}@${{github.event.inputs.version}}
chmod +x node_modules/@oven/bun-${{matrix.tag}}/bin/bun
sudo cp node_modules/@oven/bun-${{matrix.tag}}/bin/bun /usr/bin/bun
- id: test
name: Test
if: ${{github.event.inputs.use_bun == 'true'}}
run: |
bun install
bun install --cwd test/bun.js
bun install --cwd test/bun.js/third-party/body-parser-test
cd packages/bun-internal-test
bun install
bun run test
- id: test-node-runner
name: Test (node runner)
if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test/bun.js
bun install --cwd test/bun.js/third-party/body-parser-test
cd packages/bun-internal-test
bun install
node src/runner.node.mjs
@@ -1,7 +1,7 @@
name: zig-fmt
env:
ZIG_VERSION: 0.12.0-dev.163+6780a6bbf
ZIG_VERSION: 0.11.0-dev.2571+31738de28
on:
pull_request:
@@ -18,8 +18,6 @@ jobs:
zig-fmt:
name: zig fmt
runs-on: ubuntu-latest
permissions:
pull-requests: write
outputs:
zig_fmt_errs: ${{ steps.fmt.outputs.zig_fmt_errs }}
steps:
@@ -30,7 +28,7 @@ jobs:
run: |
curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
tar -xf zig.tar.xz
echo "$(pwd)/zig-linux-x86_64-${{env.ZIG_VERSION}}" >> $GITHUB_PATH
sudo mv zig-linux-x86_64-${{env.ZIG_VERSION}}/zig /usr/local/bin
- name: Run zig fmt
id: fmt
run: |
.gitignore vendored
@@ -6,7 +6,6 @@ packages/*/*.wasm
profile.json
node_modules
.envrc
.swcrc
yarn.lock
dist
@@ -97,8 +96,6 @@ packages/bun-wasm/*.cjs
packages/bun-wasm/*.map
packages/bun-wasm/*.js
packages/bun-wasm/*.d.ts
packages/bun-wasm/*.d.cts
packages/bun-wasm/*.d.mts
*.bc
src/fallback.version
@@ -119,19 +116,3 @@ src/bun.js/debug-bindings-obj
failing-tests.txt
test.txt
myscript.sh
cold-jsc-start
cold-jsc-start.d
/test.ts
/test.js
src/js/out/modules*
src/js/out/functions*
src/js/out/tmp
src/js/out/DebugPath.h
make-dev-stats.csv
.uuid
tsconfig.tsbuildinfo
.gitmodules vendored
@@ -48,6 +48,13 @@ ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/uws"]
path = src/deps/uws
url = https://github.com/Jarred-Sumner/uWebSockets
ignore = dirty
depth = 1
shallow = true
fetchRecurseSubmodules = true
[submodule "src/deps/tinycc"]
path = src/deps/tinycc
url = https://github.com/Jarred-Sumner/tinycc.git
@@ -58,13 +65,3 @@ fetchRecurseSubmodules = false
[submodule "src/deps/c-ares"]
path = src/deps/c-ares
url = https://github.com/c-ares/c-ares.git
[submodule "src/deps/zstd"]
path = src/deps/zstd
url = https://github.com/facebook/zstd.git
ignore = dirty
[submodule "src/deps/base64"]
path = src/deps/base64
url = https://github.com/aklomp/base64.git
ignore = dirty
depth = 1
shallow = true
@@ -1,6 +1,6 @@
src/fallback.html
src/bun.js/WebKit
src/js/out
src/bun.js/builtins/js
src/*.out.js
src/*out.*.js
src/deps
@@ -10,4 +10,3 @@ test/snapshots
test/snapshots-no-hmr
test/js/deno/*.test.ts
test/js/deno/**/*.test.ts
bench/react-hello-world/react-hello-world.node.js
@@ -6,7 +6,7 @@ module.exports = {
quoteProps: "preserve",
overrides: [
{
files: ["*.md"],
files: "README.md",
options: {
printWidth: 80,
},
@@ -1,21 +0,0 @@
// I would have made this a bash script but there isn't an easy way to track
// time in bash sub-second cross platform.
import fs from "fs";
const start = Date.now() + 5;
const result = Bun.spawnSync(process.argv.slice(2), {
stdio: ["inherit", "inherit", "inherit"],
});
const end = Date.now();
const diff = (Math.max(Math.round(end - start), 0) / 1000).toFixed(3);
const success = result.exitCode === 0;
try {
const line = `${new Date().toISOString()}, ${success ? "success" : "fail"}, ${diff}\n`;
if (fs.existsSync(".scripts/make-dev-stats.csv")) {
fs.appendFileSync(".scripts/make-dev-stats.csv", line);
} else {
fs.writeFileSync(".scripts/make-dev-stats.csv", line);
}
} catch {
// Ignore
}
process.exit(result.exitCode);
@@ -10,4 +10,4 @@ fi
# sets up vscode C++ intellisense
rm -f .vscode/clang++
ln -s $(which clang++-16 || which clang++) .vscode/clang++ 2>/dev/null
ln -s $(which clang++-15 || which clang++) .vscode/clang++ 2>/dev/null
.scripts/write-versions.sh Executable file → Normal file
@@ -7,9 +7,11 @@ LIBARCHIVE_VERSION=$(git rev-parse HEAD:./src/deps/libarchive)
PICOHTTPPARSER_VERSION=$(git rev-parse HEAD:./src/deps/picohttpparser)
BORINGSSL_VERSION=$(git rev-parse HEAD:./src/deps/boringssl)
ZLIB_VERSION=$(git rev-parse HEAD:./src/deps/zlib)
UWS_VERSION=$(git rev-parse HEAD:./src/deps/uws)
LOLHTML=$(git rev-parse HEAD:./src/deps/lol-html)
TINYCC=$(git rev-parse HEAD:./src/deps/tinycc)
C_ARES=$(git rev-parse HEAD:./src/deps/c-ares)
USOCKETS=$(cd src/deps/uws/uSockets && git rev-parse HEAD)
rm -rf src/generated_versions_list.zig
echo "// AUTO-GENERATED FILE. Created via .scripts/write-versions.sh" >src/generated_versions_list.zig
@@ -18,6 +20,7 @@ echo "pub const boringssl = \"$BORINGSSL_VERSION\";" >>src/generated_versions_li
echo "pub const libarchive = \"$LIBARCHIVE_VERSION\";" >>src/generated_versions_list.zig
echo "pub const mimalloc = \"$MIMALLOC_VERSION\";" >>src/generated_versions_list.zig
echo "pub const picohttpparser = \"$PICOHTTPPARSER_VERSION\";" >>src/generated_versions_list.zig
echo "pub const uws = \"$UWS_VERSION\";" >>src/generated_versions_list.zig
echo "pub const webkit = \"$WEBKIT_VERSION\";" >>src/generated_versions_list.zig
echo "pub const zig = @import(\"std\").fmt.comptimePrint(\"{}\", .{@import(\"builtin\").zig_version});" >>src/generated_versions_list.zig
echo "pub const zlib = \"$ZLIB_VERSION\";" >>src/generated_versions_list.zig
@@ -15,14 +15,11 @@
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/modules/",
"${workspaceFolder}/src/js/builtins/",
"${workspaceFolder}/src/js/out",
"${workspaceFolder}/src/bun.js/builtins/",
"${workspaceFolder}/src/bun.js/builtins/cpp",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/packages/bun-usockets/src",
"${workspaceFolder}/packages/"
"${workspaceFolder}/src/deps/uws/uSockets/src"
],
"browse": {
"path": [
@@ -34,19 +31,14 @@
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers/**",
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/**",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/js/builtins/*",
"${workspaceFolder}/src/js/out/*",
"${workspaceFolder}/src/bun.js/builtins/*",
"${workspaceFolder}/src/bun.js/builtins/cpp/*",
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/src/deps",
"${workspaceFolder}/src/deps/boringssl/include/",
"${workspaceFolder}/packages/bun-usockets/",
"${workspaceFolder}/packages/bun-uws/",
"${workspaceFolder}/src/napi"
"${workspaceFolder}/src/deps/uws/uSockets/src"
],
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": ".vscode/cppdb"
.vscode/launch.json generated vendored
@@ -1,22 +1,39 @@
{
// The usage of BUN_GARBAGE_COLLECTOR_LEVEL=2 is important for debugging
// It will force the garbage collector to run after every test and every call to expect()
// it makes our tests very slow
// But it helps catch memory bugs
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "bun test [file]",
"name": "bun test",
"program": "bun-debug",
"args": ["test", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"args": ["wiptest", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test --watch",
"program": "bun-debug",
"args": ["--watch", "test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test (all)",
"program": "bun-debug",
"args": ["wiptest"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
@@ -24,14 +41,48 @@
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (fast)",
"name": "bun run current file",
"program": "bun-debug",
"args": ["test", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"args": ["${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run (watch)",
"program": "bun-debug",
"args": ["--watch", "${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run (hot)",
"program": "bun-debug",
"args": ["--hot", "${file}"],
"cwd": "${file}/../../",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run",
"program": "bun-debug",
"args": ["check.tsx", "-c"],
"cwd": "${env:HOME}/Build/react-ssr",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
@@ -39,11 +90,10 @@
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] (verbose)",
"name": "bun http example",
"program": "bun-debug",
"args": ["test", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"args": ["run", "examples/http.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
@@ -52,107 +102,10 @@
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --watch",
"name": "bun http file example",
"program": "bun-debug",
"args": ["test", "--watch", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [file] --only",
"program": "bun-debug",
"args": ["test", "--only", "${file}"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*]",
"program": "bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*] (fast)",
"program": "bun-debug",
"args": ["test"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun test [*] --only",
"program": "bun-debug",
"args": ["test", "--only"],
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file]",
"program": "bun-debug",
"args": ["run", "${file}", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"NODE_ENV": "development"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] (gc)",
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] (verbose)",
"program": "bun-debug",
"args": ["run", "${file}"],
"cwd": "${fileDirname}",
"args": ["run", "examples/bun/http-file.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
@@ -161,27 +114,16 @@
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] --watch",
"name": "bun html-rewriter example",
"program": "bun-debug",
"args": ["run", "--watch", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
"name": "bun run [file] --hot",
"program": "bun-debug",
"args": ["run", "--hot", "${file}"],
"cwd": "${fileDirname}",
"args": ["run", "examples/bun/html-rewriter.ts"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1"
},
"console": "internalConsole"
},
{
"type": "lldb",
"request": "launch",
@@ -196,7 +138,7 @@
"request": "launch",
"name": "bun build debug",
"program": "bun-debug",
"args": ["bun", "${file}"],
"args": ["bun", "./test/fixtures/bundle/trivial/index.js"],
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"env": {
@@ -225,6 +167,7 @@
"console": "internalConsole",
"env": {}
},
{
"type": "lldb",
"request": "launch",
@@ -258,7 +201,7 @@
"--splitting",
"bun",
"/Users/jarred/Code/bun-rsc/components/Message.tsx",
"/Users/jarred/Code/bun-rsc/components/Button.tsx"
"/Users/jarred/Code/bun-rsc/components/Button.tsx",
],
"cwd": "/Users/jarred/Code/bun-rsc",
"console": "internalConsole",
@@ -307,7 +250,7 @@
"name": "bun install",
"program": "bun-debug",
"args": ["install"],
"cwd": "${fileDirname}",
"cwd": "${workspaceFolder}",
"console": "internalConsole",
"env": {
"BUN_DEBUG_QUIET_LOGS": "1"
.vscode/settings.json vendored
@@ -7,8 +7,6 @@
"search.followSymlinks": false,
"search.useIgnoreFiles": true,
"zig.buildOnSave": false,
// We do this until we upgrade to latest Zig so that zls doesn't break our code.
"zig.formattingProvider": "extension",
"zig.buildArgs": ["obj", "-Dfor-editor"],
"zig.buildOption": "build",
"zig.buildFilePath": "${workspaceFolder}/build.zig",
@@ -27,8 +25,7 @@
"editor.formatOnSave": true
},
"zig.zls.enableInlayHints": false,
"zig.zls.enabled": true,
"git.ignoreSubmodules": true,
"[jsx]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.formatOnSave": true
@@ -40,6 +37,8 @@
"[yaml]": {
"editor.formatOnSave": true
},
"zig.beforeDebugCmd": "make build-unit ${file} ${filter} ${bin}",
"zig.testCmd": "make test ${file} ${filter} ${bin}",
"[markdown]": {
"editor.unicodeHighlight.ambiguousCharacters": false,
"editor.unicodeHighlight.invisibleCharacters": false,
@@ -78,12 +77,7 @@
"src/deps/lol-html": true,
"src/deps/c-ares": true,
"src/deps/tinycc": true,
"src/deps/zstd": true,
"test/snippets/package-json-exports/_node_modules_copy": true,
"src/js/out": true,
"packages/bun-uws/fuzzing/seed-corpus/": true,
"**/*.dep": true,
"**/CMakeFiles": true
"test/snippets/package-json-exports/_node_modules_copy": true
},
"C_Cpp.files.exclude": {
"**/.vscode": true,
@@ -210,18 +204,7 @@
"compare": "cpp",
"concepts": "cpp",
"typeindex": "cpp",
"__verbose_abort": "cpp",
"__std_stream": "cpp",
"any": "cpp",
"charconv": "cpp",
"csignal": "cpp",
"format": "cpp",
"forward_list": "cpp",
"future": "cpp",
"regex": "cpp",
"span": "cpp",
"valarray": "cpp",
"codecvt": "cpp"
"__verbose_abort": "cpp"
},
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "enabled",
@@ -47,18 +47,32 @@ TODO: document this (see [`bindings.zig`](src/bun.js/bindings/bindings.zig) and
Copy from examples like `Subprocess` or `Response`.
### ESM Modules and Builtins JS
### ESM modules
Bun implements ESM modules in a mix of native code and JavaScript.
Several Node.js modules are implemented in JavaScript and loosely based on browserify polyfills.
Builtin modules in Bun are located in [`src/js`](src/js/). These files are transpiled and support a JavaScriptCore-only syntax for internal slots, which is explained further in [`src/js/README.md`](src/js/README.md).
Native C++ modules are in `src/bun.js/modules/`.
The ESM modules in Bun are located in [`src/bun.js/*.exports.js`](src/bun.js/). Unlike other code in Bun, these files are NOT transpiled. They are loaded directly into the JavaScriptCore VM. That means `require` does not work in these files. Instead, you must use `import.meta.require`, or, ideally, avoid requiring or importing other files at all.
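For illustration, here is a minimal, hypothetical sketch of what one of these files could look like (the `Channel` class and the `node:events` import are invented for the example; only the use of `import.meta.require` instead of `require` reflects the rule above):

```js
// Hypothetical *.exports.js-style sketch. These files are not transpiled and run
// directly in JSC, so top-level CommonJS `require` is not defined; when another
// module is genuinely needed, `import.meta.require` is used instead.
const EventEmitter = import.meta.require("node:events");

export class Channel extends EventEmitter {
  send(message) {
    this.emit("message", message);
  }
}

export default { Channel };
```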
The module loader is in [`src/bun.js/module_loader.zig`](src/bun.js/module_loader.zig).
### JavaScript Builtins
JavaScript builtins are located in [`src/bun.js/builtins/*.js`](src/bun.js/builtins).
These files support a JavaScriptCore-only syntax for internal slots. `@` is used to access an internal slot. For example: `new @Array(123)` will create a new `Array` similar to `new Array(123)`, except if a library modifies the `Array` global, it will not affect the internal slot (`@Array`). These names must be allow-listed in `BunBuiltinNames.h` (though JavaScriptCore allowlists some names by default).
They cannot use or reference ESM modules. The files that end with `*Internals.js` are automatically loaded globally. Most usage of internals right now is in the stream implementations (which reuse a lot of code from Safari/WebKit) and ImportMetaObject (which is how `require` is implemented in the runtime).
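For illustration, a minimal, hypothetical builtin sketch (the function itself is invented; only the `@Array` usage reflects the internal-slot syntax described above):

```js
// Hypothetical builtin sketch using the JSC-only `@` internal-slot syntax.
// `@Array` resolves to the engine's own Array intrinsic, so user code that
// overwrites the global `Array` cannot change what this function constructs.
function createZeroedArray(length) {
  "use strict";
  const result = new @Array(length);
  for (let i = 0; i < length; i++) result[i] = 0;
  return result;
}
```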
To regenerate the builtins:
```sh
make clean-bindings && make generate-builtins && make bindings -j10
```
It is recommended that you have ccache installed or else you will spend a lot of time waiting for the bindings to compile.
### Memory management in Bun's JavaScript runtime
TODO: fill this out (for now, use `JSC.Strong` in most cases)
@@ -10,9 +10,9 @@ ARG ARCH=x86_64
ARG BUILD_MACHINE_ARCH=x86_64
ARG TRIPLET=${ARCH}-linux-gnu
ARG BUILDARCH=amd64
ARG WEBKIT_TAG=2023-oct3
ARG WEBKIT_TAG=feb9
ARG ZIG_TAG=jul1
ARG ZIG_VERSION="0.12.0-dev.163+6780a6bbf"
ARG ZIG_VERSION="0.11.0-dev.2571+31738de28"
ARG WEBKIT_BASENAME="bun-webkit-linux-$BUILDARCH"
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
@@ -20,17 +20,19 @@ ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
ARG WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/$WEBKIT_TAG/${WEBKIT_BASENAME}.tar.gz"
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
ARG GIT_SHA=""
ARG BUN_BASE_VERSION=1.0
ARG BUN_BASE_VERSION=0.6
FROM bitnami/minideb:bullseye as bun-base
RUN install_packages ca-certificates curl wget lsb-release software-properties-common gnupg gnupg1 gnupg2 && \
echo "deb https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-16 main" > /etc/apt/sources.list.d/llvm.list && \
echo "deb-src https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-16 main" >> /etc/apt/sources.list.d/llvm.list && \
wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add - && \
curl -fsSL https://deb.nodesource.com/setup_lts.x | bash - && \
install_packages \
RUN install_packages ca-certificates curl wget lsb-release software-properties-common gnupg gnupg1 gnupg2
RUN wget https://apt.llvm.org/llvm.sh && \
chmod +x llvm.sh && \
./llvm.sh 15
RUN install_packages \
cmake \
curl \
file \
git \
gnupg \
@@ -44,16 +46,16 @@ RUN install_packages ca-certificates curl wget lsb-release software-properties-c
rsync \
ruby \
unzip \
clang-16 \
lld-16 \
lldb-16 \
clangd-16 \
xz-utils \
bash tar gzip ccache nodejs && \
bash tar gzip ccache
ENV CXX=clang++-15
ENV CC=clang-15
RUN curl -fsSL https://deb.nodesource.com/setup_lts.x | bash - && \
install_packages nodejs && \
npm install -g esbuild
ENV CXX=clang++-16
ENV CC=clang-16
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
@@ -70,10 +72,10 @@ ARG ZIG_FILENAME
ENV WEBKIT_OUT_DIR=${WEBKIT_DIR}
ENV BUILDARCH=${BUILDARCH}
ENV AR=/usr/bin/llvm-ar-16
ENV AR=/usr/bin/llvm-ar-15
ENV ZIG "${ZIG_PATH}/zig"
ENV PATH="$ZIG/bin:$PATH"
ENV LD=lld-16
ENV LD=lld-15
RUN mkdir -p $BUN_DIR $BUN_DEPS_OUT_DIR
@@ -155,7 +157,7 @@ COPY src/deps/lol-html ${BUN_DIR}/src/deps/lol-html
ENV CCACHE_DIR=/ccache
RUN --mount=type=cache,target=/ccache export PATH=$PATH:$HOME/.cargo/bin && export CC=$(which clang-16) && cd ${BUN_DIR} && \
RUN --mount=type=cache,target=/ccache export PATH=$PATH:$HOME/.cargo/bin && export CC=$(which clang-15) && cd ${BUN_DIR} && \
make lolhtml && rm -rf src/deps/lol-html Makefile
FROM bun-base as mimalloc
@@ -282,39 +284,16 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY packages/bun-uws ${BUN_DIR}/packages/bun-uws
COPY packages/bun-usockets ${BUN_DIR}/packages/bun-usockets
COPY src/deps/uws ${BUN_DIR}/src/deps/uws
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
COPY src/deps/c-ares/include ${BUN_DIR}/src/deps/c-ares/include
COPY src/deps/libuwsockets.cpp ${BUN_DIR}/src/deps/libuwsockets.cpp
COPY src/deps/_libusockets.h ${BUN_DIR}/src/deps/_libusockets.h
WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make uws && rm -rf packages/bun-uws Makefile
FROM bun-base as base64
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/base64 ${BUN_DIR}/src/deps/base64
WORKDIR $BUN_DIR
RUN cd $BUN_DIR && \
make base64 && rm -rf src/deps/base64 Makefile
make uws && rm -rf src/deps/uws Makefile
FROM bun-base as picohttp
@@ -524,33 +503,6 @@ ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make sqlite
FROM bun-base as zstd
ARG DEBIAN_FRONTEND
ARG GITHUB_WORKSPACE
ARG ZIG_PATH
# Directory extracts to "bun-webkit"
ARG WEBKIT_DIR
ARG BUN_RELEASE_DIR
ARG BUN_DEPS_OUT_DIR
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ENV CCACHE_DIR=/ccache
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
COPY .prettierrc.cjs ${BUN_DIR}/.prettierrc.cjs
WORKDIR $BUN_DIR
ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
RUN --mount=type=cache,target=/ccache cd $BUN_DIR && make zstd
FROM scratch as build_release_cpp
COPY --from=compile_cpp /tmp/*.o /
@@ -577,14 +529,12 @@ ENV JSC_BASE_DIR=${WEBKIT_DIR}
ENV LIB_ICU_PATH=${WEBKIT_DIR}/lib
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=picohttp ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=sqlite ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
COPY --from=zstd ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.a ${BUN_DEPS_OUT_DIR}/
COPY --from=uws ${BUN_DEPS_OUT_DIR}/*.o ${BUN_DEPS_OUT_DIR}/
Makefile
@@ -6,6 +6,8 @@ BUN_AUTO_UPDATER_REPO = Jarred-Sumner/bun-releases-for-updater
CMAKE_CXX_COMPILER_LAUNCHER_FLAG :=
# 'make' command will trigger the help target
.DEFAULT_GOAL := help
@@ -18,7 +20,6 @@ CPU_TARGET ?= native
MARCH_NATIVE = -mtune=$(CPU_TARGET)
NATIVE_OR_OLD_MARCH =
MMD_IF_LOCAL =
DEFAULT_MIN_MACOS_VERSION=
ARCH_NAME :=
DOCKER_BUILDARCH =
@@ -38,20 +39,12 @@ NATIVE_OR_OLD_MARCH = -march=nehalem
endif
MIN_MACOS_VERSION ?= $(DEFAULT_MIN_MACOS_VERSION)
BUN_BASE_VERSION = 1.0
CI ?= false
BUN_BASE_VERSION = 0.6
AR=
ifeq ($(CI), false)
MMD_IF_LOCAL = -MMD
endif
BUN_OR_NODE = $(shell which bun 2>/dev/null || which node 2>/dev/null)
CXX_VERSION=c++2a
TRIPLET = $(OS_NAME)-$(ARCH_NAME)
PACKAGE_NAME = bun-$(TRIPLET)
@@ -82,9 +75,8 @@ ZIG ?= $(shell which zig 2>/dev/null || echo -e "error: Missing zig. Please make
# This is easier to happen than you'd expect.
# Using realpath here causes issues because clang uses clang++ as a symlink
# so if that's resolved, it won't build for C++
REAL_CC = $(shell which clang-16 2>/dev/null || which clang 2>/dev/null)
REAL_CXX = $(shell which clang++-16 2>/dev/null || which clang++ 2>/dev/null)
CLANG_FORMAT = $(shell which clang-format-16 2>/dev/null || which clang-format 2>/dev/null)
REAL_CC = $(shell which clang-15 2>/dev/null || which clang 2>/dev/null)
REAL_CXX = $(shell which clang++-15 2>/dev/null || which clang++ 2>/dev/null)
CC = $(REAL_CC)
CXX = $(REAL_CXX)
@@ -108,14 +100,14 @@ CC_WITH_CCACHE = $(CCACHE_PATH) $(CC)
ifeq ($(OS_NAME),darwin)
# Find LLVM
ifeq ($(wildcard $(LLVM_PREFIX)),)
LLVM_PREFIX = $(shell brew --prefix llvm@16)
LLVM_PREFIX = $(shell brew --prefix llvm@15)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
LLVM_PREFIX = $(shell brew --prefix llvm)
endif
ifeq ($(wildcard $(LLVM_PREFIX)),)
# This is kinda ugly, but I can't find a better way to error :(
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@16' or set LLVM_PREFIX=/path/to/llvm")
LLVM_PREFIX = $(shell echo -e "error: Unable to find llvm. Please run 'brew install llvm@15' or set LLVM_PREFIX=/path/to/llvm")
endif
LDFLAGS += -L$(LLVM_PREFIX)/lib
@@ -155,7 +147,7 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
-DCMAKE_AR=$(AR) \
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null)
-DCMAKE_RANLIB=$(which llvm-15-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null)
@@ -177,11 +169,11 @@ endif
ifeq ($(OS_NAME),linux)
LIBICONV_PATH =
AR = $(shell which llvm-ar-16 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
AR = $(shell which llvm-ar-15 2>/dev/null || which llvm-ar 2>/dev/null || which ar 2>/dev/null)
endif
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE) -gdwarf-4
DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE)
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
BUN_TMP_DIR := /tmp/make-bun
@@ -274,7 +266,7 @@ STRIP=/usr/bin/strip
endif
ifeq ($(OS_NAME),linux)
STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-16 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
STRIP=$(shell which llvm-strip 2>/dev/null || which llvm-strip-15 2>/dev/null || which strip 2>/dev/null || echo "Missing strip")
endif
@@ -293,14 +285,14 @@ SRC_WEBCORE_FILES := $(wildcard $(SRC_DIR)/webcore/*.cpp)
SRC_SQLITE_FILES := $(wildcard $(SRC_DIR)/sqlite/*.cpp)
SRC_NODE_OS_FILES := $(wildcard $(SRC_DIR)/node_os/*.cpp)
SRC_IO_FILES := $(wildcard src/io/*.cpp)
SRC_BUILTINS_FILES := $(wildcard src/js/out/*.cpp)
SRC_BUILTINS_FILES := $(wildcard src/bun.js/builtins/*.cpp)
SRC_WEBCRYPTO_FILES := $(wildcard $(SRC_DIR)/webcrypto/*.cpp)
OBJ_FILES := $(patsubst $(SRC_DIR)/%.cpp,$(OBJ_DIR)/%.o,$(SRC_FILES))
WEBCORE_OBJ_FILES := $(patsubst $(SRC_DIR)/webcore/%.cpp,$(OBJ_DIR)/%.o,$(SRC_WEBCORE_FILES))
SQLITE_OBJ_FILES := $(patsubst $(SRC_DIR)/sqlite/%.cpp,$(OBJ_DIR)/%.o,$(SRC_SQLITE_FILES))
NODE_OS_OBJ_FILES := $(patsubst $(SRC_DIR)/node_os/%.cpp,$(OBJ_DIR)/%.o,$(SRC_NODE_OS_FILES))
BUILTINS_OBJ_FILES := $(patsubst src/js/out/%.cpp,$(OBJ_DIR)/%.o,$(SRC_BUILTINS_FILES))
BUILTINS_OBJ_FILES := $(patsubst src/bun.js/builtins/%.cpp,$(OBJ_DIR)/%.o,$(SRC_BUILTINS_FILES))
IO_FILES := $(patsubst src/io/%.cpp,$(OBJ_DIR)/%.o,$(SRC_IO_FILES))
MODULES_OBJ_FILES := $(patsubst $(MODULES_DIR)/%.cpp,$(OBJ_DIR)/%.o,$(MODULES_FILES))
WEBCRYPTO_OBJ_FILES := $(patsubst $(SRC_DIR)/webcrypto/%.cpp,$(OBJ_DIR)/%.o,$(SRC_WEBCRYPTO_FILES))
@@ -309,7 +301,7 @@ DEBUG_OBJ_FILES := $(patsubst $(SRC_DIR)/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_FILES)
DEBUG_WEBCORE_OBJ_FILES := $(patsubst $(SRC_DIR)/webcore/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_WEBCORE_FILES))
DEBUG_SQLITE_OBJ_FILES := $(patsubst $(SRC_DIR)/sqlite/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_SQLITE_FILES))
DEBUG_NODE_OS_OBJ_FILES := $(patsubst $(SRC_DIR)/node_os/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_NODE_OS_FILES))
DEBUG_BUILTINS_OBJ_FILES := $(patsubst src/js/out/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_BUILTINS_FILES))
DEBUG_BUILTINS_OBJ_FILES := $(patsubst src/bun.js/builtins/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_BUILTINS_FILES))
DEBUG_IO_FILES := $(patsubst src/io/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(SRC_IO_FILES))
DEBUG_MODULES_OBJ_FILES := $(patsubst $(MODULES_DIR)/%.cpp,$(DEBUG_OBJ_DIR)/%.o,$(MODULES_FILES))
DEBUG_WEBCRYPTO_OBJ_FILES := $(patsubst $(SRC_DIR)/webcrypto/%.cpp, $(DEBUG_OBJ_DIR)/%.o, $(SRC_WEBCRYPTO_FILES))
@@ -330,12 +322,12 @@ ALL_JSC_INCLUDE_DIRS := -I$(WEBKIT_RELEASE_DIR)/WTF/Headers \
-I$(WEBKIT_RELEASE_DIR)/WTF/PrivateHeaders
SHARED_INCLUDE_DIR = -I$(realpath src/bun.js/bindings)/ \
-I$(realpath src/js/builtins/) \
-I$(realpath src/js/out/) \
-I$(realpath src/bun.js/builtins/) \
-I$(realpath src/bun.js/bindings) \
-I$(realpath src/bun.js/bindings/webcore) \
-I$(realpath src/bun.js/bindings/webcrypto) \
-I$(realpath src/bun.js/bindings/sqlite) \
-I$(realpath src/bun.js/builtins/cpp) \
-I$(realpath src/bun.js/bindings/node_os) \
-I$(realpath src/bun.js/modules) \
-I$(JSC_INCLUDE_DIR)
@@ -349,10 +341,10 @@ LINUX_INCLUDE_DIRS := $(ALL_JSC_INCLUDE_DIRS) \
-I$(ZLIB_INCLUDE_DIR)
UWS_INCLUDE_DIR := -I$(BUN_DIR)/packages/bun-usockets/src -I$(BUN_DIR)/packages -I$(BUN_DEPS_DIR)
UWS_INCLUDE_DIR := -I$(BUN_DEPS_DIR)/uws/uSockets/src -I$(BUN_DEPS_DIR)/uws/src -I$(BUN_DEPS_DIR)
INCLUDE_DIRS := $(UWS_INCLUDE_DIR) -I$(BUN_DEPS_DIR)/mimalloc/include -I$(BUN_DEPS_DIR)/zstd/include -Isrc/napi -I$(BUN_DEPS_DIR)/boringssl/include -I$(BUN_DEPS_DIR)/c-ares/include -Isrc/bun.js/modules
INCLUDE_DIRS := $(UWS_INCLUDE_DIR) -I$(BUN_DEPS_DIR)/mimalloc/include -Isrc/napi -I$(BUN_DEPS_DIR)/boringssl/include -I$(BUN_DEPS_DIR)/c-ares/include
ifeq ($(OS_NAME),linux)
@@ -401,7 +393,6 @@ CLANG_FLAGS = $(INCLUDE_DIRS) \
-DSTATICALLY_LINKED_WITH_BMALLOC=1 \
-DBUILDING_WITH_CMAKE=1 \
-DBUN_SINGLE_THREADED_PER_VM_ENTRY_SCOPE=1 \
-DNAPI_EXPERIMENTAL=ON \
-DNDEBUG=1 \
-DNOMINMAX \
-DIS_BUILD \
@@ -454,15 +445,13 @@ MINIMUM_ARCHIVE_FILES = -L$(BUN_DEPS_OUT_DIR) \
-ldecrepit \
-lssl \
-lcrypto \
-llolhtml \
-lbase64
-llolhtml
ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MINIMUM_ARCHIVE_FILES) \
-larchive \
-ltcc \
-lusockets \
-lcares \
-lzstd \
$(BUN_DEPS_OUT_DIR)/libuwsockets.o
ARCHIVE_FILES = $(ARCHIVE_FILES_WITHOUT_LIBCRYPTO)
@@ -539,6 +528,9 @@ get-% : ; @echo $($*)
print-version:
@echo $(PACKAGE_JSON_VERSION)
# Prevent dependency on libtcc1 so it doesn't do filesystem lookups
TINYCC_CFLAGS= -DTCC_LIBTCC1=\"\0\"
@@ -550,17 +542,35 @@ tinycc:
cd $(TINYCC_DIR) && \
make clean && \
AR=$(AR) $(CCACHE_CC_FLAG) CFLAGS='$(CFLAGS_WITHOUT_MARCH) $(NATIVE_OR_OLD_MARCH) -mtune=native $(TINYCC_CFLAGS)' ./configure --enable-static --cc=$(CCACHE_CC_OR_CC) --ar=$(AR) --config-predefs=yes && \
make libtcc.a -j10 && \
make -j10 && \
cp $(TINYCC_DIR)/*.a $(BUN_DEPS_OUT_DIR)
PYTHON=$(shell which python 2>/dev/null || which python3 2>/dev/null || which python2 2>/dev/null)
.PHONY: esm
js: # to rebundle js (rebuilding binary not needed to reload js code)
NODE_ENV=production bun src/js/_codegen/index.ts
.PHONY: builtins
builtins: ## to generate builtins
@if [ -z "$(WEBKIT_DIR)" ] || [ ! -d "$(WEBKIT_DIR)/Source" ]; then echo "WebKit is not cloned. Please run make init-submodules"; exit 1; fi
rm -f src/bun.js/bindings/*Builtin*.cpp src/bun.js/bindings/*Builtin*.h src/bun.js/bindings/*Builtin*.cpp
rm -rf src/bun.js/builtins/cpp
mkdir -p src/bun.js/builtins/cpp
$(PYTHON) $(realpath $(WEBKIT_DIR)/Source/JavaScriptCore/Scripts/generate-js-builtins.py) -i $(realpath src)/bun.js/builtins/js -o $(realpath src)/bun.js/builtins/cpp --framework WebCore --force
$(PYTHON) $(realpath $(WEBKIT_DIR)/Source/JavaScriptCore/Scripts/generate-js-builtins.py) -i $(realpath src)/bun.js/builtins/js -o $(realpath src)/bun.js/builtins/cpp --framework WebCore --wrappers-only
rm -rf /tmp/1.h src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h.1
echo -e '// clang-format off\nnamespace Zig { class GlobalObject; }\n#include "root.h"\n' >> /tmp/1.h
cat /tmp/1.h src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h > src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h.1
mv src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h.1 src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h
rm -rf /tmp/1.h src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h.1
echo -e '// clang-format off\nnamespace Zig { class GlobalObject; }\n#include "root.h"\n' >> /tmp/1.h
cat /tmp/1.h src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.cpp > src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.cpp.1
mv src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.cpp.1 src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.cpp
$(SED) -i -e 's/class JSDOMGlobalObject/using JSDOMGlobalObject = Zig::GlobalObject/' src/bun.js/builtins/cpp/WebCoreJSBuiltinInternals.h
# this is the one we actually build
mv src/bun.js/builtins/cpp/*JSBuiltin*.cpp src/bun.js/builtins
.PHONY: generate-builtins
generate-builtins: builtins
esm-debug:
BUN_DEBUG_QUIET_LOGS=1 NODE_ENV=production bun-debug src/js/build-esm.ts
BUN_TYPES_REPO_PATH ?= $(realpath packages/bun-types)
@@ -626,9 +636,6 @@ compile-ffi-test:
sqlite:
.PHONY: zstd
zstd:
cd $(BUN_DEPS_DIR)/zstd && rm -rf build-cmake-debug && cmake $(CMAKE_FLAGS) -DZSTD_BUILD_STATIC=ON -B build-cmake-debug -S build/cmake -G Ninja && ninja -C build-cmake-debug && cp build-cmake-debug/lib/libzstd.a $(BUN_DEPS_OUT_DIR)/libzstd.a
.PHONY: libarchive
libarchive:
@@ -661,10 +668,10 @@ else
PKGNAME_NINJA := ninja-build
endif
.PHONY: assert-deps
assert-deps:
.PHONY: require
require:
@echo "Checking if the required utilities are available..."
@if [ $(CLANG_VERSION) -lt "15" ]; then echo -e "ERROR: clang version >=15 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@16"; exit 1; fi
@if [ $(CLANG_VERSION) -lt "15" ]; then echo -e "ERROR: clang version >=15 required, found: $(CLANG_VERSION). Install with:\n\n $(POSIX_PKG_MANAGER) install llvm@15"; exit 1; fi
@cmake --version >/dev/null 2>&1 || (echo -e "ERROR: cmake is required."; exit 1)
@$(PYTHON) --version >/dev/null 2>&1 || (echo -e "ERROR: python is required."; exit 1)
@$(ESBUILD) --version >/dev/null 2>&1 || (echo -e "ERROR: esbuild is required."; exit 1)
@@ -673,80 +680,59 @@ assert-deps:
@which aclocal > /dev/null || (echo -e "ERROR: automake is required. Install with:\n\n $(POSIX_PKG_MANAGER) install automake"; exit 1)
@which $(LIBTOOL) > /dev/null || (echo -e "ERROR: libtool is required. Install with:\n\n $(POSIX_PKG_MANAGER) install libtool"; exit 1)
@which ninja > /dev/null || (echo -e "ERROR: Ninja is required. Install with:\n\n $(POSIX_PKG_MANAGER) install $(PKGNAME_NINJA)"; exit 1)
@which pkg-config > /dev/null || (echo -e "ERROR: pkg-config is required. Install with:\n\n $(POSIX_PKG_MANAGER) install pkg-config"; exit 1)
@which rustc > /dev/null || (echo -e "ERROR: rustc is required." exit 1)
@which cargo > /dev/null || (echo -e "ERROR: cargo is required." exit 1)
@test $(shell cargo --version | awk '{print $$2}' | cut -d. -f2) -gt 57 || (echo -e "ERROR: cargo version must be at least 1.57."; exit 1)
@echo "You have the dependencies installed! Woo"
# the following allows you to run `make submodule` to update or init submodules, but we will exclude WebKit
# unless you explicitly clone it yourself (a huge download)
SUBMODULE_NAMES=$(shell cat .gitmodules | grep 'path = ' | awk '{print $$3}')
ifeq ("$(wildcard src/bun.js/WebKit/.git)", "")
SUBMODULE_NAMES := $(filter-out src/bun.js/WebKit, $(SUBMODULE_NAMES))
endif
.PHONY: init-submodules
init-submodules: submodule # (backwards-compatibility alias)
.PHONY: submodule
submodule: ## to init or update all submodules
git submodule update --init --recursive --progress --depth=1 --checkout $(SUBMODULE_NAMES)
init-submodules:
git submodule update --init --recursive --progress --depth=1 --checkout
.PHONY: build-obj
build-obj:
$(ZIG) build obj -Doptimize=ReleaseFast -Dcpu="$(CPU_TARGET)"
.PHONY: build-obj-small
build-obj-small:
$(ZIG) build obj -Doptimize=ReleaseSmall -Dcpu="$(CPU_TARGET)"
.PHONY: dev-build-obj-wasm
dev-build-obj-wasm:
$(ZIG) build bun-wasm -Dtarget=wasm32-freestanding
.PHONY: dev-wasm
dev-wasm: dev-build-obj-wasm
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init', '_getTests']" \
-g2 -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/$(MIMALLOC_FILE).wasm \
packages/debug-bun-freestanding-wasm32/bun-wasm.o --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
-g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
packages/debug-bun-freestanding-wasm32/bun-wasm.o $(OPTIMIZATION_LEVEL) --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
-o packages/debug-bun-freestanding-wasm32/bun-wasm.wasm
cp packages/debug-bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
cp packages/debug-bun-freestanding-wasm32/bun-wasm.wasm src/api/demo/public/bun-wasm.wasm
.PHONY: build-obj-wasm
build-obj-wasm:
$(ZIG) build bun-wasm -Doptimize=ReleaseFast -Dtarget=wasm32-freestanding
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init', '_getTests']" \
-s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/$(MIMALLOC_FILE).wasm \
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
-g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
packages/bun-freestanding-wasm32/bun-wasm.o $(OPTIMIZATION_LEVEL) --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
-o packages/bun-freestanding-wasm32/bun-wasm.wasm
cp packages/bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
cp packages/bun-freestanding-wasm32/bun-wasm.wasm src/api/demo/public/bun-wasm.wasm
.PHONY: build-obj-wasm-small
build-obj-wasm-small:
$(ZIG) build bun-wasm -Doptimize=ReleaseFast -Dtarget=wasm32-freestanding
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init', '_getTests']" \
-Oz -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/$(MIMALLOC_FILE).wasm \
$(ZIG) build bun-wasm -Doptimize=ReleaseSmall -Dtarget=wasm32-freestanding
emcc -sEXPORTED_FUNCTIONS="['_bun_free', '_cycleStart', '_cycleEnd', '_bun_malloc', '_scan', '_transform', '_init']" \
-g -s ERROR_ON_UNDEFINED_SYMBOLS=0 -DNDEBUG \
$(BUN_DEPS_DIR)/libmimalloc.a.wasm \
packages/bun-freestanding-wasm32/bun-wasm.o -Oz --no-entry --allow-undefined -s ASSERTIONS=0 -s ALLOW_MEMORY_GROWTH=1 -s WASM_BIGINT=1 \
-o packages/bun-freestanding-wasm32/bun-wasm.wasm
cp packages/bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
cp packages/bun-freestanding-wasm32/bun-wasm.wasm src/api/demo/public/bun-wasm.wasm
.PHONY: wasm
wasm: api mimalloc-wasm build-obj-wasm-small
@rm -rf packages/bun-wasm/*.{d.ts,d.cts,d.mts,js,wasm,cjs,mjs,tsbuildinfo}
wasm: api build-obj-wasm-small
@rm -rf packages/bun-wasm/*.{d.ts,js,wasm,cjs,mjs,tsbuildinfo}
@cp packages/bun-freestanding-wasm32/bun-wasm.wasm packages/bun-wasm/bun.wasm
@cp src/api/schema.d.ts packages/bun-wasm/schema.d.ts
@cp src/api/schema.js packages/bun-wasm/schema.js
@cd packages/bun-wasm && $(NPM_CLIENT) run tsc -- -p .
@cp packages/bun-wasm/index.d.ts packages/bun-wasm/index.d.cts
@mv packages/bun-wasm/index.d.ts packages/bun-wasm/index.d.mts
@bun build --sourcemap=external --external=fs --outdir=packages/bun-wasm --target=browser --minify ./packages/bun-wasm/index.ts
@$(ESBUILD) --sourcemap=external --external:fs --define:process.env.NODE_ENV='"production"' --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=esm --minify 2> /dev/null
@mv packages/bun-wasm/index.js packages/bun-wasm/index.mjs
@mv packages/bun-wasm/index.js.map packages/bun-wasm/index.mjs.map
@$(ESBUILD) --sourcemap=external --external:fs --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=cjs --minify --platform=node 2> /dev/null
@$(ESBUILD) --sourcemap=external --external:fs --define:process.env.NODE_ENV='"production"' --outdir=packages/bun-wasm --target=esnext --bundle packages/bun-wasm/index.ts --format=cjs --minify --platform=node 2> /dev/null
@mv packages/bun-wasm/index.js packages/bun-wasm/index.cjs
@mv packages/bun-wasm/index.js.map packages/bun-wasm/index.cjs.map
@rm -rf packages/bun-wasm/*.tsbuildinfo
@@ -760,17 +746,17 @@ build-obj-safe:
UWS_CC_FLAGS = -pthread -DLIBUS_USE_OPENSSL=1 -DUWS_HTTPRESPONSE_NO_WRITEMARK=1 -DLIBUS_USE_BORINGSSL=1 -DWITH_BORINGSSL=1 -Wpedantic -Wall -Wextra -Wsign-conversion -Wconversion $(UWS_INCLUDE) -DUWS_WITH_PROXY
UWS_CXX_FLAGS = $(UWS_CC_FLAGS) -std=$(CXX_VERSION) -fno-exceptions -fno-rtti
UWS_LDFLAGS = -I$(BUN_DEPS_DIR)/boringssl/include -I$(ZLIB_INCLUDE_DIR)
USOCKETS_DIR = $(BUN_DIR)/packages/bun-usockets
USOCKETS_SRC_DIR = $(USOCKETS_DIR)/src
USOCKETS_DIR = $(BUN_DEPS_DIR)/uws/uSockets/
USOCKETS_SRC_DIR = $(BUN_DEPS_DIR)/uws/uSockets/src/
usockets:
rm -rf $(USOCKETS_DIR)/*.i $(USOCKETS_DIR)/*.bc $(USOCKETS_DIR)/*.o $(USOCKETS_DIR)/*.s $(USOCKETS_DIR)/*.ii $(USOCKETS_DIR)/*.s
cd $(USOCKETS_DIR) && $(CC_WITH_CCACHE) -I$(USOCKETS_SRC_DIR) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CFLAGS) $(UWS_CC_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(USOCKETS_SRC_DIR)/*.c) $(wildcard $(USOCKETS_SRC_DIR)/**/*.c)
cd $(USOCKETS_DIR) && $(CXX_WITH_CCACHE) -I$(USOCKETS_SRC_DIR) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CXXFLAGS) $(UWS_CXX_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(USOCKETS_SRC_DIR)/*.cpp) $(wildcard $(USOCKETS_SRC_DIR)/**/*.cpp)
rm -rf $(BUN_DEPS_DIR)/uws/uSockets/*.o $(BUN_DEPS_DIR)/uws/uSockets/**/*.o $(BUN_DEPS_DIR)/uws/uSockets/*.a $(BUN_DEPS_DIR)/uws/uSockets/*.bc
cd $(USOCKETS_DIR) && $(CC_WITH_CCACHE) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CFLAGS) $(UWS_CC_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(USOCKETS_SRC_DIR)/*.c) $(wildcard $(USOCKETS_SRC_DIR)/**/*.c)
cd $(USOCKETS_DIR) && $(CXX_WITH_CCACHE) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CXXFLAGS) $(UWS_CXX_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(USOCKETS_SRC_DIR)/*.cpp) $(wildcard $(USOCKETS_SRC_DIR)/**/*.cpp)
cd $(USOCKETS_DIR) && $(AR) rcvs $(BUN_DEPS_OUT_DIR)/libusockets.a $(USOCKETS_DIR)/*.{o,bc}
uws: usockets
$(CXX_WITH_CCACHE) -O2 $(EMIT_LLVM_FOR_RELEASE) -fPIC -I$(USOCKETS_SRC_DIR) $(CLANG_FLAGS) $(CFLAGS) $(UWS_CXX_FLAGS) $(UWS_LDFLAGS) $(PLATFORM_LINKER_FLAGS) -c -I$(BUN_DEPS_DIR) $(BUN_DEPS_OUT_DIR)/libusockets.a $(BUN_DEPS_DIR)/libuwsockets.cpp -o $(BUN_DEPS_OUT_DIR)/libuwsockets.o
$(CXX_WITH_CCACHE) -O2 $(EMIT_LLVM_FOR_RELEASE) -fPIC -I$(BUN_DEPS_DIR)/uws/uSockets/src $(CLANG_FLAGS) $(CFLAGS) $(UWS_CXX_FLAGS) $(UWS_LDFLAGS) $(PLATFORM_LINKER_FLAGS) -c -I$(BUN_DEPS_DIR) $(BUN_DEPS_OUT_DIR)/libusockets.a $(BUN_DEPS_DIR)/libuwsockets.cpp -o $(BUN_DEPS_OUT_DIR)/libuwsockets.o
.PHONY: sign-macos-x64
sign-macos-x64:
@@ -781,10 +767,10 @@ sign-macos-aarch64:
gon sign.macos-aarch64.json
cls:
@echo -e "\n\n---\n\n"
@echo "\n\n---\n\n"
jsc-check:
@ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo -e "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. \n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/src/bun.js/WebKit -f $(shell pwd)/src/bun.js/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1)
@ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. \n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/src/bun.js/WebKit -f $(shell pwd)/src/bun.js/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1)
@ls $(JSC_INCLUDE_DIR) >/dev/null 2>&1 || (echo "Failed to access WebKit include directory at $(JSC_INCLUDE_DIR)." && exit 1)
@ls $(JSC_LIB) >/dev/null 2>&1 || (echo "Failed to access WebKit lib directory at $(JSC_LIB)." && exit 1)
@@ -807,11 +793,11 @@ release-safe: prerelease release-safe-only
.PHONY: fmt-cpp
fmt-cpp:
cd src/bun.js/bindings && $(CLANG_FORMAT) *.cpp *.h -i
cd src/bun.js/bindings && clang-format *.cpp *.h -i
.PHONY: fmt-zig
fmt-zig:
cd src && $(ZIG) fmt **/*.zig
cd src && zig fmt **/*.zig
.PHONY: fmt
fmt: fmt-cpp fmt-zig
@@ -900,8 +886,7 @@ check-glibc-version-dependency:
ifeq ($(OS_NAME),darwin)
zig-win32:
$(ZIG) build -Dtarget=x86_64-windows
# Hardened runtime will not work with debugging
bun-codesign-debug:
@@ -923,6 +908,7 @@ bun-codesign-release-local:
bun-codesign-release-local-debug:
.PHONY: jsc
jsc: jsc-build jsc-copy-headers jsc-bindings
.PHONY: jsc-build
@@ -934,6 +920,7 @@ jsc-bindings: headers bindings
clone-submodules:
git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress
CLANG_FORMAT := $(shell command -v clang-format 2> /dev/null)
.PHONY: headers
headers:
@@ -945,7 +932,6 @@ headers:
$(ZIG) translate-c src/bun.js/bindings/headers.h > src/bun.js/bindings/headers.zig
$(BUN_OR_NODE) misctools/headers-cleaner.js
$(ZIG) fmt src/bun.js/bindings/headers.zig
$(CLANG_FORMAT) -i src/bun.js/bindings/ZigGeneratedCode.cpp
.PHONY: jsc-bindings-headers
jsc-bindings-headers: headers
@@ -1090,30 +1076,17 @@ test/wiptest/run: test/wiptest/run.o
release-bin-dir:
echo $(PACKAGE_DIR)
.PHONY: dev-obj-track
dev-obj-track:
bun .scripts/make-dev-timer.ts $(ZIG) build obj -freference-trace -Dcpu="$(CPU_TARGET)"
.PHONY: dev-obj-notrack
dev-obj-notrack:
$(ZIG) build obj -freference-trace -Dcpu="$(CPU_TARGET)"
.PHONY: dev-obj
dev-obj:
ifeq ($(shell which bun),)
dev-obj : dev-obj-notrack
else
dev-obj : dev-obj-track
endif
$(ZIG) build obj -freference-trace -Dcpu="$(CPU_TARGET)"
.PHONY: dev-obj-linux
dev-obj-linux:
$(ZIG) build obj -Dtarget=x86_64-linux-gnu -Dcpu="$(CPU_TARGET)"
.PHONY: dev
dev: mkdir-dev dev-obj bun-link-lld-debug
mkdir-dev:
mkdir -p $(DEBUG_PACKAGE_DIR)
@@ -1142,6 +1115,7 @@ jsc-copy-headers:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/ObjectPropertyConditionSet.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ObjectPropertyConditionSet.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/PolyProtoAccessChain.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/PolyProtoAccessChain.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/InlineCacheCompiler.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/InlineCacheCompiler.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/PutKind.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/PutKind.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StructureStubClearingWatchpoint.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StructureStubClearingWatchpoint.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/AdaptiveInferredPropertyValueWatchpointBase.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AdaptiveInferredPropertyValueWatchpointBase.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StubInfoSummary.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StubInfoSummary.h
@@ -1175,9 +1149,6 @@ jsc-copy-headers:
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/AsyncFunctionPrototype.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AsyncFunctionPrototype.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SymbolObject.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SymbolObject.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSGenerator.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSGenerator.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/UnlinkedFunctionCodeBlock.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/UnlinkedFunctionCodeBlock.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/AggregateError.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AggregateError.h
cp $(WEBKIT_DIR)/Source/JavaScriptCore/API/JSWeakValue.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSWeakValue.h
find $(WEBKIT_RELEASE_DIR)/JavaScriptCore/Headers/JavaScriptCore/ -name "*.h" -exec cp {} $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ \;
# This is a workaround for a JSC bug that impacts aarch64
@@ -1197,12 +1168,10 @@ jsc-build-mac-compile:
-DPORT="JSCOnly" \
-DENABLE_STATIC_JSC=ON \
-DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON \
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_BUILD_TYPE=relwithdebuginfo \
-DUSE_THIN_ARCHIVES=OFF \
-DBUN_FAST_TLS=ON \
-DENABLE_FTL_JIT=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-G Ninja \
$(CMAKE_FLAGS_WITHOUT_RELEASE) \
-DPTHREAD_JIT_PERMISSIONS_API=1 \
@@ -1221,11 +1190,9 @@ jsc-build-mac-compile-lto:
-DPORT="JSCOnly" \
-DENABLE_STATIC_JSC=ON \
-DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON \
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_THIN_ARCHIVES=OFF \
-DBUN_FAST_TLS=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DCMAKE_C_FLAGS="-flto=full" \
-DCMAKE_CXX_FLAGS="-flto=full" \
-DENABLE_FTL_JIT=ON \
@@ -1250,8 +1217,6 @@ jsc-build-mac-compile-debug:
-DUSE_THIN_ARCHIVES=OFF \
-DENABLE_FTL_JIT=ON \
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
-G Ninja \
$(CMAKE_FLAGS_WITHOUT_RELEASE) \
-DPTHREAD_JIT_PERMISSIONS_API=1 \
@@ -1270,13 +1235,11 @@ jsc-build-linux-compile-config:
cmake \
-DPORT="JSCOnly" \
-DENABLE_STATIC_JSC=ON \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_BUILD_TYPE=relwithdebuginfo \
-DUSE_THIN_ARCHIVES=OFF \
-DUSE_BUN_JSC_ADDITIONS=ON \
-DENABLE_FTL_JIT=ON \
-DENABLE_REMOTE_INSPECTOR=ON \
-DJSEXPORT_PRIVATE=WTF_EXPORT_DECLARATION \
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
-USE_VISIBILITY_ATTRIBUTE=1 \
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
-G Ninja \
@@ -1291,7 +1254,7 @@ jsc-build-linux-compile-config:
jsc-build-linux-compile-build:
mkdir -p $(WEBKIT_RELEASE_DIR) && \
cd $(WEBKIT_RELEASE_DIR) && \
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects -DUSE_BUN_JSC_ADDITIONS=ON" \
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects" \
cmake --build $(WEBKIT_RELEASE_DIR) --config relwithdebuginfo --target jsc
@@ -1335,7 +1298,7 @@ jsc-bindings-mac: bindings
MIMALLOC_VALGRIND_ENABLED_FLAG =
ifeq ($(OS_NAME),linux)
MIMALLOC_VALGRIND_ENABLED_FLAG = -DMI_TRACK_VALGRIND=ON
MIMALLOC_VALGRIND_ENABLED_FLAG = -DMI_VALGRIND=ON
endif
@@ -1357,9 +1320,8 @@ mimalloc-debug:
-DMI_OVERRIDE=OFF \
-DCMAKE_C_FLAGS="$(CFLAGS)" \
-DCMAKE_CXX_FLAGS="$(CFLAGS)" \
-GNinja \
. \
&& ninja
&& make -j $(CPUS);
cp $(BUN_DEPS_DIR)/mimalloc/$(_MIMALLOC_DEBUG_FILE) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE)
@@ -1381,31 +1343,24 @@ mimalloc:
-DMI_OVERRIDE=OFF \
-DMI_OSX_ZONE=OFF \
-DCMAKE_C_FLAGS="$(CFLAGS)" \
-GNinja \
. \
&& ninja;
.\
&& make -j $(CPUS);
cp $(BUN_DEPS_DIR)/mimalloc/$(MIMALLOC_INPUT_PATH) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE)
mimalloc-wasm:
rm -rf $(BUN_DEPS_DIR)/mimalloc/CMakeCache* $(BUN_DEPS_DIR)/mimalloc/CMakeFiles
cd $(BUN_DEPS_DIR)/mimalloc; emcmake cmake -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -GNinja -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=OFF .; emmake cmake --build .;
cd $(BUN_DEPS_DIR)/mimalloc; emcmake cmake -DMI_BUILD_SHARED=OFF -DMI_BUILD_STATIC=ON -DMI_BUILD_TESTS=OFF -DMI_BUILD_OBJECT=ON ${MIMALLOC_OVERRIDE_FLAG} -DMI_USE_CXX=ON .; emmake make;
cp $(BUN_DEPS_DIR)/mimalloc/$(MIMALLOC_INPUT_PATH) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE).wasm
# alias for link, incase anyone still types that
.PHONY: bun-link-lld-debug
bun-link-lld-debug: link
.PHONY: link
link: ## link a debug build of bun
bun-link-lld-debug:
$(CXX) $(BUN_LLD_FLAGS_DEBUG) $(DEBUG_FLAGS) $(SYMBOLS) \
-g \
$(DEBUG_BIN)/bun-debug.o \
-W \
-o $(DEBUG_BIN)/bun-debug
@rm -f $(DEBUG_BIN)/bun-debug.o.o 2> /dev/null # workaround for https://github.com/ziglang/zig/issues/14080
@rm -f $(DEBUG_BIN)/bun-debug.o.o 2> /dev/null # workaround for https://github.com/ziglang/zig/issues/14080
link-no-jsc:
bun-link-lld-debug-no-jsc:
$(CXX) $(BUN_LLD_FLAGS_WITHOUT_JSC) $(SYMBOLS) \
-g \
$(DEBUG_BIN)/bun-debug.o \
@@ -1424,19 +1379,6 @@ bun-relink-copy:
mkdir -p $(PACKAGE_DIR)
cp $(BUN_DEPLOY_DIR).o $(BUN_RELEASE_BIN).o
.PHONY: bun-link-lld-profile profile
bun-link-lld-profile:
$(CXX) $(BUN_LLD_FLAGS) $(SYMBOLS) -g -gdwarf-4 -fno-omit-frame-pointer \
$(BUN_RELEASE_BIN).o \
-o $(BUN_RELEASE_BIN) \
-W \
$(OPTIMIZATION_LEVEL) $(RELEASE_FLAGS)
rm -rf $(BUN_RELEASE_BIN).dSYM
cp $(BUN_RELEASE_BIN) $(BUN_RELEASE_BIN)-profile
@rm -f $(BUN_RELEASE_BIN).o.o # workaround for https://github.com/ziglang/zig/issues/14080
build-profile: build-obj bun-link-lld-profile bun-codesign-release-local
bun-link-lld-release:
$(CXX) $(BUN_LLD_FLAGS) $(SYMBOLS) \
$(BUN_RELEASE_BIN).o \
@@ -1482,17 +1424,17 @@ bun-relink: bun-relink-copy bun-link-lld-release bun-link-lld-release-dsym
bun-relink-fast: bun-relink-copy bun-link-lld-release-no-lto
wasm-return1:
$(ZIG) build-lib -OReleaseSmall test/bun.js/wasm-return-1-test.zig -femit-bin=test/bun.js/wasm-return-1-test.wasm -target wasm32-freestanding
zig build-lib -OReleaseSmall test/bun.js/wasm-return-1-test.zig -femit-bin=test/bun.js/wasm-return-1-test.wasm -target wasm32-freestanding
generate-classes:
bun src/bun.js/scripts/generate-classes.ts
$(ZIG) fmt src/bun.js/bindings/generated_classes.zig
$(CLANG_FORMAT) -i src/bun.js/bindings/ZigGeneratedClasses.h src/bun.js/bindings/ZigGeneratedClasses.cpp
clang-format -i src/bun.js/bindings/ZigGeneratedClasses.h src/bun.js/bindings/ZigGeneratedClasses.cpp
generate-sink:
bun src/bun.js/scripts/generate-jssink.js
$(CLANG_FORMAT) -i src/bun.js/bindings/JSSink.cpp src/bun.js/bindings/JSSink.h
./src/bun.js/scripts/create_hash_table src/bun.js/bindings/JSSink.cpp > src/bun.js/bindings/JSSinkLookupTable.h
clang-format -i src/bun.js/bindings/JSSink.cpp src/bun.js/bindings/JSSink.h
$(WEBKIT_DIR)/Source/JavaScriptCore/create_hash_table src/bun.js/bindings/JSSink.cpp > src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/#include "Lookup.h"//' src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/namespace JSC {//' src/bun.js/bindings/JSSinkLookupTable.h
$(SED) -i -e 's/} \/\/ namespace JSC//' src/bun.js/bindings/JSSinkLookupTable.h
@@ -1515,10 +1457,10 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1526,10 +1468,10 @@ $(OBJ_DIR)/%.o: src/bun.js/modules/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1537,10 +1479,10 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/webcore/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1548,10 +1490,10 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/sqlite/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1559,10 +1501,10 @@ $(OBJ_DIR)/%.o: src/io/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1570,21 +1512,21 @@ $(OBJ_DIR)/%.o: $(SRC_DIR)/node_os/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
$(OBJ_DIR)/%.o: src/js/out/%.cpp
$(OBJ_DIR)/%.o: src/bun.js/builtins/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1593,10 +1535,10 @@ $(OBJ_DIR)/%.o: src/bun.js/bindings/webcrypto/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM) \
-c -o $@ $<
@@ -1607,10 +1549,10 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) $(UWS_INCLUDE) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
-DBUN_DEBUG \
$(EMIT_LLVM_FOR_DEBUG) \
-g3 -c -o $@ $<
@@ -1622,10 +1564,10 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/webcore/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
@@ -1635,10 +1577,10 @@ $(DEBUG_OBJ_DIR)/%.o: src/io/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
-DBUN_DEBUG \
$(EMIT_LLVM_FOR_DEBUG) \
-g3 -c -o $@ $<
@@ -1651,10 +1593,10 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/sqlite/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
@@ -1666,25 +1608,25 @@ $(DEBUG_OBJ_DIR)/%.o: $(SRC_DIR)/node_os/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
# $(DEBUG_OBJ_DIR) is not included here because it breaks
# detecting if a file needs to be rebuilt
.PHONY: src/js/out/builtins/%.cpp
$(DEBUG_OBJ_DIR)/%.o: src/js/out/%.cpp
.PHONY: src/bun.js/builtins/%.cpp
$(DEBUG_OBJ_DIR)/%.o: src/bun.js/builtins/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
@@ -1694,10 +1636,10 @@ $(DEBUG_OBJ_DIR)/%.o: src/bun.js/modules/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
@@ -1708,11 +1650,11 @@ $(DEBUG_OBJ_DIR)/%.o: src/bun.js/bindings/webcrypto/%.cpp
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(DEBUG_OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-MMD \
-fno-exceptions \
-I$(SRC_DIR) \
-fno-rtti \
-ferror-limit=10 \
-ferror-limit=1000 \
$(EMIT_LLVM_FOR_DEBUG) \
-DBUN_DEBUG \
-g3 -c -o $@ $<
@@ -1726,7 +1668,7 @@ sizegen:
# Linux uses bundled SQLite3
ifeq ($(OS_NAME),linux)
sqlite:
$(CC) $(EMIT_LLVM_FOR_RELEASE) $(CFLAGS) $(INCLUDE_DIRS) -DSQLITE_ENABLE_COLUMN_METADATA= -DSQLITE_MAX_VARIABLE_NUMBER=250000 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1 -DSQLITE_ENABLE_FTS5=1 -DSQLITE_ENABLE_JSON1=1 $(SRC_DIR)/sqlite/sqlite3.c -c -o $(SQLITE_OBJECT)
$(CC) $(EMIT_LLVM_FOR_RELEASE) $(CFLAGS) $(INCLUDE_DIRS) -DSQLITE_ENABLE_COLUMN_METADATA= -DSQLITE_MAX_VARIABLE_NUMBER=250000 -DSQLITE_ENABLE_RTREE=1 -DSQLITE_ENABLE_FTS3=1 -DSQLITE_ENABLE_FTS3_PARENTHESIS=1 -DSQLITE_ENABLE_JSON1=1 $(SRC_DIR)/sqlite/sqlite3.c -c -o $(SQLITE_OBJECT)
endif
picohttp:
@@ -1822,10 +1764,10 @@ endif
endif
.PHONY: build-unit
build-unit: # to build your unit tests
build-unit: ## to build your unit tests
@rm -rf zig-out/bin/$(testname)
@mkdir -p zig-out/bin
$(ZIG) test $(realpath $(testpath)) \
zig test $(realpath $(testpath)) \
$(testfilterflag) \
$(PACKAGE_MAP) \
--main-pkg-path $(BUN_DIR) \
@@ -1840,10 +1782,10 @@ build-unit: # to build your unit tests
cp zig-out/bin/$(testname) $(testbinpath)
.PHONY: run-all-unit-tests
run-all-unit-tests: # to run your unit tests
run-all-unit-tests: ## to run your unit tests
@rm -rf zig-out/bin/__main_test
@mkdir -p zig-out/bin
$(ZIG) test src/main.zig \
zig test src/main.zig \
$(PACKAGE_MAP) \
--main-pkg-path $(BUN_DIR) \
--test-no-exec \
@@ -1860,11 +1802,15 @@ run-all-unit-tests: # to run your unit tests
run-unit:
@zig-out/bin/$(testname) $(ZIG)
.PHONY: help
help: ## to print this help
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z0-9_-]+:.*?## / {gsub("\\\\n",sprintf("\n%22c",""), $$2);printf "\033[36m%-20s\033[0m \t\t%s\n", $$1, $$2}' $(MAKEFILE_LIST)
.PHONY: test
test: build-unit run-unit
.PHONY: integration-test-dev
integration-test-dev: # to run integration tests
integration-test-dev: ## to run integration tests
USE_EXISTING_PROCESS=true TEST_SERVER_URL=http://localhost:3000 node test/scripts/browser.js
copy-install:
@@ -1878,81 +1824,32 @@ copy-to-bun-release-dir-bin:
PACKAGE_MAP = --pkg-begin async_io $(BUN_DIR)/src/io/io_darwin.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin javascript_core $(BUN_DIR)/src/jsc.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end
.PHONY: base64
base64:
cd $(BUN_DEPS_DIR)/base64 && make clean && rm -rf CMakeCache.txt CMakeFiles && cmake $(CMAKE_FLAGS) . && make
cp $(BUN_DEPS_DIR)/base64/libbase64.a $(BUN_DEPS_OUT_DIR)/libbase64.a
.PHONY: cold-jsc-start
cold-jsc-start:
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
$(MACOS_MIN_FLAG) \
$(OPTIMIZATION_LEVEL) \
${MMD_IF_LOCAL} \
-fno-exceptions \
-fno-rtti \
-ferror-limit=10 \
$(LIBICONV_PATH) \
$(DEFAULT_LINKER_FLAGS) \
$(PLATFORM_LINKER_FLAGS) \
$(ICU_FLAGS) \
$(JSC_FILES) \
misctools/cold-jsc-start.cpp -o cold-jsc-start
.PHONY: vendor-without-npm
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares zstd base64
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares
.PHONY: vendor-without-check
vendor-without-check: npm-install vendor-without-npm
.PHONY: vendor
vendor: assert-deps submodule vendor-without-check
vendor: require init-submodules vendor-without-check
.PHONY: vendor-dev
vendor-dev: assert-deps submodule npm-install-dev vendor-without-npm
vendor-dev: require init-submodules npm-install-dev vendor-without-npm
.PHONY: bun
bun: vendor identifier-cache build-obj bun-link-lld-release bun-codesign-release-local
.PHONY: static-hash-table
static-hash-table:
bun src/js/_codegen/static-hash-tables.ts
.PHONY: cpp
cpp: ## compile src/js/builtins + all c++ code then link
@make clean-bindings js
@make static-hash-table
.PHONY: regenerate-bindings
regenerate-bindings:
@make clean-bindings builtins
@make bindings -j$(CPU_COUNT)
@make link
.PHONY: cpp
cpp-no-link:
@make clean-bindings js
@make bindings -j$(CPU_COUNT)
.PHONY: zig
zig: ## compile zig code then link
@make mkdir-dev dev-obj link
.PHONY: zig-no-link
zig-no-link:
@make mkdir-dev dev-obj
.PHONY: dev
dev: # combo of `make cpp` and `make zig`
@make cpp-no-link zig-no-link -j2
@make link
.PHONY: setup
setup: vendor-dev identifier-cache clean-bindings
make jsc-check dev
setup: vendor-dev builtins identifier-cache clean-bindings
make jsc-check
make bindings -j$(CPU_COUNT)
@echo ""
@echo "First build complete!"
@echo "\"bun-debug\" is available at $(DEBUG_BIN)/bun-debug"
@echo "Development environment setup complete"
@echo "Run \`make dev\` to build \`bun-debug\`"
@echo ""
.PHONY: help
help: ## to print this help
@echo "For detailed build instructions, see https://bun.sh/docs/project/development"
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z0-9_-]+:.*?## / {gsub("\\\\n",sprintf("\n%22c",""), $$2);printf "\033[36m%-20s\033[0m \t\t%s\n", $$1, $$2}' $(MAKEFILE_LIST)

View File

@@ -24,14 +24,14 @@
## What is Bun?
> **Bun is under active development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
> **Bun is still under development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keeps tabs on future releases.
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
```bash
bun run index.tsx # TS and JSX supported out-of-the-box
bun run index.tsx # TS and JSX supported out of the box
```
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager. Instead of 1,000 node_modules for development, you only need `bun`. Bun's built-in tools are significantly faster than existing options and usable in existing Node.js projects with little to no changes.
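For illustration only — the file name and assertion below are hypothetical, not taken from this repository — a minimal test file for the built-in runner looks roughly like this and runs with `bun test`:
```js
// math.test.js — a hypothetical test file; run it with `bun test`
import { expect, test } from "bun:test";

test("addition", () => {
  expect(2 + 2).toBe(4);
});
```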
@@ -48,7 +48,7 @@ bunx cowsay 'Hello, world!' # execute a package
Bun supports Linux (x64 & arm64) and macOS (x64 & Apple Silicon).
> **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.
>
>
> **Windows users** — Bun does not currently provide a native Windows build. We're working on this; progress can be tracked at [this issue](https://github.com/oven-sh/bun/issues/43). In the meantime, use one of the installation methods below for Windows Subsystem for Linux.
```sh
@@ -67,6 +67,7 @@ docker pull oven/bun
docker run --rm --init --ulimit memlock=-1:-1 oven/bun
```
### Upgrade
To upgrade to the latest version of Bun, run:
@@ -93,14 +94,13 @@ bun upgrade --canary
- [`bun run`](https://bun.sh/docs/cli/run)
- [`bun install`](https://bun.sh/docs/cli/install)
- [`bun test`](https://bun.sh/docs/cli/test)
- [`bun init`](https://bun.sh/docs/templates#bun-init)
- [`bun create`](https://bun.sh/docs/templates#bun-create)
- [`bun create`](https://bun.sh/docs/cli/create)
- [`bunx`](https://bun.sh/docs/cli/bunx)
- Runtime
- [Runtime](https://bun.sh/docs/runtime/index)
- [Module resolution](https://bun.sh/docs/runtime/modules)
- [Hot &amp; live reloading](https://bun.sh/docs/runtime/hot)
- [Plugins](https://bun.sh/docs/bundler/plugins)
- [Plugins](https://bun.sh/docs/runtime/plugins)
- Ecosystem
- [Node.js](https://bun.sh/docs/ecosystem/nodejs)
- [TypeScript](https://bun.sh/docs/ecosystem/typescript)
@@ -124,8 +124,10 @@ bun upgrade --canary
- [HTMLRewriter](https://bun.sh/docs/api/html-rewriter)
- [Testing](https://bun.sh/docs/api/test)
- [Utils](https://bun.sh/docs/api/utils)
- [DNS](https://bun.sh/docs/api/dns)
- [Node-API](https://bun.sh/docs/api/node-api)
## Contributing
Refer to the [Project > Development](https://bun.sh/docs/project/development) guide to start contributing to Bun.

View File

@@ -1,12 +0,0 @@
# Security Policy
## Supported Versions
| Version | Supported |
| ------- | ------------------ |
| 1.x.x | :white_check_mark: |
## Reporting a Vulnerability
Report any discovered vulnerabilities to the Bun team by emailing `security@bun.sh`. Your report will be acknowledged within 5 days, and a team member will be assigned as the primary handler. To the greatest extent possible, the security team will endeavor to keep you informed of the progress being made towards a fix and full announcement, and may ask for additional information or guidance surrounding the reported issue.

View File

@@ -1,30 +0,0 @@
// https://github.com/nodejs/node/issues/34493
import { AsyncLocalStorage } from "async_hooks";
const asyncLocalStorage = new AsyncLocalStorage();
// let fn = () => Promise.resolve(2).then(() => new Promise(resolve => queueMicrotask(resolve)));
let fn = () => /test/.test("test");
let runWithExpiry = async (expiry, fn) => {
let iterations = 0;
while (Date.now() < expiry) {
await fn();
iterations++;
}
return iterations;
};
console.log(`Performed ${await runWithExpiry(Date.now() + 1000, fn)} iterations to warmup`);
let withAls;
await asyncLocalStorage.run(123, async () => {
withAls = await runWithExpiry(Date.now() + 45000, fn);
console.log(`Performed ${withAls} iterations (with ALS enabled)`);
});
asyncLocalStorage.disable();
let withoutAls = await runWithExpiry(Date.now() + 45000, fn);
console.log(`Performed ${withoutAls} iterations (with ALS disabled)`);
console.log("ALS penalty: " + Math.round((1 - withAls / withoutAls) * 10000) / 100 + "%");

Binary file not shown.

View File

@@ -1,171 +0,0 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
esbuild

View File

@@ -1,40 +0,0 @@
# Bundler benchmark
This is a performance benchmark of the following bundlers:
- Bun
- esbuild
- Parcel 2
- Rollup + Terser
- Webpack
It is an exact copy of [`esbuild`'s benchmark](https://github.com/evanw/esbuild/blob/main/Makefile), aside from the fact that Bun [has been added](https://github.com/colinhacks/esbuild/commit/1b928b7981aa7edfadf77fcf8931bb8d6f38cd96). The benchmark bundles 10 copies of the large [three.js](https://threejs.org/), with minification and source maps enabled.
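For a rough sense of the work each tool does, here is a sketch using esbuild's JavaScript API; the entry point and output path are hypothetical, and the actual benchmark drives every bundler from `run-bench.sh`:
```js
// Illustrative only: a minified bundle with source maps, roughly matching the
// benchmark's settings. "entry.js" would import three.js (ten times in the real run).
import { build } from "esbuild";

await build({
  entryPoints: ["entry.js"],
  bundle: true,
  minify: true,
  sourcemap: true,
  outfile: "out/bundle.js",
});
```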
To run the benchmark:
```sh
$ chmod +x run-bench.sh
$ ./run-bench.sh
```
Various output will be written to the console by each bundler. Scan through the results for lines that look like this underneath each bundler output:
```sh
real <number>
user <number>
sys <number>
```
These lines are generated by the `time` command which is used to benchmark each build.
## Results
The `real` results, as run on a 16-inch M1 Macbook Pro:
| Bundler | Time |
| ------- | ------ |
| Bun | 0.17s |
| esbuild | 0.33s |
| Rollup | 18.82s |
| Webpack | 26.21s |
| Parcel | 17.95s |

Binary file not shown.

View File

@@ -1 +0,0 @@
console.log("Hello via Bun!");

View File

@@ -1,8 +0,0 @@
{
"name": "bundle",
"module": "index.ts",
"type": "module",
"devDependencies": {
"bun-types": "^0.7.0"
}
}

View File

@@ -1,3 +0,0 @@
git clone git@github.com:colinhacks/esbuild.git
cd esbuild
make bench-three

View File

@@ -1,20 +0,0 @@
{
"compilerOptions": {
"lib": [
"ESNext"
],
"module": "esnext",
"target": "esnext",
"moduleResolution": "bundler",
"strict": true,
"downlevelIteration": true,
"skipLibCheck": true,
"jsx": "react-jsx",
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"types": [
"bun-types" // add Bun global
]
}
}

View File

@@ -1,31 +0,0 @@
import EventEmitter3 from "eventemitter3";
import { group } from "mitata";
import EventEmitterNative from "node:events";
export const implementations = [
{
EventEmitter: EventEmitterNative,
name: process.isBun ? (EventEmitterNative.init ? "bun" : "C++") : "node:events",
monkey: true,
},
// { EventEmitter: EventEmitter3, name: "EventEmitter3" },
].filter(Boolean);
for (const impl of implementations) {
impl.EventEmitter?.setMaxListeners?.(Infinity);
}
export function groupForEmitter(name, cb) {
if (implementations.length === 1) {
return cb({
...implementations[0],
name: `${name}: ${implementations[0].name}`,
});
} else {
return group(name, () => {
for (let impl of implementations) {
cb(impl);
}
});
}
}

View File

@@ -1,96 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;
groupForEmitter("single emit", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(name, () => {
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
groupForEmitter("on x 10_000 (handler)", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
bench(name, () => {
var cb = event => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
if (!called) throw new Error("not called");
});
});
// for (let { impl: EventEmitter, name, monkey } of []) {
// if (monkey) {
// var monkeyEmitter = Object.assign({}, EventEmitter.prototype);
// monkeyEmitter.on("hello", event => {
// event.preventDefault();
// });
// bench(`[monkey] ${className}.emit`, () => {
// var called = false;
// monkeyEmitter.emit("hello", {
// preventDefault() {
// id++;
// called = true;
// },
// });
// if (!called) {
// throw new Error("monkey failed");
// }
// });
// bench(`[monkey] ${className}.on x 10_000 (handler)`, () => {
// var cb = () => {
// event.preventDefault();
// };
// monkeyEmitter.on("hey", cb);
// for (let i = 0; i < 10_000; i++)
// monkey.emit("hey", {
// preventDefault() {
// id++;
// },
// });
// monkeyEmitter.off("hey", cb);
// });
// }
// }
// var target = new EventTarget();
// target.addEventListener("hello", event => {});
// bench("EventTarget.dispatch", () => {
// target.dispatchEvent(event);
// });
// var hey = new Event("hey");
// bench("EventTarget.on x 10_000 (handler)", () => {
// var handler = event => {};
// target.addEventListener("hey", handler);
// for (let i = 0; i < 10_000; i++) target.dispatchEvent(hey);
// target.removeEventListener("hey", handler);
// });
await run();

View File

@@ -1,40 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;
groupForEmitter("test 1", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(name, () => {
emitter.once("hello", event => {
event.preventDefault();
});
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
groupForEmitter("test 2", ({ EventEmitter, name }) => {
const emitter = new EventEmitter();
bench(name, () => {
emitter.once("hello", event => {
event.preventDefault();
});
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
});
await run();

View File

@@ -1,63 +0,0 @@
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
// Pseudo RNG is derived from https://stackoverflow.com/a/424445
let rngState = 123456789;
function nextInt() {
const m = 0x80000000; // 2**31;
const a = 1103515245;
const c = 12345;
rngState = (a * rngState + c) % m;
return rngState;
}
function nextRange(start, end) {
// returns in range [start, end): including start, excluding end
// can't modulo nextInt because of weak randomness in lower bits
const rangeSize = end - start;
const randomUnder1 = nextInt() / 0x7fffffff; // 2**31 - 1
return start + Math.floor(randomUnder1 * rangeSize);
}
const chunks = new Array(1024).fill(null).map((_, j) => {
const arr = new Uint8Array(1024);
for (let i = 0; i < arr.length; i++) {
arr[i] = nextRange(0, 256);
}
return arr;
});
groupForEmitter("stream simulation", ({ EventEmitter, name }) => {
bench(name, () => {
let id = 0;
const stream = new EventEmitter();
stream.on("start", res => {
if (res.status !== 200) throw new Error("not 200");
});
const recived = [];
stream.on("data", req => {
recived.push(req);
});
stream.on("end", ev => {
ev.preventDefault();
});
// simulate a stream
stream.emit("start", { status: 200 });
for (let chunk of chunks) {
stream.emit("data", chunk);
}
stream.emit("end", {
preventDefault() {
id++;
},
});
if (id !== 1) throw new Error("not implemented right");
if (recived.length !== 1024) throw new Error("not implemented right");
});
});
await run();

View File

@@ -10,7 +10,7 @@ To run in Bun:
```bash
# so it doesn't run the vitest one
bun test expect-to-equal.test.js
bun wiptest expect-to-equal.test.js
```
To run in Jest:

View File

@@ -43,7 +43,7 @@ pub fn main() anyerror!void {
var position = try std.fmt.parseInt(u32, position_str, 10);
const filepath = try std.fs.path.resolve(allocator, &.{basepath});
var file = try std.fs.openFileAbsolute(filepath, .{ .write = true });
var ms = @as(u64, @truncate((try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms));
var ms = @truncate(u64, (try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms);
std.debug.assert(ms > 0);
// std.debug.assert(std.math.isFinite(position));
var prng = std.rand.DefaultPrng.init(0);
@@ -125,30 +125,30 @@ pub fn main() anyerror!void {
);
};
counters[counter].timestamp = @as(u64, @truncate(@as(u128, @intCast(std.time.nanoTimestamp())) / (std.time.ns_per_ms / 10)));
counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[1] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[2] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[7] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[8] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[13] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[14] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[19] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[20] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -203,7 +203,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();
all_timestamps[0] = wrote.len;
for (counters, 0..) |count, i| {
for (counters) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}

View File

@@ -43,7 +43,7 @@ pub fn main() anyerror!void {
var position = try std.fmt.parseInt(u32, position_str, 10);
const filepath = try std.fs.path.resolve(allocator, &.{basepath});
var file = try std.fs.openFileAbsolute(filepath, .{ .write = true });
var ms = @as(u64, @truncate((try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms));
var ms = @truncate(u64, (try std.fmt.parseInt(u128, args[args.len - 1], 10)) * std.time.ns_per_ms);
std.debug.assert(ms > 0);
// std.debug.assert(std.math.isFinite(position));
var prng = std.rand.DefaultPrng.init(0);
@@ -112,30 +112,30 @@ pub fn main() anyerror!void {
\\
++ SIMULATE_LONG_FILE;
counters[counter].timestamp = @as(u64, @truncate(@as(u128, @intCast(std.time.nanoTimestamp())) / (std.time.ns_per_ms / 10)));
counters[counter].timestamp = @truncate(u64, @intCast(u128, std.time.nanoTimestamp()) / (std.time.ns_per_ms / 10));
counters[counter].rotate = rotate % 360;
counters[counter].percent = std.math.mod(f64, std.math.round(((progress_bar + 1.0) / destination_count) * 1000) / 1000, 100) catch 0;
counters[counter].color_values[0] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[1] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[2] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[0][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[0] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][0] + 1) % 256)) * 0.8));
counters[counter].color_values[1] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][1] + 1) % 256)) * 0.8));
counters[counter].color_values[2] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[0][2] + 1) % 256)) * 0.8));
counters[counter].color_values[3] = (colors[0][0] + 1) % 256;
counters[counter].color_values[4] = (colors[0][1] + 1) % 256;
counters[counter].color_values[5] = (colors[0][2] + 1) % 256;
counters[counter].color_values[6] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[7] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[8] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[1][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[6] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][0] + 1) % 256)) * 0.8));
counters[counter].color_values[7] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][1] + 1) % 256)) * 0.8));
counters[counter].color_values[8] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[1][2] + 1) % 256)) * 0.8));
counters[counter].color_values[9] = (colors[1][0] + 1) % 256;
counters[counter].color_values[10] = (colors[1][1] + 1) % 256;
counters[counter].color_values[11] = (colors[1][2] + 1) % 256;
counters[counter].color_values[12] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[13] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[14] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[2][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[12] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][0] + 1) % 256)) * 0.8));
counters[counter].color_values[13] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][1] + 1) % 256)) * 0.8));
counters[counter].color_values[14] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[2][2] + 1) % 256)) * 0.8));
counters[counter].color_values[15] = (colors[2][0] + 1) % 256;
counters[counter].color_values[16] = (colors[2][1] + 1) % 256;
counters[counter].color_values[17] = (colors[2][2] + 1) % 256;
counters[counter].color_values[18] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][0] + 1) % 256))) * 0.8)));
counters[counter].color_values[19] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][1] + 1) % 256))) * 0.8)));
counters[counter].color_values[20] = @as(u32, @intFromFloat(std.math.round(@as(f64, @floatFromInt(((colors[3][2] + 1) % 256))) * 0.8)));
counters[counter].color_values[18] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][0] + 1) % 256)) * 0.8));
counters[counter].color_values[19] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][1] + 1) % 256)) * 0.8));
counters[counter].color_values[20] = @floatToInt(u32, std.math.round(@intToFloat(f64, ((colors[3][2] + 1) % 256)) * 0.8));
counters[counter].color_values[21] = (colors[3][0] + 1) % 256;
counters[counter].color_values[22] = (colors[3][1] + 1) % 256;
counters[counter].color_values[23] = (colors[3][2] + 1) % 256;
@@ -190,7 +190,7 @@ pub fn main() anyerror!void {
_ = try recorder.wait();
all_timestamps[0] = wrote.len;
for (counters, 0..) |count, i| {
for (counters) |count, i| {
all_timestamps[i + 1] = count.timestamp;
}

View File

@@ -5,4 +5,4 @@
"jsx": "react-jsx",
"paths": {}
}
}
}

View File

@@ -1,33 +1,11 @@
# `install` benchmark
Requires [`hyperfine`](https://github.com/sharkdp/hyperfine). The goal of this benchmark is to compare installation performance of Bun with other package managers _when caches are hot_.
Requires [`hyperfine`](https://github.com/sharkdp/hyperfine)
### With lockfile, online mode
To run the benchmark with the standard "install" command for each package manager:
```sh
```
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 --runs 3 'bun install' 'pnpm install' 'yarn' 'npm install'
```
### With lockfile, offline mode
Even though all packages are cached, some tools may hit the npm API during the version resolution step. (This is not the same as re-downloading a package.) To entirely avoid network calls, the other package managers require `--prefer-offline/--offline` flag. To run the benchmark using "offline" mode:
```sh
$ hyperfine --prepare 'rm -rf node_modules' --runs 1 'bun install' 'pnpm install --prefer-offline' 'yarn --offline' 'npm install --prefer-offline'
```
### Without lockfile, offline mode
To run the benchmark with offline mode but without lockfiles:
```sh
$ hyperfine --prepare 'rm -rf node_modules' --warmup 1 'rm bun.lockb && bun install' 'rm pnpm-lock.yaml && pnpm install --prefer-offline' 'rm yarn.lock && yarn --offline' 'rm package-lock.json && npm install --prefer-offline'
```
##
To check that the app is working as expected:
```

1986
bench/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -1,13 +1,11 @@
{
"name": "bench",
"dependencies": {
"@babel/core": "^7.16.10",
"@babel/preset-react": "^7.16.7",
"@swc/core": "^1.2.133",
"benchmark": "^2.1.4",
"mitata": "^0.1.6",
"esbuild": "^0.14.12",
"eventemitter3": "^5.0.0",
"mitata": "^0.1.6"
"@swc/core": "^1.2.133",
"@babel/core": "^7.16.10",
"@babel/preset-react": "^7.16.7"
},
"scripts": {
"ffi": "cd ffi && bun run deps && bun run build && bun run bench",

View File

@@ -1,5 +1,5 @@
// to run this:
// NODE_ENV=production bun react-hello-world.jsx
// NODE_ENV=production bun --jsx-production react-hello-world.jsx
// Make sure you're using react-dom@18.3.0 or later.
// Currently that is available at react-dom@next (which is installed in this repository)

File diff suppressed because one or more lines are too long

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
function doIt(...args) {
// we use .at() to prevent constant folding optimizations

View File

@@ -1,5 +1,5 @@
// https://github.com/oven-sh/bun/issues/1096
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
const identity = x => x;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var myArray = new Array(5);
bench("[1, 2, 3, 4, 5].shift()", () => {

View File

@@ -1,6 +1,5 @@
// @runtime bun
import { ArrayBufferSink } from "bun";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var short = "Hello World!";
var shortUTF16 = "Hello World 💕💕💕";

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
import * as assert from "assert";
bench("deepEqual", () => {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
bench("noop", function () {});
bench("async function(){}", async function () {});

View File

@@ -1,9 +1,6 @@
// @runtime bun,node,deno
import { bench, run } from "./runner.mjs";
import process from "node:process";
import { Buffer } from "node:buffer";
import { bench, run } from "mitata";
const N = parseInt(process.env.RUN_COUNTER ?? "10000", 10);
const N = parseInt(process.argv.slice(2).at(0) || "10", 10);
var isBuffer = new Buffer(0);
var isNOtBuffer = "not a buffer";

View File

@@ -1,29 +0,0 @@
import { bench, run } from "./runner.mjs";
import { Buffer } from "node:buffer";
import crypto from "node:crypto";
const bigBuffer = Buffer.from("hello world".repeat(10000));
const converted = bigBuffer.toString("base64");
const uuid = crypto.randomBytes(16);
bench(`Buffer(${bigBuffer.byteLength}).toString('base64')`, () => {
return bigBuffer.toString("base64");
});
bench(`Buffer(${uuid.byteLength}).toString('base64')`, () => {
return uuid.toString("base64");
});
bench(`Buffer(${bigBuffer.byteLength}).toString('hex')`, () => {
return bigBuffer.toString("hex");
});
bench(`Buffer(${uuid.byteLength}).toString('hex')`, () => {
return uuid.toString("hex");
});
bench(`Buffer(${bigBuffer.byteLength}).toString('ascii')`, () => {
return bigBuffer.toString("ascii");
});
await run();

BIN
bench/snippets/bun.lockb Executable file

Binary file not shown.

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
import { readFileSync } from "fs";
import { allocUnsafe } from "bun";

View File

@@ -1,3 +0,0 @@
import { cp } from "fs/promises";
await cp(process.argv[2], process.argv[3], { recursive: true });

View File

@@ -1,31 +0,0 @@
import { mkdirSync, writeFileSync } from "fs";
import { bench, run } from "./runner.mjs";
import { cp } from "fs/promises";
import { join } from "path";
import { tmpdir } from "os";
const hugeDirectory = (() => {
const root = join(tmpdir(), "huge");
const base = join(root, "directory", "for", "benchmarks", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10");
mkdirSync(base, {
recursive: true,
});
for (let i = 0; i < 1000; i++) {
writeFileSync(join(base, "file-" + i + ".txt"), "Hello, world! " + i);
}
return root;
})();
const hugeFilePath = join(tmpdir(), "huge-file-0.txt");
const hugeText = "Hello, world!".repeat(1000000);
writeFileSync(hugeFilePath, hugeText);
var hugeCopyI = 0;
bench("cp -r (1000 files)", async b => {
await cp(hugeDirectory, join(tmpdir(), "huge-copy" + hugeCopyI++), { recursive: true });
});
bench("cp 1 " + ((hugeText.length / 1024) | 0) + " KB file", async b => {
await cp(hugeFilePath, join(tmpdir(), "huge-file" + hugeCopyI++));
});
await run();

View File

@@ -1,29 +0,0 @@
// https://github.com/oven-sh/bun/issues/2190
import { bench, run } from "mitata";
import { createHash } from "node:crypto";
const data =
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";
const scenarios = [
{ alg: "md5", digest: "hex" },
{ alg: "md5", digest: "base64" },
{ alg: "sha1", digest: "hex" },
{ alg: "sha1", digest: "base64" },
{ alg: "sha256", digest: "hex" },
{ alg: "sha256", digest: "base64" },
];
for (const { alg, digest } of scenarios) {
bench(`${alg}-${digest}`, () => {
createHash(alg).update(data).digest(digest);
});
if ("Bun" in globalThis) {
bench(`${alg}-${digest} (Bun.CryptoHasher)`, () => {
new Bun.CryptoHasher(alg).update(data).digest(digest);
});
}
}
run();

View File

@@ -1,15 +1,15 @@
// so it can run in environments without node module resolution
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
import crypto from "node:crypto";
var foo = Buffer.allocUnsafe(512);
var foo = Buffer.allocUnsafe(16384);
foo.fill(123);
// if ("Bun" in globalThis) {
// const { CryptoHasher } = Bun;
// bench("Bun.CryptoHasher(sha512)", () => {
// var hasher = new CryptoHasher("sha512");
// bench("CryptoHasher Blake2b256", () => {
// var hasher = new CryptoHasher("blake2b256");
// hasher.update(foo);
// hasher.digest();
// });

View File

@@ -1,26 +0,0 @@
// https://github.com/oven-sh/bun/issues/2190
import { bench, run } from "mitata";
import { createHash } from "node:crypto";
const data =
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";
const scenarios = [
{ alg: "md5", digest: "hex" },
{ alg: "md5", digest: "base64" },
{ alg: "sha1", digest: "hex" },
{ alg: "sha1", digest: "base64" },
{ alg: "sha256", digest: "hex" },
{ alg: "sha256", digest: "base64" },
];
for (const { alg, digest } of scenarios) {
bench(`${alg}-${digest}`, () => {
const hasher = createHash(alg);
hasher.write(data);
hasher.end();
hasher.read();
});
}
run();

View File

@@ -1,6 +1,12 @@
// so it can run in environments without node module resolution
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import crypto from "node:crypto";
var crypto = globalThis.crypto;
if (!crypto) {
crypto = await import("node:crypto");
}
var foo = new Uint8Array(65536);
bench("crypto.getRandomValues(65536)", () => {
crypto.getRandomValues(foo);
@@ -16,8 +22,4 @@ bench("crypto.randomUUID()", () => {
return crypto.randomUUID()[2];
});
bench("crypto.randomInt()", () => {
return crypto.randomInt(0, 100);
});
await run();

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
import fastDeepEquals from "fast-deep-equal/es6/index";
// const Date = globalThis.Date;

View File

@@ -1,5 +1,5 @@
import { lookup, resolve } from "node:dns/promises";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
bench("(cached) dns.lookup remote x 50", async () => {
var tld = "example.com";

View File

@@ -1,5 +1,5 @@
import { dns } from "bun";
import { bench, run, group } from "./runner.mjs";
import { bench, run, group } from "mitata";
async function forEachBackend(name, fn) {
group(name, () => {

101
bench/snippets/emitter.mjs Normal file
View File

@@ -0,0 +1,101 @@
// **so this file can run in node**
import { createRequire } from "node:module";
const require = createRequire(import.meta.url);
// --
const EventEmitterNative = require("node:events").EventEmitter;
const TypedEmitter = require("tiny-typed-emitter").TypedEmitter;
const EventEmitter3 = require("eventemitter3").EventEmitter;
import { bench, run } from "../../node_modules/mitata/src/cli.mjs";
const event = new Event("hello");
var id = 0;
for (let [EventEmitter, className] of [
[EventEmitterNative, "EventEmitter"],
[TypedEmitter, "TypedEmitter"],
[EventEmitter3, "EventEmitter3"],
]) {
const emitter = new EventEmitter();
emitter.on("hello", event => {
event.preventDefault();
});
bench(`${className}.emit`, () => {
emitter.emit("hello", {
preventDefault() {
id++;
},
});
});
bench(`${className}.on x 10_000 (handler)`, () => {
var cb = event => {
event.preventDefault();
};
emitter.on("hey", cb);
var called = false;
for (let i = 0; i < 10_000; i++)
emitter.emit("hey", {
preventDefault() {
id++;
called = true;
},
});
emitter.off("hey", cb);
if (!called) throw new Error("not called");
});
if (EventEmitter !== EventEmitter3) {
var monkey = Object.assign({}, EventEmitter.prototype);
monkey.on("hello", event => {
event.preventDefault();
});
bench(`[monkey] ${className}.emit`, () => {
var called = false;
monkey.emit("hello", {
preventDefault() {
id++;
called = true;
},
});
if (!called) {
throw new Error("monkey failed");
}
});
bench(`[monkey] ${className}.on x 10_000 (handler)`, () => {
var cb = () => {
event.preventDefault();
};
monkey.on("hey", cb);
for (let i = 0; i < 10_000; i++)
monkey.emit("hey", {
preventDefault() {
id++;
},
});
monkey.off("hey", cb);
});
}
}
var target = new EventTarget();
target.addEventListener("hello", event => {});
bench("EventTarget.dispatch", () => {
target.dispatchEvent(event);
});
var hey = new Event("hey");
bench("EventTarget.on x 10_000 (handler)", () => {
var handler = event => {};
target.addEventListener("hey", handler);
for (let i = 0; i < 10_000; i++) target.dispatchEvent(hey);
target.removeEventListener("hey", handler);
});
await run();

View File

@@ -1,12 +0,0 @@
import { bench, run } from "./runner.mjs";
var err = new Error();
bench("Error.captureStackTrace(err)", () => {
Error.captureStackTrace(err);
});
bench("Error.prototype.stack", () => {
new Error().stack;
});
await run();

View File

@@ -1,5 +1,5 @@
import { group } from "./runner.mjs";
import { bench, run } from "./runner.mjs";
import { group } from "mitata";
import { bench, run } from "mitata";
import { encode as htmlEntityEncode } from "html-entities";
import { escape as heEscape } from "he";

View File

@@ -1,5 +1,5 @@
import { viewSource, dlopen, CString, ptr, toBuffer, toArrayBuffer, FFIType, callback } from "bun:ffi";
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
const types = {
returns_true: {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
const input =
"Hello, World! foo bar baz qux quux corge grault garply waldo fred plugh xyzzy thud z a b c d e f g h i j k l m n o p q r s t u v w x y z".split(

View File

@@ -1,57 +0,0 @@
import { bench, run } from "./runner.mjs";
var obj = {
"restApiRoot": "/api",
"host": "0.0.0.0",
"port": 3000,
"remoting": {
"context": false,
"rest": {
"handleErrors": false,
"normalizeHttpPath": false,
"xml": false,
},
"json": {
"strict": false,
"limit": "100kb",
},
"urlencoded": {
"extended": true,
"limit": "100kb",
boop: {
"restApiRoot": "/api",
"host": "0.0.0.0",
"port": 3000,
"remoting": {
"context": false,
"rest": {
"handleErrors": false,
"normalizeHttpPath": false,
"xml": false,
},
"json": {
"strict": false,
"limit": "100kb",
},
"urlencoded": {
"extended": true,
"limit": "100kb",
},
"cors": false,
},
},
},
"cors": false,
},
};
var big = JSON.stringify(obj);
bench("JSON.parse(obj)", () => {
globalThis.foo = JSON.parse(big);
});
bench("JSON.stringify(obj)", () => {
globalThis.bar = JSON.stringify(obj);
});
await run();

View File

@@ -1,65 +0,0 @@
// This is a stress test of some internals in how Bun does the module.exports assignment.
// If it crashes or throws then this fails
import("./runner.mjs").then(({ bench, run }) => {
bench("Object.defineProperty(module, 'exports', { get() { return 42; } })", () => {
Object.defineProperty(module, "exports", {
get() {
return 42;
},
set() {
throw new Error("bad");
},
configurable: true,
});
if (module.exports !== 42) throw new Error("bad");
if (!Object.getOwnPropertyDescriptor(module, "exports").get) throw new Error("bad");
});
bench("Object.defineProperty(module.exports = {})", () => {
Object.defineProperty(module, "exports", {
value: { abc: 123 },
});
if (!module.exports.abc) throw new Error("bad");
if (Object.getOwnPropertyDescriptor(module, "exports").value !== module.exports) throw new Error("bad");
});
bench("module.exports = {}", () => {
module.exports = { abc: 123 };
if (!module.exports.abc) throw new Error("bad");
if (Object.getOwnPropertyDescriptor(module, "exports").value !== module.exports) throw new Error("bad");
});
run().then(() => {
module.exports = {
a: 1,
};
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id],
__dirname,
Object.keys(require(module.id)),
require(module.id),
);
module.exports = function lol() {
return 42;
};
console.log(module.exports, module.exports());
queueMicrotask(() => {
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id]?.exports,
__dirname,
Object.keys(require(module.id)),
require(module.id),
);
});
});
});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
// These are no-op C++ functions that are exported to JS.
const lazy = globalThis[Symbol.for("Bun.lazy")];

View File

@@ -1,42 +0,0 @@
// @runtime node, bun
import { bench, run } from "./runner.mjs";
import * as vm from "node:vm";
const context = {
animal: "cat",
count: 2,
};
const script = new vm.Script("animal = 'hey'");
vm.createContext(context);
bench("vm.Script.runInContext", () => {
script.runInContext(context);
});
bench("vm.Script.runInThisContext", () => {
script.runInThisContext(context);
});
bench("vm.Script.runInNewContext", () => {
script.runInNewContext(context);
});
bench("vm.runInContext", () => {
vm.runInContext("animal = 'hey'", context);
});
bench("vm.runInNewContext", () => {
vm.runInNewContext("animal = 'hey'", context);
});
bench("vm.runInThisContext", () => {
vm.runInThisContext("animal = 'hey'", context);
});
bench("vm.createContext", () => {
vm.createContext({ yo: 1 });
});
await run();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var noop = globalThis[Symbol.for("Bun.lazy")]("noop");
var { function: noopFn, callback } = noop;

View File

@@ -24,7 +24,7 @@ const obj = {
w: 23,
};
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
var val = 0;
bench("Object.values(literal)", () => {

View File

@@ -0,0 +1,7 @@
{
"dependencies": {
"eventemitter3": "^5.0.0",
"tiny-typed-emitter": "latest"
},
"prettier": "../../.prettierrc.cjs"
}

View File

@@ -1,5 +1,5 @@
import { group } from "./runner.mjs";
import { bench, run } from "./runner.mjs";
import { group } from "mitata";
import { bench, run } from "mitata";
bench("performance.now x 1000", () => {
for (let i = 0; i < 1000; i++) {
performance.now();

View File

@@ -1,100 +0,0 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
// This is a benchmark of the performance impact of using private properties.
bench("Polyfillprivate", () => {
"use strict";
var __classPrivateFieldGet =
(this && this.__classPrivateFieldGet) ||
function (receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
var __classPrivateFieldSet =
(this && this.__classPrivateFieldSet) ||
function (receiver, state, value, kind, f) {
if (kind === "m") throw new TypeError("Private method is not writable");
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
throw new TypeError("Cannot write private member to an object whose class did not declare it");
return kind === "a" ? f.call(receiver, value) : f ? (f.value = value) : state.set(receiver, value), value;
};
var _Foo_state, _Foo_inc;
class Foo {
constructor() {
_Foo_state.set(this, 1);
_Foo_inc.set(this, 13);
}
run() {
let n = 1000000;
while (n-- > 0) {
__classPrivateFieldSet(
this,
_Foo_state,
__classPrivateFieldGet(this, _Foo_state, "f") + __classPrivateFieldGet(this, _Foo_inc, "f"),
"f",
);
}
return n;
}
}
(_Foo_state = new WeakMap()), (_Foo_inc = new WeakMap());
new Foo().run();
});
bench("NativePrivates", () => {
class Foo {
#state = 1;
#inc = 13;
run() {
let n = 1000000;
while (n-- > 0) {
this.#state += this.#inc;
}
return n;
}
}
new Foo().run();
});
bench("ConventionalPrivates", () => {
class Foo {
_state = 1;
_inc = 13;
run() {
let n = 1000000;
while (n-- > 0) {
this._state += this._inc;
}
return n;
}
}
new Foo().run();
});
const _state = Symbol("state");
const _inc = Symbol("inc");
bench("SymbolPrivates", () => {
class Foo {
[_state] = 1;
[_inc] = 13;
run() {
let n = 1000000;
while (n-- > 0) {
this[_state] += this[_inc];
}
return n;
}
}
new Foo().run();
});
await run();

View File

@@ -1,33 +0,0 @@
import { bench, run } from "./runner.mjs";
import { performance } from "perf_hooks";
bench("process.memoryUsage()", () => {
process.memoryUsage();
});
bench("process.memoryUsage.rss()", () => {
process.memoryUsage.rss();
});
bench("process.cpuUsage()", () => {
process.cpuUsage();
});
const init = process.cpuUsage();
bench("process.cpuUsage(delta)", () => {
process.cpuUsage(init);
});
bench("performance.now()", () => {
performance.now();
});
bench("process.hrtime()", () => {
process.hrtime();
});
bench("process.hrtime.bigint()", () => {
process.hrtime.bigint();
});
await run();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
bench("process.stderr.write('hey')", () => {
process.stderr.write("hey");

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "mitata";
import { renderToReadableStream } from "react-dom/server.browser";
import { renderToReadableStream as renderToReadableStreamBun } from "react-dom/server";

View File

@@ -1,5 +1,5 @@
import { readFileSync, writeFileSync } from "node:fs";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
var short = (function () {
const text = "Hello World!";

View File

@@ -1,5 +1,5 @@
import { readdirSync } from "fs";
import { bench, run } from "./runner.mjs";
import { bench, run } from "mitata";
import { argv } from "process";
const dir = argv.length > 2 ? argv[2] : "/tmp";

View File

@@ -1,17 +0,0 @@
import { bench, run } from "./runner.mjs";
import { readFileSync, existsSync } from "node:fs";
import { readFile } from "node:fs/promises";
bench(`readFileSync(/tmp/404-not-found)`, () => {
try {
readFileSync("/tmp/404-not-found");
} catch (e) {}
});
bench(`readFile(/tmp/404-not-found)`, async () => {
try {
await readFile("/tmp/404-not-found");
} catch (e) {}
});
await run();

View File

@@ -1,10 +1,4 @@
import { realpathSync } from "node:fs";
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
const arg = process.argv[process.argv.length - 1];
import { bench, run } from "./runner.mjs";
bench("realpathSync x " + count, () => {
for (let i = 0; i < count; i++) realpathSync(arg, "utf-8");
});
await run();
for (let i = 0; i < count; i++) realpathSync(arg);

View File

@@ -1,15 +0,0 @@
// This mostly exists to check for a memory leak in response.clone()
import { bench, run } from "./runner.mjs";
const req = new Request("http://localhost:3000/");
const resp = await fetch("http://example.com");
bench("req.clone().url", () => {
return req.clone().url;
});
bench("resp.clone().url", () => {
return resp.clone().url;
});
await run();

View File

@@ -1,136 +0,0 @@
// This snippet mostly exists to reproduce a memory leak
//
import { bench, run } from "mitata";
const obj = {
"id": 1296269,
"node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
"name": "Hello-World",
"full_name": "octocat/Hello-World",
"owner": {
"login": "octocat",
"id": 1,
"node_id": "MDQ6VXNlcjE=",
"avatar_url": "https://github.com/images/error/octocat_happy.gif",
"gravatar_id": "",
"url": "https://api.github.com/users/octocat",
"html_url": "https://github.com/octocat",
"followers_url": "https://api.github.com/users/octocat/followers",
"following_url": "https://api.github.com/users/octocat/following{/other_user}",
"gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
"organizations_url": "https://api.github.com/users/octocat/orgs",
"repos_url": "https://api.github.com/users/octocat/repos",
"events_url": "https://api.github.com/users/octocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/octocat/received_events",
"type": "User",
"site_admin": false,
},
"private": false,
"html_url": "https://github.com/octocat/Hello-World",
"description": "This your first repo!",
"fork": false,
"url": "https://api.github.com/repos/octocat/Hello-World",
"archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
"assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
"blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
"branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
"collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
"comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
"commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
"compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
"contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
"contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
"deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
"downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
"events_url": "https://api.github.com/repos/octocat/Hello-World/events",
"forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
"git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
"git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
"git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
"git_url": "git:github.com/octocat/Hello-World.git",
"issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
"issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
"issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
"keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
"labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
"languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
"merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
"milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
"notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
"pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
"releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
"ssh_url": "git@github.com:octocat/Hello-World.git",
"stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
"statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
"subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
"subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
"tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
"teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
"trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
"clone_url": "https://github.com/octocat/Hello-World.git",
"mirror_url": "git:git.example.com/octocat/Hello-World",
"hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
"svn_url": "https://svn.github.com/octocat/Hello-World",
"homepage": "https://github.com",
"language": null,
"forks_count": 9,
"stargazers_count": 80,
"watchers_count": 80,
"size": 108,
"default_branch": "master",
"open_issues_count": 0,
"is_template": false,
"topics": ["octocat", "atom", "electron", "api"],
"has_issues": true,
"has_projects": true,
"has_wiki": true,
"has_pages": false,
"has_downloads": true,
"has_discussions": false,
"archived": false,
"disabled": false,
"visibility": "public",
"pushed_at": "2011-01-26T19:06:43Z",
"created_at": "2011-01-26T19:01:12Z",
"updated_at": "2011-01-26T19:14:43Z",
"permissions": {
"admin": false,
"push": false,
"pull": true,
},
"security_and_analysis": {
"advanced_security": {
"status": "enabled",
},
"secret_scanning": {
"status": "enabled",
},
"secret_scanning_push_protection": {
"status": "disabled",
},
},
};
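// JSON.stringify runs once up front, outside the benches, so serialization cost is not measured.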
// Force the string to be 8-bit (latin1)
const str = String.fromCharCode(
...JSON.stringify(obj)
.split("")
.map(a => a.charCodeAt(0)),
);
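// Appending "😊" in the utf16 bench below upgrades the string to UTF-16, so both encodings get covered.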
var i = 0;
bench("new Response().arrayBuffer() (new string each call, latin1)", async () => {
return await new Response(str + i++).arrayBuffer();
});
bench("new Response().arrayBuffer() (new string each call, utf16)", async () => {
return await new Response(str + i++ + "😊").arrayBuffer();
});
bench("new Response().arrayBuffer() (existing string, latin1)", async () => {
return await new Response(str).arrayBuffer();
});
await run();

View File

@@ -1,123 +0,0 @@
// This snippet mostly exists to reproduce a memory leak
import { bench, run } from "mitata";
const obj = {
"id": 1296269,
"node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5",
"name": "Hello-World",
"full_name": "octocat/Hello-World",
"owner": {
"login": "octocat",
"id": 1,
"node_id": "MDQ6VXNlcjE=",
"avatar_url": "https://github.com/images/error/octocat_happy.gif",
"gravatar_id": "",
"url": "https://api.github.com/users/octocat",
"html_url": "https://github.com/octocat",
"followers_url": "https://api.github.com/users/octocat/followers",
"following_url": "https://api.github.com/users/octocat/following{/other_user}",
"gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
"organizations_url": "https://api.github.com/users/octocat/orgs",
"repos_url": "https://api.github.com/users/octocat/repos",
"events_url": "https://api.github.com/users/octocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/octocat/received_events",
"type": "User",
"site_admin": false,
},
"private": false,
"html_url": "https://github.com/octocat/Hello-World",
"description": "This your first repo!",
"fork": false,
"url": "https://api.github.com/repos/octocat/Hello-World",
"archive_url": "https://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}",
"assignees_url": "https://api.github.com/repos/octocat/Hello-World/assignees{/user}",
"blobs_url": "https://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}",
"branches_url": "https://api.github.com/repos/octocat/Hello-World/branches{/branch}",
"collaborators_url": "https://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}",
"comments_url": "https://api.github.com/repos/octocat/Hello-World/comments{/number}",
"commits_url": "https://api.github.com/repos/octocat/Hello-World/commits{/sha}",
"compare_url": "https://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}",
"contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/{+path}",
"contributors_url": "https://api.github.com/repos/octocat/Hello-World/contributors",
"deployments_url": "https://api.github.com/repos/octocat/Hello-World/deployments",
"downloads_url": "https://api.github.com/repos/octocat/Hello-World/downloads",
"events_url": "https://api.github.com/repos/octocat/Hello-World/events",
"forks_url": "https://api.github.com/repos/octocat/Hello-World/forks",
"git_commits_url": "https://api.github.com/repos/octocat/Hello-World/git/commits{/sha}",
"git_refs_url": "https://api.github.com/repos/octocat/Hello-World/git/refs{/sha}",
"git_tags_url": "https://api.github.com/repos/octocat/Hello-World/git/tags{/sha}",
"git_url": "git:github.com/octocat/Hello-World.git",
"issue_comment_url": "https://api.github.com/repos/octocat/Hello-World/issues/comments{/number}",
"issue_events_url": "https://api.github.com/repos/octocat/Hello-World/issues/events{/number}",
"issues_url": "https://api.github.com/repos/octocat/Hello-World/issues{/number}",
"keys_url": "https://api.github.com/repos/octocat/Hello-World/keys{/key_id}",
"labels_url": "https://api.github.com/repos/octocat/Hello-World/labels{/name}",
"languages_url": "https://api.github.com/repos/octocat/Hello-World/languages",
"merges_url": "https://api.github.com/repos/octocat/Hello-World/merges",
"milestones_url": "https://api.github.com/repos/octocat/Hello-World/milestones{/number}",
"notifications_url": "https://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}",
"pulls_url": "https://api.github.com/repos/octocat/Hello-World/pulls{/number}",
"releases_url": "https://api.github.com/repos/octocat/Hello-World/releases{/id}",
"ssh_url": "git@github.com:octocat/Hello-World.git",
"stargazers_url": "https://api.github.com/repos/octocat/Hello-World/stargazers",
"statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/{sha}",
"subscribers_url": "https://api.github.com/repos/octocat/Hello-World/subscribers",
"subscription_url": "https://api.github.com/repos/octocat/Hello-World/subscription",
"tags_url": "https://api.github.com/repos/octocat/Hello-World/tags",
"teams_url": "https://api.github.com/repos/octocat/Hello-World/teams",
"trees_url": "https://api.github.com/repos/octocat/Hello-World/git/trees{/sha}",
"clone_url": "https://github.com/octocat/Hello-World.git",
"mirror_url": "git:git.example.com/octocat/Hello-World",
"hooks_url": "https://api.github.com/repos/octocat/Hello-World/hooks",
"svn_url": "https://svn.github.com/octocat/Hello-World",
"homepage": "https://github.com",
"language": null,
"forks_count": 9,
"stargazers_count": 80,
"watchers_count": 80,
"size": 108,
"default_branch": "master",
"open_issues_count": 0,
"is_template": false,
"topics": ["octocat", "atom", "electron", "api"],
"has_issues": true,
"has_projects": true,
"has_wiki": true,
"has_pages": false,
"has_downloads": true,
"has_discussions": false,
"archived": false,
"disabled": false,
"visibility": "public",
"pushed_at": "2011-01-26T19:06:43Z",
"created_at": "2011-01-26T19:01:12Z",
"updated_at": "2011-01-26T19:14:43Z",
"permissions": {
"admin": false,
"push": false,
"pull": true,
},
"security_and_analysis": {
"advanced_security": {
"status": "enabled",
},
"secret_scanning": {
"status": "enabled",
},
"secret_scanning_push_protection": {
"status": "disabled",
},
},
};
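// The first bench measures serialization only; the second adds parsing the body back into an object.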
bench("Response.json(obj)", async () => {
return Response.json(obj);
});
bench("Response.json(obj).json()", async () => {
return await Response.json(obj).json();
});
await run();

Some files were not shown because too many files have changed in this diff.