mirror of
https://github.com/oven-sh/bun
synced 2026-02-06 00:48:55 +00:00
Compare commits
1 Commits
test-ecosy
...
fix-prepar
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
baa8934545 |
@@ -1,15 +1,17 @@
|
||||
**/*.a
|
||||
**/*.o
|
||||
**/.next
|
||||
**/CMakeCache.txt
|
||||
**/node_modules
|
||||
.git
|
||||
examples
|
||||
node_modules
|
||||
**/node_modules
|
||||
src/bun.js/WebKit/LayoutTests
|
||||
zig-out
|
||||
zig-build
|
||||
**/*.o
|
||||
**/*.a
|
||||
|
||||
examples
|
||||
|
||||
**/.next
|
||||
.git
|
||||
src/bun.js/WebKit
|
||||
**/CMakeCache.txt
|
||||
packages/**/bun
|
||||
packages/**/bun-profile
|
||||
src/bun.js/WebKit
|
||||
src/bun.js/WebKit/LayoutTests
|
||||
zig-build
|
||||
zig-cache
|
||||
zig-out
|
||||
37
.gitattributes
vendored
37
.gitattributes
vendored
@@ -17,28 +17,37 @@
|
||||
*.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
|
||||
*.lockb binary diff=lockb
|
||||
|
||||
.vscode/launch.json linguist-generated
|
||||
src/api/schema.d.ts linguist-generated
|
||||
fixture.*.c linguist-generated
|
||||
src/api/schema.js linguist-generated
|
||||
*-fixture* linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedCode.h linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedCode.cpp linguist-generated
|
||||
src/bun.js/bindings/headers.h linguist-generated
|
||||
src/bun.js/bindings/headers.zig linguist-generated
|
||||
|
||||
packages/bun-uws/fuzzing/seed-corpus/**/* linguist-generated
|
||||
|
||||
src/bun.js/bindings/sqlite/sqlite3.c linguist-vendored
|
||||
src/bun.js/bindings/sqlite/sqlite3_local.h linguist-vendored
|
||||
*.lockb binary diff=lockb
|
||||
src/bun.js/bindings/simdutf.cpp linguist-vendored
|
||||
src/bun.js/bindings/simdutf.h linguist-vendored
|
||||
|
||||
src/js/out/WebCoreJSBuiltins.cpp linguist-generated
|
||||
src/js/out/WebCoreJSBuiltins.h linguist-generated
|
||||
src/js/out/WebCoreJSBuiltins.d.ts linguist-generated
|
||||
|
||||
src/bun.js/bindings/ZigGeneratedClasses.h linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedClasses.cpp linguist-generated
|
||||
|
||||
src/bun.js/bindings/ZigGeneratedCode.h linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedCode.cpp linguist-generated
|
||||
|
||||
src/bun.js/bindings/headers.h linguist-generated
|
||||
src/bun.js/bindings/headers.zig linguist-generated
|
||||
|
||||
src/bun.js/bindings/JSSink.h linguist-generated
|
||||
src/bun.js/bindings/JSSink.zig linguist-generated
|
||||
|
||||
src/bun.js/bindings/ZigGeneratedClasses+DOMClientIsoSubspaces.h linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedClasses+DOMIsoSubspaces.h linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureHeader.h linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedClasses+lazyStructureImpl.h linguist-generated
|
||||
|
||||
docs/**/* linguist-documentation
|
||||
|
||||
# Don't count tests in the language stats - https://github.com/github-linguist/linguist/blob/master/docs/overrides.md
|
||||
test/**/* linguist-documentation
|
||||
bench/**/* linguist-documentation
|
||||
examples/**/* linguist-documentation
|
||||
packages/bun-uws/fuzzing/seed-corpus/**/* linguist-generated
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/3-feature-request.yml
vendored
2
.github/ISSUE_TEMPLATE/3-feature-request.yml
vendored
@@ -8,7 +8,7 @@ body:
|
||||
Thank you for submitting an idea. It helps make Bun better.
|
||||
|
||||
If you want to discuss Bun, or learn how others are using Bun, please
|
||||
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can share in the [`#feedback`](https://discord.gg/unwUnHBNqy) channel.
|
||||
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can share in the [`#feedback-ideas`](https://discord.gg/unwUnHBNqy) channel.
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What is the problem this feature would solve?
|
||||
|
||||
18
.github/pull_request_template.md
vendored
18
.github/pull_request_template.md
vendored
@@ -19,17 +19,18 @@ This adds a new flag --bail to bun test. When set, it will stop running tests af
|
||||
|
||||
<!-- If JavaScript/TypeScript modules or builtins changed:
|
||||
|
||||
- [ ] I included a test for the new code, or existing tests cover it
|
||||
- [ ] I ran my tests locally and they pass (`bun-debug test test-file-name.test`)
|
||||
- [ ] I ran `make js` and committed the transpiled changes
|
||||
- [ ] I or my editor ran Prettier on the changed files (or I ran `bun fmt`)
|
||||
- [ ] I included a test for the new code, or an existing test covers it
|
||||
|
||||
-->
|
||||
|
||||
<!-- If Zig files changed:
|
||||
|
||||
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
|
||||
- [ ] I or my editor ran `zig fmt` on the changed files
|
||||
- [ ] I included a test for the new code, or an existing test covers it
|
||||
- [ ] JSValue used outside outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
|
||||
- [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
|
||||
-->
|
||||
|
||||
<!-- If new methods, getters, or setters were added to a publicly exposed class:
|
||||
@@ -42,6 +43,17 @@ This adds a new flag --bail to bun test. When set, it will stop running tests af
|
||||
- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
|
||||
-->
|
||||
|
||||
<!-- If functions were added to exports.zig or bindings.zig
|
||||
|
||||
- [ ] I ran `make headers` to regenerate the C header file
|
||||
|
||||
-->
|
||||
|
||||
<!-- If \*.classes.ts files were added or changed:
|
||||
|
||||
- [ ] I ran `make codegen` to regenerate the C++ and Zig code
|
||||
-->
|
||||
|
||||
<!-- If a new builtin ESM/CJS module was added:
|
||||
|
||||
- [ ] I updated Aliases in `module_loader.zig` to include the new module
|
||||
|
||||
50
.github/workflows/bun-ecosystem-test.yml
vendored
Normal file
50
.github/workflows/bun-ecosystem-test.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
name: bun-ecosystem-test
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 15 * * *" # every day at 7am PST
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
description: "The version of Bun to run"
|
||||
required: true
|
||||
default: "canary"
|
||||
type: string
|
||||
jobs:
|
||||
test:
|
||||
name: ${{ matrix.tag }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 10
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
tag: linux-x64
|
||||
url: linux/x64?avx2=true
|
||||
- os: ubuntu-latest
|
||||
tag: linux-x64-baseline
|
||||
url: linux/x64?baseline=true
|
||||
# FIXME: runner fails with "No tests found"?
|
||||
#- os: macos-latest
|
||||
# tag: darwin-x64
|
||||
# url: darwin/x64?avx2=true
|
||||
- os: macos-latest
|
||||
tag: darwin-x64-baseline
|
||||
url: darwin/x64?baseline=true
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: Bhacaz/checkout-files@v2
|
||||
with:
|
||||
files: packages/bun-internal-test
|
||||
- id: setup
|
||||
name: Setup
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-download-url: https://bun.sh/download/${{ github.event.inputs.version }}/${{ matrix.url }}
|
||||
- id: test
|
||||
name: Test
|
||||
working-directory: packages/bun-internal-test
|
||||
run: bun run test:ecosystem
|
||||
29
.github/workflows/bun-linux-aarch64.yml
vendored
29
.github/workflows/bun-linux-aarch64.yml
vendored
@@ -4,6 +4,11 @@ concurrency:
|
||||
group: bun-linux-aarch64-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TEST_TAG: bun-test'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
@@ -11,17 +16,6 @@ on:
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -43,7 +37,10 @@ jobs:
|
||||
arch: aarch64
|
||||
build_arch: arm64
|
||||
runner: linux-arm64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-linux-arm64-lto.tar.gz"
|
||||
webkit_basename: "bun-webkit-linux-arm64-lto"
|
||||
build_machine_arch: aarch64
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
@@ -75,7 +72,9 @@ jobs:
|
||||
BUILDARCH=${{matrix.build_arch}}
|
||||
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
|
||||
CPU_TARGET=${{matrix.cpu}}
|
||||
WEBKIT_URL=${{matrix.webkit_url}}
|
||||
GIT_SHA=${{github.sha}}
|
||||
WEBKIT_BASENAME=${{matrix.webkit_basename}}
|
||||
platforms: linux/${{matrix.build_arch}}
|
||||
target: artifact
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
@@ -111,6 +110,14 @@ jobs:
|
||||
with:
|
||||
name: bun-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/bun-${{matrix.tag}}.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: bun-obj-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release/bun-obj
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}-dependencies
|
||||
path: ${{runner.temp}}/release/bun-dependencies
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
|
||||
108
.github/workflows/bun-linux-build.yml
vendored
108
.github/workflows/bun-linux-build.yml
vendored
@@ -4,6 +4,11 @@ concurrency:
|
||||
group: bun-linux-build-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TEST_TAG: bun-test'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
@@ -11,7 +16,6 @@ on:
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -21,7 +25,6 @@ on:
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -44,44 +47,22 @@ jobs:
|
||||
arch: x86_64
|
||||
build_arch: amd64
|
||||
runner: big-ubuntu
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-linux-amd64-lto.tar.gz"
|
||||
webkit_basename: "bun-webkit-linux-amd64-lto"
|
||||
build_machine_arch: x86_64
|
||||
assertions: "OFF"
|
||||
zig_optimize: "ReleaseFast"
|
||||
target: "artifact"
|
||||
- cpu: nehalem
|
||||
tag: linux-x64-baseline
|
||||
arch: x86_64
|
||||
build_arch: amd64
|
||||
runner: big-ubuntu
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-linux-amd64-lto.tar.gz"
|
||||
webkit_basename: "bun-webkit-linux-amd64-lto"
|
||||
build_machine_arch: x86_64
|
||||
assertions: "OFF"
|
||||
zig_optimize: "ReleaseFast"
|
||||
target: "artifact"
|
||||
# - cpu: haswell
|
||||
# tag: linux-x64-assertions
|
||||
# arch: x86_64
|
||||
# build_arch: amd64
|
||||
# runner: big-ubuntu
|
||||
# build_machine_arch: x86_64
|
||||
# assertions: "ON"
|
||||
# zig_optimize: "ReleaseSafe"
|
||||
# target: "artifact-assertions"
|
||||
# - cpu: nehalem
|
||||
# tag: linux-x64-baseline-assertions
|
||||
# arch: x86_64
|
||||
# build_arch: amd64
|
||||
# runner: big-ubuntu
|
||||
# build_machine_arch: x86_64
|
||||
# assertions: "ON"
|
||||
# zig_optimize: "ReleaseSafe"
|
||||
# target: "artifact-assertions"
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
ref: ${{github.sha}}
|
||||
clean: true
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
id: buildx
|
||||
with:
|
||||
@@ -109,17 +90,11 @@ jobs:
|
||||
BUILDARCH=${{matrix.build_arch}}
|
||||
BUILD_MACHINE_ARCH=${{matrix.build_machine_arch}}
|
||||
CPU_TARGET=${{matrix.cpu}}
|
||||
WEBKIT_URL=${{matrix.webkit_url}}
|
||||
GIT_SHA=${{github.sha}}
|
||||
ASSERTIONS=${{matrix.assertions}}
|
||||
ZIG_OPTIMIZE=${{matrix.zig_optimize}}
|
||||
SCCACHE_BUCKET=bun
|
||||
SCCACHE_REGION=auto
|
||||
SCCACHE_S3_USE_SSL=true
|
||||
SCCACHE_ENDPOINT=${{ secrets.CACHE_S3_ENDPOINT }}
|
||||
AWS_ACCESS_KEY_ID=${{ secrets.CACHE_S3_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}
|
||||
WEBKIT_BASENAME=${{matrix.webkit_basename}}
|
||||
platforms: linux/${{matrix.build_arch}}
|
||||
target: ${{matrix.target}}
|
||||
target: artifact
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- name: Zip
|
||||
run: |
|
||||
@@ -178,33 +153,13 @@ jobs:
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{runner.temp}}/release/bun-${{matrix.tag}}.zip,${{runner.temp}}/release/bun-${{matrix.tag}}-profile.zip"
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
|
||||
with:
|
||||
title: ""
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
status: ${{ job.status }}
|
||||
noprefix: true
|
||||
nocontext: true
|
||||
description: |
|
||||
Pull Request
|
||||
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
|
||||
|
||||
@${{ github.actor }}
|
||||
|
||||
Build failed on ${{ matrix.tag }}:
|
||||
|
||||
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
|
||||
linux-test:
|
||||
name: Tests ${{matrix.tag}}
|
||||
runs-on: ubuntu-latest
|
||||
needs: [linux]
|
||||
if: github.event_name == 'pull_request'
|
||||
timeout-minutes: 20
|
||||
permissions:
|
||||
pull-requests: write
|
||||
outputs:
|
||||
failing_tests: ${{ steps.test.outputs.failing_tests }}
|
||||
failing_tests_count: ${{ steps.test.outputs.failing_tests_count }}
|
||||
@@ -214,23 +169,20 @@ jobs:
|
||||
include:
|
||||
- tag: linux-x64
|
||||
- tag: linux-x64-baseline
|
||||
- tag: linux-x64-assertions
|
||||
- tag: linux-x64-baseline-assertions
|
||||
steps:
|
||||
- id: checkout
|
||||
name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: false
|
||||
clean: true
|
||||
- id: download
|
||||
name: Download
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: bun-${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release
|
||||
- id: install-bun
|
||||
name: Install Bun
|
||||
- id: install
|
||||
name: Install
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
unzip bun-${{matrix.tag}}.zip
|
||||
@@ -238,13 +190,6 @@ jobs:
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: install-dependnecies
|
||||
name: Install dependencies
|
||||
run: |
|
||||
sudo apt-get update && sudo apt-get install -y openssl
|
||||
bun install --verbose
|
||||
bun install --cwd=test --verbose
|
||||
bun install --cwd=packages/bun-internal-test --verbose
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
@@ -253,24 +198,11 @@ jobs:
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
sudo apt-get update && sudo apt-get install -y openssl
|
||||
bun install
|
||||
bun install --cwd test
|
||||
bun install --cwd packages/bun-internal-test
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
with:
|
||||
title: ""
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
status: "failure"
|
||||
noprefix: true
|
||||
nocontext: true
|
||||
description: |
|
||||
Pull Request
|
||||
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
|
||||
|
||||
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
- name: Comment on PR
|
||||
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
|
||||
461
.github/workflows/bun-mac-aarch64.yml
vendored
461
.github/workflows/bun-mac-aarch64.yml
vendored
@@ -5,8 +5,9 @@ concurrency:
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
LLVM_VERSION: 16
|
||||
BUN_DOWNLOAD_URL_BASE: https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TEST_TAG: bun-test'
|
||||
|
||||
on:
|
||||
push:
|
||||
@@ -14,7 +15,6 @@ on:
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -23,7 +23,6 @@ on:
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -31,134 +30,79 @@ on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
macOS-zig:
|
||||
name: macOS Zig Object
|
||||
macos-object-files:
|
||||
name: macOS Object
|
||||
runs-on: med-ubuntu
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-obj-darwin-x64-baseline
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-obj-darwin-x64
|
||||
- cpu: native
|
||||
arch: aarch64
|
||||
tag: bun-obj-darwin-aarch64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
# - name: Checkout submodules
|
||||
# run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
- name: Setup Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
id: buildx
|
||||
with:
|
||||
install: true
|
||||
|
||||
- name: Run
|
||||
run: |
|
||||
rm -rf ${{runner.temp}}/release
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Compile Zig Object
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
if: runner.arch == 'X64'
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
# This doesnt seem to work
|
||||
# cache-from: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
|
||||
# cache-to: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=min
|
||||
build-args: |
|
||||
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
|
||||
ARCH=${{ matrix.arch }}
|
||||
BUILDARCH=amd64
|
||||
BUILD_MACHINE_ARCH=x86_64
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{ matrix.arch }}-macos-none
|
||||
GIT_SHA=${{ github.sha }}
|
||||
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
TRIPLET=${{matrix.arch}}-macos-none
|
||||
GIT_SHA=${{github.sha}}
|
||||
platforms: linux/amd64
|
||||
target: build_release_obj
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
|
||||
- name: Upload Zig Object
|
||||
uses: actions/upload-artifact@v3
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
if: runner.arch == 'ARM64'
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=min
|
||||
build-args: |
|
||||
ARCH=${{ matrix.arch }}
|
||||
BUILDARCH=arm64
|
||||
BUILD_MACHINE_ARCH=aarch64
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{matrix.arch}}-macos-none
|
||||
GIT_SHA=${{github.sha}}
|
||||
platforms: linux/arm64
|
||||
target: build_release_obj
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}
|
||||
path: ${{runner.temp}}/release/bun-zig.o
|
||||
|
||||
macOS-dependencies:
|
||||
name: macOS Dependencies
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 15
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- cpu: native
|
||||
arch: aarch64
|
||||
tag: bun-darwin-aarch64
|
||||
obj: bun-obj-darwin-aarch64
|
||||
artifact: bun-obj-darwin-aarch64
|
||||
runner: macos-arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
- name: Install system dependencies
|
||||
env:
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
|
||||
- name: Hash submodule versions
|
||||
run: |
|
||||
print_data() {
|
||||
git submodule | grep -v WebKit
|
||||
llvm-config --version
|
||||
rustc --version
|
||||
cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
|
||||
}
|
||||
echo "sha=$(print_data | sha1sum | cut -c 1-10)" >> $GITHUB_OUTPUT
|
||||
id: submodule-versions
|
||||
|
||||
- name: Cache submodule dependencies
|
||||
id: cache-deps-restore
|
||||
uses: actions/cache/restore@v3
|
||||
with:
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
|
||||
|
||||
- name: Compile submodule dependencies
|
||||
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
mkdir -p $BUN_DEPS_OUT_DIR
|
||||
bash ./scripts/clean-dependencies.sh
|
||||
bash ./scripts/all-dependencies.sh
|
||||
|
||||
- name: Cache submodule dependencies
|
||||
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
|
||||
id: cache-deps-save
|
||||
uses: actions/cache/save@v3
|
||||
with:
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
|
||||
|
||||
- name: Upload submodule dependencies
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
|
||||
path: ${{runner.temp}}/release/bun.o
|
||||
macOS-cpp:
|
||||
name: macOS C++
|
||||
runs-on: ${{ matrix.runner }}
|
||||
@@ -167,145 +111,257 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-12
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-12
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-12
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-12
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
- cpu: native
|
||||
arch: aarch64
|
||||
tag: bun-darwin-aarch64
|
||||
obj: bun-obj-darwin-aarch64
|
||||
artifact: bun-obj-darwin-aarch64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
runner: macos-arm64
|
||||
dependencies: true
|
||||
compile_obj: true
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
- name: Install system dependencies
|
||||
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
|
||||
- name: Install dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
brew install ccache rust llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
|
||||
# TODO: replace with sccache
|
||||
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@16
|
||||
- name: ccache
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}
|
||||
|
||||
- name: Compile C++
|
||||
- name: Download WebKit
|
||||
if: matrix.compile_obj
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
SOURCE_DIR: ${{ github.workspace }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
mkdir -p $OBJ_DIR
|
||||
cd $OBJ_DIR
|
||||
|
||||
cmake -S $SOURCE_DIR -B $OBJ_DIR \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DBUN_CPP_ONLY=1 \
|
||||
-DNO_CONFIGURE_DEPENDS=1
|
||||
|
||||
bash compile-cpp-only.sh -v
|
||||
|
||||
rm -rf $JSC_BASE_DIR
|
||||
mkdir -p $JSC_BASE_DIR
|
||||
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
|
||||
- name: Compile dependencies
|
||||
if: matrix.dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
mkdir -p $BUN_DEPS_OUT_DIR
|
||||
make vendor-without-check
|
||||
- name: Compile C++
|
||||
if: matrix.compile_obj
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
|
||||
make clean-bindings
|
||||
make -j $(sysctl -n hw.ncpu) release-bindings
|
||||
- name: Upload C++
|
||||
if: matrix.compile_obj
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
|
||||
macOS-link:
|
||||
path: ${{ runner.temp }}/bun-cpp-obj
|
||||
- name: Upload Dependencies
|
||||
if: matrix.dependencies
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
macOS:
|
||||
name: macOS Link
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
needs: [macOS-zig, macOS-cpp, macOS-dependencies]
|
||||
timeout-minutes: 60
|
||||
needs: [macOS-cpp, macos-object-files]
|
||||
timeout-minutes: 90
|
||||
permissions: write-all
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-12
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
# obj: bun-obj-darwin-x64
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-12
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
- cpu: native
|
||||
arch: aarch64
|
||||
tag: bun-darwin-aarch64
|
||||
obj: bun-obj-darwin-aarch64
|
||||
package: bun-darwin-aarch64
|
||||
artifact: bun-obj-darwin-aarch64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
runner: macos-arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
ref: ${{github.sha}}
|
||||
clean: true
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
- name: Install system dependencies
|
||||
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
|
||||
- name: Install dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install ccache llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv openssl@1.1 ninja --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
brew install rust ccache llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
|
||||
curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-${{matrix.arch}}.zip"
|
||||
unzip bun-darwin-${{matrix.arch}}.zip
|
||||
mkdir -p ${{ runner.temp }}/.bun/bin
|
||||
mv bun-darwin-${{matrix.arch}}/bun ${{ runner.temp }}/.bun/bin/bun
|
||||
chmod +x ${{ runner.temp }}/.bun/bin/bun
|
||||
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
|
||||
|
||||
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@16
|
||||
- name: ccache
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
|
||||
- name: Download WebKit
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
rm -rf $JSC_BASE_DIR
|
||||
mkdir -p $JSC_BASE_DIR
|
||||
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
|
||||
- name: Download C++
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj
|
||||
|
||||
- name: Download Zig Object
|
||||
- name: Download Dependencies
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
- name: Download Object
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.obj }}
|
||||
path: ${{ runner.temp }}/release
|
||||
|
||||
- name: Downloaded submodule dependencies
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
|
||||
- name: Link
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
SRC_DIR=$PWD
|
||||
mkdir ${{runner.temp}}/link-build
|
||||
cd ${{runner.temp}}/link-build
|
||||
cmake $SRC_DIR \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DBUN_LINK_ONLY=1 \
|
||||
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
|
||||
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
|
||||
-DBUN_DEPS_OUT_DIR="${{runner.temp}}/bun-deps" \
|
||||
-DNO_CONFIGURE_DEPENDS=1
|
||||
ninja -v
|
||||
rm -rf packages/${{ matrix.package }}
|
||||
mkdir -p packages/${{ matrix.package }}
|
||||
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
|
||||
make bun-link-lld-release copy-to-bun-release-dir-bin
|
||||
- name: Zip
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
cd ${{runner.temp}}/link-build
|
||||
cd ${{runner.temp}}/release
|
||||
chmod +x bun-profile bun
|
||||
|
||||
mkdir -p ${{matrix.tag}}-profile/ ${{matrix.tag}}/
|
||||
mkdir ${{matrix.tag}}-profile
|
||||
mkdir ${{matrix.tag}}
|
||||
|
||||
/usr/bin/strip -S bun
|
||||
|
||||
mv bun-profile ${{matrix.tag}}-profile/bun-profile
|
||||
mv bun ${{matrix.tag}}/bun
|
||||
@@ -315,11 +371,11 @@ jobs:
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}-profile
|
||||
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
|
||||
path: ${{runner.temp}}/release/${{matrix.tag}}-profile.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}
|
||||
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
|
||||
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
@@ -336,33 +392,12 @@ jobs:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{runner.temp}}/link-build/${{matrix.tag}}.zip,${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip"
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
|
||||
with:
|
||||
title: ""
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
status: ${{ job.status }}
|
||||
noprefix: true
|
||||
nocontext: true
|
||||
description: |
|
||||
Pull Request
|
||||
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
|
||||
|
||||
@${{ github.actor }}
|
||||
|
||||
Build failed on ${{ matrix.tag }}:
|
||||
|
||||
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
|
||||
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
|
||||
macOS-test:
|
||||
name: Tests ${{matrix.tag}}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
needs: [macOS-link]
|
||||
needs: [macOS]
|
||||
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
|
||||
permissions:
|
||||
pull-requests: write
|
||||
timeout-minutes: 30
|
||||
outputs:
|
||||
failing_tests: ${{ steps.test.outputs.failing_tests }}
|
||||
@@ -385,8 +420,8 @@ jobs:
|
||||
with:
|
||||
name: ${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release
|
||||
- id: install-bun
|
||||
name: Install Bun
|
||||
- id: install
|
||||
name: Install
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
unzip ${{matrix.tag}}.zip
|
||||
@@ -394,12 +429,6 @@ jobs:
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: install
|
||||
name: Install dependencies
|
||||
run: |
|
||||
bun install --verbose
|
||||
bun install --cwd=test --verbose
|
||||
bun install --cwd=packages/bun-internal-test --verbose
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
@@ -408,24 +437,10 @@ jobs:
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
bun install
|
||||
bun install --cwd test
|
||||
bun install --cwd packages/bun-internal-test
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
with:
|
||||
title: ""
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
status: "failure"
|
||||
noprefix: true
|
||||
nocontext: true
|
||||
description: |
|
||||
Pull Request
|
||||
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
|
||||
|
||||
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
- name: Comment on PR
|
||||
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
|
||||
483
.github/workflows/bun-mac-x64-baseline.yml
vendored
483
.github/workflows/bun-mac-x64-baseline.yml
vendored
@@ -5,8 +5,9 @@ concurrency:
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
LLVM_VERSION: 16
|
||||
BUN_DOWNLOAD_URL_BASE: https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TEST_TAG: bun-test'
|
||||
|
||||
on:
|
||||
push:
|
||||
@@ -14,7 +15,6 @@ on:
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -23,7 +23,6 @@ on:
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -48,129 +47,62 @@ jobs:
|
||||
# arch: aarch64
|
||||
# tag: bun-obj-darwin-aarch64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
id: buildx
|
||||
with:
|
||||
install: true
|
||||
|
||||
- name: Run
|
||||
run: |
|
||||
rm -rf ${{runner.temp}}/release
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Compile Zig Object
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
if: runner.arch == 'X64'
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
# This doesnt seem to work
|
||||
# cache-from: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
|
||||
# cache-to: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
|
||||
# This was used before, but also does not really work
|
||||
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
|
||||
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=min
|
||||
build-args: |
|
||||
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
|
||||
ARCH=${{ matrix.arch }}
|
||||
BUILDARCH=amd64
|
||||
BUILD_MACHINE_ARCH=x86_64
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{ matrix.arch }}-macos-none
|
||||
GIT_SHA=${{ github.sha }}
|
||||
|
||||
SCCACHE_BUCKET=bun
|
||||
SCCACHE_REGION=auto
|
||||
SCCACHE_S3_USE_SSL=true
|
||||
SCCACHE_ENDPOINT=${{ secrets.CACHE_S3_ENDPOINT }}
|
||||
AWS_ACCESS_KEY_ID=${{ secrets.CACHE_S3_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}
|
||||
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
TRIPLET=${{matrix.arch}}-macos-none
|
||||
GIT_SHA=${{github.sha}}
|
||||
platforms: linux/amd64
|
||||
target: build_release_obj
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
|
||||
- name: Upload Zig Object
|
||||
uses: actions/upload-artifact@v3
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
if: runner.arch == 'ARM64'
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=min
|
||||
build-args: |
|
||||
ARCH=${{ matrix.arch }}
|
||||
BUILDARCH=arm64
|
||||
BUILD_MACHINE_ARCH=aarch64
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{matrix.arch}}-macos-none
|
||||
GIT_SHA=${{github.sha}}
|
||||
platforms: linux/arm64
|
||||
target: build_release_obj
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}
|
||||
path: ${{runner.temp}}/release/bun-zig.o
|
||||
macOS-dependencies:
|
||||
name: macOS Dependencies
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 15
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- cpu: nehalem
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64-baseline
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
runner: macos-12
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
- name: Install system dependencies
|
||||
env:
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
|
||||
- name: Hash submodule versions
|
||||
run: |
|
||||
print_data() {
|
||||
git submodule | grep -v WebKit
|
||||
llvm-config --version
|
||||
rustc --version
|
||||
cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
|
||||
}
|
||||
echo "sha=$(print_data | sha1sum | cut -c 1-10)" >> $GITHUB_OUTPUT
|
||||
id: submodule-versions
|
||||
|
||||
- name: Cache submodule dependencies
|
||||
id: cache-deps-restore
|
||||
uses: actions/cache/restore@v3
|
||||
with:
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
|
||||
|
||||
- name: Compile submodule dependencies
|
||||
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
mkdir -p $BUN_DEPS_OUT_DIR
|
||||
bash ./scripts/clean-dependencies.sh
|
||||
bash ./scripts/all-dependencies.sh
|
||||
|
||||
- name: Cache submodule dependencies
|
||||
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
|
||||
id: cache-deps-save
|
||||
uses: actions/cache/save@v3
|
||||
with:
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
|
||||
|
||||
- name: Upload submodule dependencies
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
|
||||
path: ${{runner.temp}}/release/bun.o
|
||||
macOS-cpp:
|
||||
name: macOS C++
|
||||
runs-on: ${{ matrix.runner }}
|
||||
@@ -185,66 +117,136 @@ jobs:
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
runner: macos-12
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: true
|
||||
compile_obj: false
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-12
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
- cpu: nehalem
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64-baseline
|
||||
obj: bun-obj-darwin-x64-baseline
|
||||
runner: macos-12
|
||||
artifact: bun-obj-darwin-x64-baseline
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: false
|
||||
compile_obj: true
|
||||
# - cpu: haswell
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64
|
||||
# obj: bun-obj-darwin-x64
|
||||
# runner: macos-12
|
||||
# artifact: bun-obj-darwin-x64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
# dependencies: true
|
||||
# compile_obj: true
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
- name: Install system dependencies
|
||||
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
|
||||
- name: Install dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
|
||||
curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-x64-baseline.zip"
|
||||
unzip bun-darwin-x64-baseline.zip
|
||||
mkdir -p ${{ runner.temp }}/.bun/bin
|
||||
mv bun-darwin-x64-baseline/bun ${{ runner.temp }}/.bun/bin/bun
|
||||
chmod +x ${{ runner.temp }}/.bun/bin/bun
|
||||
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
|
||||
|
||||
# TODO: replace with sccache
|
||||
- name: ccache
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}
|
||||
|
||||
- name: Compile C++
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
SOURCE_DIR: ${{ github.workspace }}
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
mkdir -p $OBJ_DIR
|
||||
cd $OBJ_DIR
|
||||
|
||||
cmake -S $SOURCE_DIR -B $OBJ_DIR \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DBUN_CPP_ONLY=1 \
|
||||
-DNO_CONFIGURE_DEPENDS=1
|
||||
|
||||
bash compile-cpp-only.sh -v
|
||||
|
||||
brew install ccache rust llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@16
|
||||
- name: ccache (dependencies)
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
if: matrix.dependencies
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
|
||||
- name: ccache (c++)
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
if: matrix.compile_obj
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
|
||||
- name: Download WebKit
|
||||
if: matrix.compile_obj
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
rm -rf $JSC_BASE_DIR
mkdir -p $JSC_BASE_DIR
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
- name: Compile dependencies
if: matrix.dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
make vendor-without-check
- name: Compile C++
if: matrix.compile_obj
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
make -j $(sysctl -n hw.ncpu) release-bindings
- name: Upload C++
if: matrix.compile_obj
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
path: ${{ runner.temp }}/bun-cpp-obj
- name: Upload Dependencies
if: matrix.dependencies
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps
macOS:
name: macOS Link
runs-on: ${{ matrix.runner }}
if: github.repository_owner == 'oven-sh'
needs: [macOS-cpp, macos-object-files, macOS-dependencies]
needs: [macOS-cpp, macos-object-files]
timeout-minutes: 90
permissions: write-all
strategy:
@@ -257,70 +259,113 @@ jobs:
package: bun-darwin-x64
runner: macos-12
artifact: bun-obj-darwin-x64-baseline
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: haswell
# arch: x86_64
# tag: bun-darwin-x64
# obj: bun-obj-darwin-x64
# package: bun-darwin-x64
# runner: macos-12
# artifact: bun-obj-darwin-x64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
# - cpu: native
# arch: aarch64
# tag: bun-darwin-aarch64
# obj: bun-obj-darwin-aarch64
# package: bun-darwin-aarch64
# artifact: bun-obj-darwin-aarch64
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
# runner: macos-arm64
steps:
- uses: actions/checkout@v3
- name: Checkout submodules
run: git submodule update --init --recursive --depth=1 --progress --force

- name: Install system dependencies
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
- name: Install dependencies
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
run: |
brew install ccache llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv openssl@1.1 ninja --force
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
brew install ccache rust llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION

curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-x64-baseline.zip"
unzip bun-darwin-x64-baseline.zip
mkdir -p ${{ runner.temp }}/.bun/bin
mv bun-darwin-x64-baseline/bun ${{ runner.temp }}/.bun/bin/bun
chmod +x ${{ runner.temp }}/.bun/bin/bun
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH

echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@16
- name: ccache (link)
uses: hendrikmuhs/ccache-action@v1.2
with:
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
- name: Download WebKit
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
rm -rf $JSC_BASE_DIR
mkdir -p $JSC_BASE_DIR
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
- name: Download C++
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj

- name: Download Zig Object
- name: Download Dependencies
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{ runner.temp }}/bun-deps
- name: Download Object
uses: actions/download-artifact@v3
with:
name: ${{ matrix.obj }}
path: ${{ runner.temp }}/release

- name: Downloaded submodule dependencies
uses: actions/download-artifact@v3
with:
name: ${{ matrix.tag }}-deps
path: ${{runner.temp}}/bun-deps

- name: Link
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
SRC_DIR=$PWD
mkdir ${{runner.temp}}/link-build
cd ${{runner.temp}}/link-build
cmake $SRC_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
-DBUN_DEPS_OUT_DIR="${{runner.temp}}/bun-deps" \
-DNO_CONFIGURE_DEPENDS=1
ninja -v
rm -rf packages/${{ matrix.package }}
mkdir -p packages/${{ matrix.package }}
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
make bun-link-lld-release copy-to-bun-release-dir-bin
- name: Zip
env:
CPU_TARGET: ${{ matrix.cpu }}
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
BUN_RELEASE_DIR: ${{ runner.temp }}/release
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
run: |
cd ${{runner.temp}}/link-build
cd ${{runner.temp}}/release
chmod +x bun-profile bun

mkdir -p ${{matrix.tag}}-profile/ ${{matrix.tag}}/
mkdir ${{matrix.tag}}-profile
mkdir ${{matrix.tag}}

/usr/bin/strip -S bun

mv bun-profile ${{matrix.tag}}-profile/bun-profile
mv bun ${{matrix.tag}}/bun
@@ -330,11 +375,11 @@ jobs:
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}-profile
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
path: ${{runner.temp}}/release/${{matrix.tag}}-profile.zip
- uses: actions/upload-artifact@v3
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
- name: Release
id: release
uses: ncipollo/release-action@v1
@@ -351,33 +396,12 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
name: "Canary (${{github.sha}})"
tag: "canary"
artifacts: "${{runner.temp}}/link-build/${{matrix.tag}}.zip,${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip"
- uses: sarisia/actions-status-discord@v1
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: ${{ job.status }}
noprefix: true
nocontext: true
description: |
Pull Request
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})

@${{ github.actor }}

Build failed on ${{ matrix.tag }}:

**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**

[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
macOS-test:
name: macOS Test
name: Tests ${{matrix.tag}}
runs-on: ${{ matrix.runner }}
needs: [macOS]
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
permissions:
pull-requests: write
timeout-minutes: 30
outputs:
failing_tests: ${{ steps.test.outputs.failing_tests }}
@@ -400,8 +424,8 @@ jobs:
with:
name: ${{matrix.tag}}
path: ${{runner.temp}}/release
- id: install-bun
name: Install Bun
- id: install
name: Install
run: |
cd ${{runner.temp}}/release
unzip ${{matrix.tag}}.zip
@@ -409,12 +433,6 @@ jobs:
chmod +x bun
pwd >> $GITHUB_PATH
./bun --version
- id: install
name: Install dependencies
run: |
bun install --verbose
bun install --cwd=test --verbose
bun install --cwd=packages/bun-internal-test --verbose
- id: test
name: Test (node runner)
env:
@@ -423,27 +441,10 @@ jobs:
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
# if: ${{github.event.inputs.use_bun == 'false'}}
run: |
bun install
bun install --cwd test
bun install --cwd packages/bun-internal-test
node packages/bun-internal-test/src/runner.node.mjs || true
- uses: sarisia/actions-status-discord@v1
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
with:
title: ""
webhook: ${{ secrets.DISCORD_WEBHOOK }}
status: "failure"
noprefix: true
nocontext: true
description: |
Pull Request
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})

Hey @${{ github.actor }},

${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:

${{ steps.test.outputs.failing_tests }}

**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**

- name: Comment on PR
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
uses: thollander/actions-comment-pull-request@v2
487 .github/workflows/bun-mac-x64.yml vendored
@@ -5,8 +5,9 @@ concurrency:
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
LLVM_VERSION: 16
|
||||
BUN_DOWNLOAD_URL_BASE: https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
TEST_TAG: bun-test'
|
||||
|
||||
on:
|
||||
push:
|
||||
@@ -14,7 +15,6 @@ on:
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -23,7 +23,6 @@ on:
|
||||
paths:
|
||||
- "src/**/*"
|
||||
- "test/**/*"
|
||||
- "packages/bun-usockets/src/**/*"
|
||||
- "build.zig"
|
||||
- "Makefile"
|
||||
- "Dockerfile"
|
||||
@@ -31,8 +30,8 @@ on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
macOS-zig:
|
||||
name: macOS Zig Object
|
||||
macos-object-files:
|
||||
name: macOS Object
|
||||
runs-on: med-ubuntu
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
strategy:
|
||||
@@ -44,131 +43,66 @@ jobs:
|
||||
- cpu: haswell
|
||||
arch: x86_64
|
||||
tag: bun-obj-darwin-x64
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-obj-darwin-aarch64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
id: buildx
|
||||
with:
|
||||
install: true
|
||||
|
||||
- name: Run
|
||||
run: |
|
||||
rm -rf ${{runner.temp}}/release
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Compile Zig Object
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
if: runner.arch == 'X64'
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
# This doesnt seem to work
|
||||
# cache-from: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
|
||||
# cache-to: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
|
||||
# This was used before, but also does not really work
|
||||
cache-from: type=local,src=/tmp/.buildx-cache-${{matrix.tag}}
|
||||
cache-to: type=local,dest=/tmp/.buildx-cache-${{matrix.tag}}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=min
|
||||
build-args: |
|
||||
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
|
||||
ARCH=${{ matrix.arch }}
|
||||
BUILDARCH=amd64
|
||||
BUILD_MACHINE_ARCH=x86_64
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{ matrix.arch }}-macos-none
|
||||
GIT_SHA=${{ github.sha }}
|
||||
|
||||
SCCACHE_BUCKET=bun
|
||||
SCCACHE_REGION=auto
|
||||
SCCACHE_S3_USE_SSL=true
|
||||
SCCACHE_ENDPOINT=${{ secrets.CACHE_S3_ENDPOINT }}
|
||||
AWS_ACCESS_KEY_ID=${{ secrets.CACHE_S3_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}
|
||||
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
TRIPLET=${{matrix.arch}}-macos-none
|
||||
GIT_SHA=${{github.sha}}
|
||||
platforms: linux/amd64
|
||||
target: build_release_obj
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
|
||||
- name: Upload Zig Object
|
||||
uses: actions/upload-artifact@v3
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v3
|
||||
if: runner.arch == 'ARM64'
|
||||
with:
|
||||
context: .
|
||||
push: false
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=min
|
||||
build-args: |
|
||||
ARCH=${{ matrix.arch }}
|
||||
BUILDARCH=arm64
|
||||
BUILD_MACHINE_ARCH=aarch64
|
||||
CPU_TARGET=${{ matrix.cpu }}
|
||||
TRIPLET=${{matrix.arch}}-macos-none
|
||||
GIT_SHA=${{github.sha}}
|
||||
platforms: linux/arm64
|
||||
target: build_release_obj
|
||||
outputs: type=local,dest=${{runner.temp}}/release
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}
|
||||
path: ${{runner.temp}}/release/bun-zig.o
|
||||
|
||||
macOS-dependencies:
|
||||
name: macOS Dependencies
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 15
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- cpu: haswell
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64
|
||||
obj: bun-obj-darwin-x64
|
||||
runner: macos-12
|
||||
artifact: bun-obj-darwin-x64
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
- name: Install system dependencies
|
||||
env:
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
|
||||
- name: Hash submodule versions
|
||||
run: |
|
||||
print_data() {
|
||||
git submodule | grep -v WebKit
|
||||
llvm-config --version
|
||||
rustc --version
|
||||
cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
|
||||
}
|
||||
echo "sha=$(print_data | sha1sum | cut -c 1-10)" >> $GITHUB_OUTPUT
|
||||
id: submodule-versions
|
||||
|
||||
- name: Cache submodule dependencies
|
||||
id: cache-deps-restore
|
||||
uses: actions/cache/restore@v3
|
||||
with:
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
key: bun-deps-${{ matrix.tag }}-${{ steps.submodule-versions.outputs.sha }}
|
||||
|
||||
- name: Compile submodule dependencies
|
||||
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
mkdir -p $BUN_DEPS_OUT_DIR
|
||||
bash ./scripts/clean-dependencies.sh
|
||||
bash ./scripts/all-dependencies.sh
|
||||
|
||||
- name: Cache submodule dependencies
|
||||
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
|
||||
id: cache-deps-save
|
||||
uses: actions/cache/save@v3
|
||||
with:
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}
|
||||
|
||||
- name: Upload submodule dependencies
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
|
||||
path: ${{runner.temp}}/release/bun.o
|
||||
macOS-cpp:
|
||||
name: macOS C++
|
||||
runs-on: ${{ matrix.runner }}
|
||||
@@ -177,77 +111,157 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-12
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: true
|
||||
# compile_obj: false
|
||||
- cpu: haswell
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64
|
||||
obj: bun-obj-darwin-x64
|
||||
runner: macos-12
|
||||
artifact: bun-obj-darwin-x64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: true
|
||||
compile_obj: false
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# runner: macos-12
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# dependencies: false
|
||||
# compile_obj: true
|
||||
- cpu: haswell
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64
|
||||
obj: bun-obj-darwin-x64
|
||||
runner: macos-12
|
||||
artifact: bun-obj-darwin-x64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
dependencies: false
|
||||
compile_obj: true
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
# dependencies: true
|
||||
# compile_obj: true
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
- name: Install system dependencies
|
||||
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
|
||||
- name: Install dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install sccache ccache rust llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config --force
|
||||
# echo "$(brew --prefix sccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
|
||||
curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-x64.zip"
|
||||
unzip bun-darwin-x64.zip
|
||||
mkdir -p ${{ runner.temp }}/.bun/bin
|
||||
mv bun-darwin-x64/bun ${{ runner.temp }}/.bun/bin/bun
|
||||
chmod +x ${{ runner.temp }}/.bun/bin/bun
|
||||
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
|
||||
|
||||
# TODO: replace with sccache
|
||||
- name: ccache
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}
|
||||
|
||||
- name: Compile C++
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
SOURCE_DIR: ${{ github.workspace }}
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
mkdir -p $OBJ_DIR
|
||||
cd $OBJ_DIR
|
||||
|
||||
cmake -S $SOURCE_DIR -B $OBJ_DIR \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DBUN_CPP_ONLY=1 \
|
||||
-DNO_CONFIGURE_DEPENDS=1
|
||||
|
||||
bash compile-cpp-only.sh -v
|
||||
|
||||
brew install rust ccache llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@16
|
||||
- name: Download WebKit
|
||||
if: matrix.compile_obj
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
rm -rf $JSC_BASE_DIR
|
||||
mkdir -p $JSC_BASE_DIR
|
||||
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
|
||||
- name: ccache (dependencies)
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
if: matrix.dependencies
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-dependencies
|
||||
- name: ccache (c++)
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
if: matrix.compile_obj
|
||||
with:
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-obj
|
||||
- name: Compile dependencies
|
||||
if: matrix.dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
|
||||
make vendor-without-check
|
||||
- name: Compile C++
|
||||
if: matrix.compile_obj
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
mkdir -p $OBJ_DIR $BUN_DEPS_OUT_DIR
|
||||
make -j $(sysctl -n hw.ncpu) release-bindings
|
||||
- name: Upload C++
|
||||
if: matrix.compile_obj
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
|
||||
path: ${{ runner.temp }}/bun-cpp-obj
|
||||
- name: Upload Dependencies
|
||||
if: matrix.dependencies
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
|
||||
macOS:
|
||||
name: macOS Link
|
||||
runs-on: ${{ matrix.runner }}
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
needs: [macOS-cpp, macOS-zig, macOS-dependencies]
|
||||
needs: [macOS-cpp, macos-object-files]
|
||||
timeout-minutes: 90
|
||||
permissions: write-all
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# - cpu: nehalem
|
||||
# arch: x86_64
|
||||
# tag: bun-darwin-x64-baseline
|
||||
# obj: bun-obj-darwin-x64-baseline
|
||||
# package: bun-darwin-x64
|
||||
# runner: macos-12
|
||||
# artifact: bun-obj-darwin-x64-baseline
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
- cpu: haswell
|
||||
arch: x86_64
|
||||
tag: bun-darwin-x64
|
||||
@@ -255,71 +269,105 @@ jobs:
|
||||
package: bun-darwin-x64
|
||||
runner: macos-12
|
||||
artifact: bun-obj-darwin-x64
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-oct3-4/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-amd64-lto.tar.gz"
|
||||
# - cpu: native
|
||||
# arch: aarch64
|
||||
# tag: bun-darwin-aarch64
|
||||
# obj: bun-obj-darwin-aarch64
|
||||
# package: bun-darwin-aarch64
|
||||
# artifact: bun-obj-darwin-aarch64
|
||||
# webkit_url: "https://github.com/oven-sh/WebKit/releases/download/2023-sept15-1/bun-webkit-macos-arm64-lto.tar.gz"
|
||||
# runner: macos-arm64
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: Checkout submodules
|
||||
run: git submodule update --init --recursive --depth=1 --progress --force
|
||||
|
||||
- name: Install system dependencies
|
||||
run: git submodule update --init --recursive --depth=1 --progress -j $(sysctl -n hw.ncpu) --force
|
||||
- name: Install dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
run: |
|
||||
brew install ccache llvm@$LLVM_VERSION pkg-config coreutils libtool cmake libiconv openssl@1.1 ninja --force
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
brew install rust ccache llvm@16 pkg-config coreutils libtool cmake libiconv automake openssl@1.1 ninja gnu-sed pkg-config esbuild --force
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
|
||||
curl -LO "$BUN_DOWNLOAD_URL_BASE/bun-darwin-x64-baseline.zip"
|
||||
unzip bun-darwin-x64-baseline.zip
|
||||
mkdir -p ${{ runner.temp }}/.bun/bin
|
||||
mv bun-darwin-x64-baseline/bun ${{ runner.temp }}/.bun/bin/bun
|
||||
chmod +x ${{ runner.temp }}/.bun/bin/bun
|
||||
echo "${{ runner.temp }}/.bun/bin" >> $GITHUB_PATH
|
||||
|
||||
echo "$(brew --prefix llvm@16)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@16
|
||||
- name: Download WebKit
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
rm -rf $JSC_BASE_DIR
|
||||
mkdir -p $JSC_BASE_DIR
|
||||
curl -L ${{ matrix.webkit_url }} | tar -xz -C $JSC_BASE_DIR --strip-components=1
|
||||
- name: Download C++
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj
|
||||
|
||||
- name: Download Zig Object
|
||||
- name: Download Dependencies
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
- name: Download Object
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.obj }}
|
||||
path: ${{ runner.temp }}/release
|
||||
|
||||
- name: Downloaded submodule dependencies
|
||||
uses: actions/download-artifact@v3
|
||||
- name: ccache (link)
|
||||
uses: hendrikmuhs/ccache-action@v1.2
|
||||
with:
|
||||
name: ${{ matrix.tag }}-deps
|
||||
path: ${{runner.temp}}/bun-deps
|
||||
|
||||
key: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
|
||||
restore-keys: ${{ runner.os }}-ccache-${{ matrix.tag }}-link
|
||||
- name: Link
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
SRC_DIR=$PWD
|
||||
mkdir ${{runner.temp}}/link-build
|
||||
cd ${{runner.temp}}/link-build
|
||||
cmake $SRC_DIR \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DBUN_LINK_ONLY=1 \
|
||||
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
|
||||
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
|
||||
-DBUN_DEPS_OUT_DIR="${{runner.temp}}/bun-deps" \
|
||||
-DNO_CONFIGURE_DEPENDS=1
|
||||
ninja -v
|
||||
rm -rf packages/${{ matrix.package }}
|
||||
mkdir -p packages/${{ matrix.package }}
|
||||
mv ${{ runner.temp }}/release/* packages/${{ matrix.package }}/
|
||||
make bun-link-lld-release copy-to-bun-release-dir-bin
|
||||
- name: Zip
|
||||
env:
|
||||
CPU_TARGET: ${{ matrix.cpu }}
|
||||
JSC_BASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
JSC_LIB: ${{ runner.temp }}/bun-webkit/lib
|
||||
BUN_DEPLOY_DIR: ${{ runner.temp }}/release/bun
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{runner.temp}}/bun-deps
|
||||
BUN_RELEASE_DIR: ${{ runner.temp }}/release
|
||||
WEBKIT_RELEASE_DIR: ${{ runner.temp }}/bun-webkit
|
||||
WEBKIT_RELEASE_DIR_LTO: ${{ runner.temp }}/bun-webkit
|
||||
run: |
|
||||
cd ${{runner.temp}}/link-build
|
||||
cd ${{runner.temp}}/release
|
||||
chmod +x bun-profile bun
|
||||
|
||||
mkdir -p ${{matrix.tag}}-profile/ ${{matrix.tag}}/
|
||||
mkdir ${{matrix.tag}}-profile
|
||||
mkdir ${{matrix.tag}}
|
||||
|
||||
/usr/bin/strip -S bun
|
||||
|
||||
mv bun-profile ${{matrix.tag}}-profile/bun-profile
|
||||
mv bun ${{matrix.tag}}/bun
|
||||
@@ -329,11 +377,11 @@ jobs:
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}-profile
|
||||
path: ${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip
|
||||
path: ${{runner.temp}}/release/${{matrix.tag}}-profile.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{matrix.tag}}
|
||||
path: ${{runner.temp}}/link-build/${{matrix.tag}}.zip
|
||||
path: ${{runner.temp}}/release/${{matrix.tag}}.zip
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
@@ -350,33 +398,12 @@ jobs:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{runner.temp}}/link-build/${{matrix.tag}}.zip,${{runner.temp}}/link-build/${{matrix.tag}}-profile.zip"
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
|
||||
with:
|
||||
title: ""
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
status: ${{ job.status }}
|
||||
noprefix: true
|
||||
nocontext: true
|
||||
description: |
|
||||
Pull Request
|
||||
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
|
||||
|
||||
@${{ github.actor }}
|
||||
|
||||
Build failed on ${{ matrix.tag }}:
|
||||
|
||||
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
|
||||
artifacts: "${{runner.temp}}/release/${{matrix.tag}}.zip,${{runner.temp}}/release/${{matrix.tag}}-profile.zip"
|
||||
macOS-test:
|
||||
name: Tests ${{matrix.tag}}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
needs: [macOS]
|
||||
if: github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'
|
||||
permissions:
|
||||
pull-requests: write
|
||||
timeout-minutes: 30
|
||||
outputs:
|
||||
failing_tests: ${{ steps.test.outputs.failing_tests }}
|
||||
@@ -399,8 +426,8 @@ jobs:
|
||||
with:
|
||||
name: ${{matrix.tag}}
|
||||
path: ${{runner.temp}}/release
|
||||
- id: install-bun
|
||||
name: Install Bun
|
||||
- id: install
|
||||
name: Install
|
||||
run: |
|
||||
cd ${{runner.temp}}/release
|
||||
unzip ${{matrix.tag}}.zip
|
||||
@@ -408,12 +435,6 @@ jobs:
|
||||
chmod +x bun
|
||||
pwd >> $GITHUB_PATH
|
||||
./bun --version
|
||||
- id: install
|
||||
name: Install dependencies
|
||||
run: |
|
||||
bun install --verbose
|
||||
bun install --cwd=test --verbose
|
||||
bun install --cwd=packages/bun-internal-test --verbose
|
||||
- id: test
|
||||
name: Test (node runner)
|
||||
env:
|
||||
@@ -422,24 +443,10 @@ jobs:
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
# if: ${{github.event.inputs.use_bun == 'false'}}
|
||||
run: |
|
||||
bun install
|
||||
bun install --cwd test
|
||||
bun install --cwd packages/bun-internal-test
|
||||
node packages/bun-internal-test/src/runner.node.mjs || true
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: always() && steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
with:
|
||||
title: ""
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
status: "failure"
|
||||
noprefix: true
|
||||
nocontext: true
|
||||
description: |
|
||||
Pull Request
|
||||
### ❌ [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
|
||||
|
||||
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count }} files with test failures on ${{ matrix.tag }}:
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View test output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
- name: Comment on PR
|
||||
if: steps.test.outputs.failing_tests != '' && github.event_name == 'pull_request'
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
|
||||
179 .github/workflows/bun-release-canary.yml vendored Normal file
@@ -0,0 +1,179 @@
name: bun-release-canary
concurrency: release-canary
on:
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
jobs:
sign:
name: Sign Release
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "canary"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-npm -- canary publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
# npm-types:
# name: Release types to NPM
# runs-on: ubuntu-latest
# defaults:
# run:
# working-directory: packages/bun-types
# steps:
# - id: checkout
# name: Checkout
# uses: actions/checkout@v3
# - id: setup-node
# name: Setup Node.js
# uses: actions/setup-node@v3
# with:
# node-version: latest
# - id: setup-bun
# name: Setup Bun
# uses: oven-sh/setup-bun@v1
# with:
# bun-version: canary
# - id: bun-install
# name: Install Dependencies
# run: bun install
# - id: setup-env
# name: Setup Environment
# run: |
# SHA=$(git rev-parse --short "$GITHUB_SHA")
# VERSION=$(bun --version)
# TAG="${VERSION}-canary.$(date '+%Y%m%d').1+${SHA}"
# echo "Setup tag: ${TAG}"
# echo "TAG=${TAG}" >> ${GITHUB_ENV}
# - id: bun-run
# name: Build
# run: bun run build
# env:
# BUN_VERSION: ${{ env.TAG }}
# - id: npm-publish
# name: Release
# uses: JS-DevTools/npm-publish@v1
# with:
# package: packages/bun-types/dist/package.json
# token: ${{ secrets.NPM_TOKEN }}
# tag: canary
docker:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
uses: docker/setup-buildx-action@v2
with:
platforms: linux/amd64,linux/arm64
- id: metadata
name: Setup Docker metadata
uses: docker/metadata-action@v4
with:
images: oven/bun
tags: canary
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=canary
s3:
name: Upload to S3
runs-on: ubuntu-latest
needs: sign
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: bun-run
name: Release
run: bun upload-s3 -- canary
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun
54 .github/workflows/bun-release-types-canary.yml vendored Normal file
@@ -0,0 +1,54 @@
name: bun-release-types-canary
concurrency: release-canary
on:
push:
branches:
- main
paths:
- "packages/bun-types/**"
workflow_dispatch:
jobs:
npm-types:
name: Release types to NPM
runs-on: ubuntu-latest
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- id: checkout
name: Checkout
uses: actions/checkout@v3
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- id: setup-env
name: Setup Environment
run: |
SHA=$(git rev-parse --short "$GITHUB_SHA")
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary
250 .github/workflows/bun-release.yml vendored
@@ -1,95 +1,87 @@
name: bun-release
concurrency: release
env:
BUN_VERSION: ${{ github.event.inputs.tag || github.event.release.tag_name || 'canary' }}
BUN_LATEST: ${{ (github.event.inputs.is-latest || github.event.release.tag_name) && 'true' || 'false' }}
on:
release:
types:
- published
schedule:
- cron: "0 14 * * *" # every day at 6am PST
workflow_dispatch:
inputs:
is-latest:
description: Is this the latest release?
type: boolean
default: false
tag:
type: string
description: What is the release tag? (e.g. "1.0.2", "canary")
description: The tag to publish
required: true
use-docker:
description: Should Docker images be released?
type: boolean
default: false
use-npm:
description: Should npm packages be published?
type: boolean
default: false
use-homebrew:
description: Should binaries be released to Homebrew?
type: boolean
default: false
use-s3:
description: Should binaries be uploaded to S3?
type: boolean
default: false
use-types:
description: Should types be released to npm?
type: boolean
default: false
jobs:
sign:
name: Sign Release
permissions: write-all
runs-on: ubuntu-latest
if: ${{ github.repository_owner == 'oven-sh' }}
permissions:
contents: write
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup GPG
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: Setup Bun
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Install Dependencies
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- name: Sign Release
- id: bun-run
name: Sign Release
run: |
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.BUN_VERSION }}"
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
npm:
name: Release to NPM
runs-on: ubuntu-latest
permissions: write-all
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-npm == 'true' }}
permissions:
contents: read
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup Bun
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Install Dependencies
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- name: Release
run: bun upload-npm -- "${{ env.BUN_VERSION }}" publish
- id: bun-run
name: Release
run: bun upload-npm -- "${{ env.TAG }}" publish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
@@ -97,45 +89,41 @@ jobs:
name: Release types to NPM
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-types == 'true' }}
permissions:
contents: read
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-types
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup Node.js
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-node
name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: latest
- name: Setup Bun
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Install Dependencies
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- name: Setup Tag
if: ${{ env.BUN_VERSION == 'canary' }}
run: |
VERSION=$(bun --version)
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- name: Build
- id: bun-run
name: Build
run: bun run build
env:
BUN_VERSION: ${{ env.TAG || env.BUN_VERSION }}
- name: Release (canary)
if: ${{ env.BUN_VERSION == 'canary' }}
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
token: ${{ secrets.NPM_TOKEN }}
tag: canary
- name: Release (latest)
if: ${{ env.BUN_LATEST == 'true' }}
BUN_VERSION: ${{ env.TAG }}
- id: npm-publish
name: Release
uses: JS-DevTools/npm-publish@v1
with:
package: packages/bun-types/dist/package.json
@@ -144,28 +132,20 @@ jobs:
name: Release to Dockerhub
runs-on: ubuntu-latest
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-docker == 'true' }}
permissions:
contents: read
strategy:
fail-fast: false
matrix:
include:
- variant: debian
suffix: ""
- variant: debian
suffix: -debian
- variant: slim
suffix: -slim
dir: debian-slim
- variant: alpine
suffix: -alpine
- variant: distroless
suffix: -distroless
if: github.repository_owner == 'oven-sh'
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup Docker emulator
- id: environment
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: qemu
name: Setup Docker QEMU
uses: docker/setup-qemu-action@v2
- id: buildx
name: Setup Docker buildx
@@ -177,84 +157,102 @@ jobs:
uses: docker/metadata-action@v4
with:
images: oven/bun
flavor: |
latest=false
tags: |
type=raw,value=latest,enable=${{ env.BUN_LATEST == 'true' && matrix.suffix == '' }}
type=raw,value=${{ matrix.variant }},enable=${{ env.BUN_LATEST == 'true' }}
type=match,pattern=(bun-v)?(canary|\d+.\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
type=match,pattern=(bun-v)?(canary|\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
- name: Login to Docker
type=match,pattern=(bun-v)?(\d+.\d+.\d+),group=2,value=${{ env.TAG }}
type=match,pattern=(bun-v)?(\d+.\d+),group=2,value=${{ env.TAG }}
- id: login
name: Login to Docker
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Push to Docker
- id: push
name: Push to Docker
uses: docker/build-push-action@v3
with:
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
context: ./dockerhub
file: ./dockerhub/Dockerfile-debian
platforms: linux/amd64,linux/arm64
builder: ${{ steps.buildx.outputs.name }}
push: true
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
build-args: |
BUN_VERSION=${{ env.BUN_VERSION }}
BUN_VERSION=${{ env.TAG }}
homebrew:
name: Release to Homebrew
permissions: write-all
runs-on: ubuntu-latest
needs: sign
permissions:
contents: read
if: ${{ github.event_name == 'release' || github.event.inputs.use-homebrew == 'true' }}
if: github.repository_owner == 'oven-sh'
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
with:
repository: oven-sh/homebrew-bun
token: ${{ secrets.ROBOBUN_TOKEN }}
- id: gpg
- id: setup-gpg
name: Setup GPG
uses: crazy-max/ghaction-import-gpg@v5
with:
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
passphrase: ${{ secrets.GPG_PASSPHRASE }}
- name: Setup Ruby
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-ruby
name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: "2.6"
- name: Update Tap
run: ruby scripts/release.rb "${{ env.BUN_VERSION }}"
- name: Commit Tap
- id: update-tap
name: Update Tap
run: ruby scripts/release.rb "${{ env.TAG }}"
- id: commit-tap
name: Commit Tap
uses: stefanzweifel/git-auto-commit-action@v4
with:
commit_options: --gpg-sign=${{ steps.gpg.outputs.keyid }}
commit_message: Release ${{ env.BUN_VERSION }}
commit_options: --gpg-sign=${{ steps.setup-gpg.outputs.keyid }}
commit_message: Release ${{ env.TAG }}
commit_user_name: robobun
commit_user_email: robobun@oven.sh
commit_author: robobun <robobun@oven.sh>
s3:
name: Upload to S3
runs-on: ubuntu-latest
permissions: write-all
needs: sign
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-s3 == 'true' }}
permissions:
contents: read
if: github.repository_owner == 'oven-sh'
defaults:
run:
working-directory: packages/bun-release
steps:
- name: Checkout
- id: checkout
name: Checkout
uses: actions/checkout@v3
- name: Setup Bun
- id: setup-env
name: Setup Environment
run: |
TAG="${{ github.event.inputs.tag }}"
TAG="${TAG:-"${{ github.event.release.tag_name }}"}"
echo "Setup tag: ${TAG}"
echo "TAG=${TAG}" >> ${GITHUB_ENV}
- id: setup-bun
name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest
- name: Install Dependencies
bun-version: canary
- id: bun-install
name: Install Dependencies
run: bun install
- name: Release
run: bun upload-s3 -- "${{ env.BUN_VERSION }}"
- id: bun-run
name: Release
run: bun upload-s3 -- "${{ env.TAG }}"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
337 .github/workflows/bun-windows-x64.yml vendored
@@ -1,337 +0,0 @@
name: bun-windows-x64

concurrency:
group: bun-windows-x64-${{ github.ref }}
cancel-in-progress: true

env:
# note: in other files, this version is only the major version, but for windows it is the full version
LLVM_VERSION: 16.0.6
BUN_DOWNLOAD_URL_BASE: https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/latest

cpu: native
arch: x86_64
tag: bun-windows-x64
# TODO: wire this up to workflow_dispatch.
# github's expression syntax makes this hard to set a default to true
canary: true

on:
push:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
pull_request:
branches: [main]
paths:
- "src/**/*"
- "test/**/*"
- "packages/bun-usockets/src/**/*"
- "build.zig"
- "Makefile"
- "Dockerfile"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
# inputs:
# is-canary:
# type: boolean
# description: Is Canary Build?
# default: true

jobs:
windows-zig:
name: Zig Build
runs-on: med-ubuntu
timeout-minutes: 60
if: github.repository_owner == 'oven-sh'
steps:
- uses: actions/checkout@v4
- name: Setup Docker Buildx
uses: docker/setup-buildx-action@v2
id: buildx
with:
install: true

- name: Login to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Calculate Canary Revision
if: ${{ env.canary == 'true' }}
id: canary
run: |
echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" >> $GITHUB_OUTPUT

- name: Compile Zig Object
uses: docker/build-push-action@v3
if: runner.arch == 'X64'
with:
context: .
push: false
# This doesnt seem to work
# cache-from: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
# cache-to: type=s3,endpoint_url=${{ secrets.CACHE_S3_ENDPOINT }},blobs_prefix=docker_blobs/,manifests_prefix=docker_manifests/,access_key_id=${{ secrets.CACHE_S3_ACCESS_KEY_ID }},secret_access_key=${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }},bucket=bun,region=auto
build-args: |
BUILDARCH=${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
BUILD_MACHINE_ARCH=${{ runner.arch == 'X64' && 'x86_64' || 'aarch64' }}
ARCH=${{ env.arch }}
CPU_TARGET=${{ env.cpu }}
TRIPLET=${{ env.arch }}-windows-msvc
GIT_SHA=${{ github.sha }}
CANARY=${{ env.canary == 'true' && steps.canary.outputs.canary_revision || '0' }}
platforms: linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
target: build_release_obj
outputs: type=local,dest=${{runner.temp}}/release

- name: Upload Zig Object
uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-zig
path: ${{runner.temp}}/release/bun-zig.o

windows-dependencies:
name: Dependencies
runs-on: windows
timeout-minutes: 60
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Clone Submodules
run: .\scripts\update-submodules.ps1
- name: Hash submodule versions
shell: pwsh
run: |
$data = "$(& {
git submodule | Where-Object { $_ -notmatch 'WebKit' }
clang --version
rustc --version
Get-Content -Path (Get-ChildItem -Path 'scripts/build*.sh', 'scripts/all-dependencies.sh' | Sort-Object -Property Name).FullName | Out-String
})"
$hash = ( -join ((New-Object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider).ComputeHash([System.Text.Encoding]::UTF8.GetBytes($data)) | ForEach-Object { $_.ToString("x2") } )).Substring(0, 10)
echo "sha=${hash}" >> $env:GITHUB_OUTPUT
id: submodule-versions

- name: Try fetch dependencies
id: cache-deps-restore
uses: actions/cache/restore@v3
with:
path: bun-deps
key: bun-deps-${{ env.tag }}-${{ steps.submodule-versions.outputs.sha }}

- name: Install LLVM ${{ env.LLVM_VERSION }}
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
uses: KyleMayes/install-llvm-action@1a3da29f56261a1e1f937ec88f0856a9b8321d7e
with:
version: ${{ env.LLVM_VERSION }}

- name: Install Ninja
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
run: choco install -y ninja

- name: Build Dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
run: |
.\scripts\env.ps1
Invoke-WebRequest -Uri "https://www.nasm.us/pub/nasm/releasebuilds/2.16.01/win64/nasm-2.16.01-win64.zip" -OutFile nasm.zip
Expand-Archive nasm.zip (mkdir -Force "nasm")
$Nasm = (Get-ChildItem "nasm")
$env:Path += ";${Nasm}"
$env:BUN_DEPS_OUT_DIR = (mkdir -Force "./bun-deps")
.\scripts\all-dependencies.ps1

- name: Upload Dependencies
uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-deps
path: bun-deps/

- name: Cache Dependencies
if: ${{ !steps.cache-deps-restore.outputs.cache-hit }}
id: cache-deps-save
uses: actions/cache/save@v3
with:
path: bun-deps
key: ${{ steps.cache-deps-restore.outputs.cache-primary-key }}

windows-codegen:
name: Codegen
runs-on: ubuntu-latest
timeout-minutes: 10
if: github.repository_owner == 'oven-sh'
steps:
- uses: actions/checkout@v4
- run: |
curl -fsSL $BUN_DOWNLOAD_URL_BASE/bun-linux-x64.zip > bun.zip
unzip bun.zip
export PATH="$PWD/bun-linux-x64:$PATH"
./scripts/cross-compile-codegen.sh win32 x64
# Sort of a hack to do this step in the codegen stage
- name: Calculate Canary Revision
if: ${{ env.canary == 'true' }}
run: |
echo "canary_revision=$(GITHUB_TOKEN="${{ secrets.GITHUB_TOKEN }}" bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision
- uses: actions/upload-artifact@v3
with:
name: ${{ env.tag }}-codegen
path: build-codegen-win32-x64/
|
||||
|
||||
windows-cpp:
|
||||
name: C++ Build
|
||||
needs: [windows-codegen]
|
||||
runs-on: windows
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 90
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: KyleMayes/install-llvm-action@1a3da29f56261a1e1f937ec88f0856a9b8321d7e
|
||||
with:
|
||||
version: ${{ env.LLVM_VERSION }}
|
||||
- run: choco install -y ninja
|
||||
- name: Download Codegen
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-codegen
|
||||
path: build
|
||||
- name: Build C++
|
||||
run: |
|
||||
# Using SCCache is blocked by
|
||||
# https://github.com/mozilla/sccache/issues/1843
|
||||
# https://github.com/mozilla/sccache/pull/1856
|
||||
# $sczip = "sccache-v0.6.0-x86_64-pc-windows-msvc"
|
||||
|
||||
# Invoke-WebRequest -Uri "https://github.com/mozilla/sccache/releases/download/v0.6.0/${sczip}.zip" -OutFile "${sczip}.zip"
|
||||
# Expand-Archive "${sczip}.zip"
|
||||
# $env:SCCACHE_BUCKET="bun"
|
||||
# $env:SCCACHE_REGION="auto"
|
||||
# $env:SCCACHE_S3_USE_SSL="true"
|
||||
# $env:SCCACHE_ENDPOINT="${{ secrets.CACHE_S3_ENDPOINT }}"
|
||||
# $env:AWS_ACCESS_KEY_ID="${{ secrets.CACHE_S3_ACCESS_KEY_ID }}"
|
||||
# $env:AWS_SECRET_ACCESS_KEY="${{ secrets.CACHE_S3_SECRET_ACCESS_KEY }}"
|
||||
# $SCCACHE="$PWD/${sczip}/${sczip}/sccache.exe"
|
||||
|
||||
$CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
|
||||
.\scripts\env.ps1
|
||||
.\scripts\update-submodules.ps1
|
||||
.\scripts\build-libuv.ps1 -CloneOnly $True
|
||||
cd build
|
||||
# "-DCCACHE_PROGRAM=${SCCACHE}"
|
||||
# TODO(@paperdave): pass the proper revision of canary here. without it,
|
||||
# the properties window will display the wrong version.
|
||||
# not really a big deal for time being. should be resolved before release
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
|
||||
-DNO_CODEGEN=1 `
|
||||
-DNO_CONFIGURE_DEPENDS=1 `
|
||||
"-DCANARY=${CANARY_REVISION}" `
|
||||
-DBUN_CPP_ONLY=1
|
||||
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
|
||||
.\compile-cpp-only.ps1 -v
|
||||
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-cpp
|
||||
path: build/bun-cpp-objects.a
|
||||
|
||||
windows-link:
|
||||
name: Link
|
||||
needs: [windows-dependencies, windows-codegen, windows-cpp, windows-zig]
|
||||
runs-on: windows-latest
|
||||
if: github.repository_owner == 'oven-sh'
|
||||
timeout-minutes: 30
|
||||
permissions: write-all
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: KyleMayes/install-llvm-action@1a3da29f56261a1e1f937ec88f0856a9b8321d7e
|
||||
with:
|
||||
version: ${{ env.LLVM_VERSION }}
|
||||
- run: choco install -y ninja
|
||||
- name: Download Codegen
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-codegen
|
||||
path: build
|
||||
- name: Download Dependencies
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-deps
|
||||
path: bun-deps
|
||||
- name: Download Zig Object
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-zig
|
||||
path: bun-zig
|
||||
- name: Download C++ Objects
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}-cpp
|
||||
path: bun-cpp
|
||||
- name: Link
|
||||
run: |
|
||||
.\scripts\update-submodules.ps1
|
||||
.\scripts\env.ps1
|
||||
Set-Location build
|
||||
$CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
|
||||
-DNO_CODEGEN=1 `
|
||||
-DNO_CONFIGURE_DEPENDS=1 `
|
||||
"-DCANARY=${CANARY_REVISION}" `
|
||||
-DBUN_LINK_ONLY=1 `
|
||||
"-DBUN_DEPS_OUT_DIR=$(Resolve-Path ../bun-deps)" `
|
||||
"-DBUN_CPP_ARCHIVE=$(Resolve-Path ../bun-cpp/bun-cpp-objects.a)" `
|
||||
"-DBUN_ZIG_OBJ=$(Resolve-Path ../bun-zig/bun-zig.o)"
|
||||
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
|
||||
ninja -v
|
||||
if ($LASTEXITCODE -ne 0) { throw "Link failed!" }
|
||||
- name: Package
|
||||
run: |
|
||||
$Dist = mkdir -Force "${{ env.tag }}"
|
||||
cp -r build\bun.exe "$Dist\bun.exe"
|
||||
Compress-Archive $Dist ${{ env.tag }}.zip
|
||||
- uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ env.tag }}
|
||||
path: ${{ env.tag }}.zip
|
||||
- name: Release
|
||||
id: release
|
||||
uses: ncipollo/release-action@v1
|
||||
if: |
|
||||
github.repository_owner == 'oven-sh'
|
||||
&& github.ref == 'refs/heads/main'
|
||||
with:
|
||||
prerelease: true
|
||||
body: "This canary release of Bun corresponds to the commit [${{ github.sha }}]"
|
||||
allowUpdates: true
|
||||
replacesArtifacts: true
|
||||
generateReleaseNotes: true
|
||||
artifactErrorsFailBuild: true
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
name: "Canary (${{github.sha}})"
|
||||
tag: "canary"
|
||||
artifacts: "${{env.tag}}.zip"
|
||||
- uses: sarisia/actions-status-discord@v1
|
||||
if: failure() && github.repository_owner == 'oven-sh' && github.event_name == 'pull_request'
|
||||
with:
|
||||
title: ""
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
status: ${{ job.status }}
|
||||
noprefix: true
|
||||
nocontext: true
|
||||
description: |
|
||||
Pull Request
|
||||
### [${{github.event.pull_request.title}}](https://github.com/oven-sh/bun/pull/${{github.event.number}})
|
||||
|
||||
@${{ github.actor }}
|
||||
|
||||
Build failed on ${{ env.tag }}:
|
||||
|
||||
**[View build output](https://github.com/oven-sh/bun/actions/runs/${{github.run_id}})**
|
||||
|
||||
[Commit ${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})
|
||||
45
.github/workflows/format.yml
vendored
@@ -1,45 +0,0 @@
|
||||
name: autofix.ci # Must be named this for autofix.ci to work
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
env:
|
||||
ZIG_VERSION: 0.12.0-dev.1604+caae40c21
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
format:
|
||||
name: format
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
src
|
||||
packages
|
||||
test
|
||||
bench
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: latest
|
||||
- name: Setup Zig
|
||||
uses: goto-bus-stop/setup-zig@c7b6cdd3adba8f8b96984640ff172c37c93f73ee
|
||||
with:
|
||||
version: ${{ env.ZIG_VERSION }}
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
bun install
|
||||
- name: Format
|
||||
run: |
|
||||
bun fmt
|
||||
bun fmt:zig
|
||||
- name: Commit # https://autofix.ci/
|
||||
uses: autofix-ci/action@d3e591514b99d0fca6779455ff8338516663f7cc
|
||||
76
.github/workflows/prettier-fmt.yml
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
name: prettier
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- jarred/test-actions
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
prettier-fmt:
|
||||
name: prettier
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
prettier_fmt_errs: ${{ steps.fmt.outputs.prettier_fmt_errs }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
|
||||
- id: setup
|
||||
name: Setup
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: latest
|
||||
- id: install
|
||||
name: Install prettier
|
||||
run: bun install
|
||||
- name: Run prettier
|
||||
id: fmt
|
||||
run: |
|
||||
rm -f .failed
|
||||
bun prettier --check "./bench/**/*.{ts,tsx,js,jsx,mjs}" "./test/**/*.{ts,tsx,js,jsx,mjs}" "./src/**/*.{ts,tsx,js,jsx}" --config .prettierrc.cjs 2> prettier-fmt.err > prettier-fmt1.err || echo 'failed' > .failed
|
||||
|
||||
if [ -s .failed ]; then
|
||||
delimiter="$(openssl rand -hex 8)"
|
||||
echo "prettier_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
cat prettier-fmt.err >> "${GITHUB_OUTPUT}"
|
||||
cat prettier-fmt1.err >> "${GITHUB_OUTPUT}"
|
||||
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
- name: Comment on PR
|
||||
if: steps.fmt.outputs.prettier_fmt_errs != ''
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: prettier-fmt
|
||||
message: |
|
||||
❌ @${{ github.actor }} `prettier` reported errors
|
||||
|
||||
```js
|
||||
${{ steps.fmt.outputs.prettier_fmt_errs }}
|
||||
```
|
||||
|
||||
To one-off fix this manually, run:
|
||||
```sh
|
||||
bun fmt
|
||||
```
|
||||
|
||||
You might need to run `bun install` locally and configure your text editor to [auto-format on save](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode).
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Uncomment on PR
|
||||
if: steps.fmt.outputs.prettier_fmt_errs == ''
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: prettier-fmt
|
||||
mode: upsert
|
||||
create_if_not_exists: false
|
||||
message: |
|
||||
✅ `prettier` errors have been resolved. Thank you.
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
- name: Fail the job
|
||||
if: steps.fmt.outputs.prettier_fmt_errs != ''
|
||||
run: exit 1
|
||||
87
.github/workflows/zig-fmt.yml
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
name: zig-fmt
|
||||
|
||||
env:
|
||||
ZIG_VERSION: 0.12.0-dev.163+6780a6bbf
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- main
|
||||
- jarred/test-actions
|
||||
paths:
|
||||
- "src/**/*.zig"
|
||||
- "src/*.zig"
|
||||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
zig-fmt:
|
||||
name: zig fmt
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
zig_fmt_errs: ${{ steps.fmt.outputs.zig_fmt_errs }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install zig
|
||||
run: |
|
||||
curl https://ziglang.org/builds/zig-linux-x86_64-${{env.ZIG_VERSION}}.tar.xz -L -o zig.tar.xz
|
||||
tar -xf zig.tar.xz
|
||||
echo "$(pwd)/zig-linux-x86_64-${{env.ZIG_VERSION}}" >> $GITHUB_PATH
|
||||
- name: Run zig fmt
|
||||
id: fmt
|
||||
run: |
|
||||
zig fmt --check src/*.zig src/**/*.zig 2> zig-fmt.err > zig-fmt.err2 || echo "Failed"
|
||||
delimiter="$(openssl rand -hex 8)"
|
||||
echo "zig_fmt_errs<<${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
|
||||
if [ -s zig-fmt.err ]; then
|
||||
echo "// The following errors occurred:" >> "${GITHUB_OUTPUT}"
|
||||
cat zig-fmt.err >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
if [ -s zig-fmt.err2 ]; then
|
||||
echo "// The following files were not formatted:" >> "${GITHUB_OUTPUT}"
|
||||
cat zig-fmt.err2 >> "${GITHUB_OUTPUT}"
|
||||
fi
|
||||
|
||||
echo "${delimiter}" >> "${GITHUB_OUTPUT}"
|
||||
- name: Comment on PR
|
||||
if: steps.fmt.outputs.zig_fmt_errs != ''
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: zig-fmt
|
||||
message: |
|
||||
❌ @${{ github.actor }} `zig fmt` reported errors. Consider configuring your text editor to [auto-format on save](https://github.com/ziglang/vscode-zig)
|
||||
|
||||
```zig
|
||||
// # zig fmt --check src/*.zig src/**/*.zig
|
||||
${{ steps.fmt.outputs.zig_fmt_errs }}
|
||||
```
|
||||
|
||||
To one-off fix this manually, run:
|
||||
|
||||
```sh
|
||||
zig fmt src/*.zig src/**/*.zig
|
||||
```
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
<sup>zig v${{env.ZIG_VERSION}}</sup>
|
||||
|
||||
- name: Uncomment on PR
|
||||
if: steps.fmt.outputs.zig_fmt_errs == ''
|
||||
uses: thollander/actions-comment-pull-request@v2
|
||||
with:
|
||||
comment_tag: zig-fmt
|
||||
mode: upsert
|
||||
create_if_not_exists: false
|
||||
message: |
|
||||
✅ `zig fmt` errors have been resolved. Thank you.
|
||||
|
||||
<sup>[#${{github.sha}}](https://github.com/oven-sh/bun/commits/${{github.sha}})</sup>
|
||||
<sup>zig v${{env.ZIG_VERSION}}</sup>
|
||||
|
||||
- name: Fail the job
|
||||
if: steps.fmt.outputs.zig_fmt_errs != ''
|
||||
run: exit 1
|
||||
31
.gitignore
vendored
@@ -6,7 +6,6 @@ packages/*/*.wasm
|
||||
profile.json
|
||||
|
||||
node_modules
|
||||
.envrc
|
||||
.swcrc
|
||||
yarn.lock
|
||||
dist
|
||||
@@ -110,7 +109,7 @@ misctools/machbench
|
||||
*.big
|
||||
.eslintcache
|
||||
|
||||
/bun-webkit
|
||||
bun-webkit
|
||||
|
||||
src/deps/c-ares/build
|
||||
src/bun.js/bindings-obj
|
||||
@@ -124,7 +123,6 @@ cold-jsc-start
|
||||
cold-jsc-start.d
|
||||
|
||||
/test.ts
|
||||
/test.js
|
||||
|
||||
src/js/out/modules*
|
||||
src/js/out/functions*
|
||||
@@ -134,29 +132,4 @@ src/js/out/DebugPath.h
|
||||
make-dev-stats.csv
|
||||
|
||||
.uuid
|
||||
tsconfig.tsbuildinfo
|
||||
|
||||
test/js/bun/glob/fixtures
|
||||
*.lib
|
||||
*.pdb
|
||||
CMakeFiles
|
||||
build.ninja
|
||||
.ninja_deps
|
||||
.ninja_log
|
||||
CMakeCache.txt
|
||||
cmake_install.cmake
|
||||
compile_commands.json
|
||||
|
||||
*.lib
|
||||
x64
|
||||
**/*.vcxproj*
|
||||
**/*.sln*
|
||||
**/*.dir
|
||||
**/*.pdb
|
||||
|
||||
/.webkit-cache
|
||||
/.cache
|
||||
/src/deps/libuv
|
||||
/build-*/
|
||||
|
||||
.vs
|
||||
tsconfig.tsbuildinfo
|
||||
49
.gitmodules
vendored
@@ -1,3 +1,10 @@
|
||||
[submodule "src/deps/picohttpparser"]
|
||||
path = src/deps/picohttpparser
|
||||
url = https://github.com/h2o/picohttpparser.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/javascript/jsc/WebKit"]
|
||||
path = src/bun.js/WebKit
|
||||
url = https://github.com/oven-sh/WebKit.git
|
||||
@@ -6,13 +13,6 @@ depth = 1
|
||||
update = none
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/picohttpparser"]
|
||||
path = src/deps/picohttpparser
|
||||
url = https://github.com/h2o/picohttpparser.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/mimalloc"]
|
||||
path = src/deps/mimalloc
|
||||
url = https://github.com/Jarred-Sumner/mimalloc.git
|
||||
@@ -56,30 +56,15 @@ depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/c-ares"]
|
||||
path = src/deps/c-ares
|
||||
url = https://github.com/c-ares/c-ares.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
path = src/deps/c-ares
|
||||
url = https://github.com/c-ares/c-ares.git
|
||||
[submodule "src/deps/zstd"]
|
||||
path = src/deps/zstd
|
||||
url = https://github.com/facebook/zstd.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
path = src/deps/zstd
|
||||
url = https://github.com/facebook/zstd.git
|
||||
ignore = dirty
|
||||
[submodule "src/deps/base64"]
|
||||
path = src/deps/base64
|
||||
url = https://github.com/aklomp/base64.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/ls-hpack"]
|
||||
path = src/deps/ls-hpack
|
||||
url = https://github.com/litespeedtech/ls-hpack.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
path = src/deps/base64
|
||||
url = https://github.com/aklomp/base64.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
21
.scripts/make-dev-timer.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
// I would have made this a bash script but there isn't an easy way to track
|
||||
// time in bash sub-second cross platform.
|
||||
import fs from "fs";
|
||||
const start = Date.now() + 5;
|
||||
const result = Bun.spawnSync(process.argv.slice(2), {
|
||||
stdio: ["inherit", "inherit", "inherit"],
|
||||
});
|
||||
const end = Date.now();
|
||||
const diff = (Math.max(Math.round(end - start), 0) / 1000).toFixed(3);
|
||||
const success = result.exitCode === 0;
|
||||
try {
|
||||
const line = `${new Date().toISOString()}, ${success ? "success" : "fail"}, ${diff}\n`;
|
||||
if (fs.existsSync(".scripts/make-dev-stats.csv")) {
|
||||
fs.appendFileSync(".scripts/make-dev-stats.csv", line);
|
||||
} else {
|
||||
fs.writeFileSync(".scripts/make-dev-stats.csv", line);
|
||||
}
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
process.exit(result.exitCode);
|
||||
13
.scripts/postinstall.sh
Executable file
@@ -0,0 +1,13 @@
|
||||
#!/bin/bash
|
||||
set -euxo pipefail
|
||||
|
||||
# if bun-webkit node_modules directory exists
|
||||
if [ -d ./node_modules/bun-webkit ]; then
|
||||
rm -f bun-webkit
|
||||
# get the first matching bun-webkit-* directory name
|
||||
ln -s ./node_modules/$(ls ./node_modules | grep bun-webkit- | head -n 1) ./bun-webkit
|
||||
fi
|
||||
|
||||
# sets up vscode C++ intellisense
|
||||
rm -f .vscode/clang++
|
||||
ln -s $(which clang++-16 || which clang++) .vscode/clang++ 2>/dev/null
|
||||
80
.vscode/c_cpp_properties.json
vendored
@@ -1,64 +1,16 @@
|
||||
{
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Debug",
|
||||
"name": "Mac",
|
||||
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
|
||||
"includePath": [
|
||||
"${workspaceFolder}/build/bun-webkit/include",
|
||||
"${workspaceFolder}/build/codegen",
|
||||
"${workspaceFolder}/src/bun.js/bindings/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcore/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
|
||||
"${workspaceFolder}/src/bun.js/modules/",
|
||||
"${workspaceFolder}/src/js/builtins/",
|
||||
"${workspaceFolder}/src/deps/boringssl/include/",
|
||||
"${workspaceFolder}/src/deps",
|
||||
"${workspaceFolder}/src/napi/*",
|
||||
"${workspaceFolder}/packages/bun-usockets/src",
|
||||
"${workspaceFolder}/packages/"
|
||||
],
|
||||
"browse": {
|
||||
"path": [
|
||||
"${workspaceFolder}/build/bun-webkit/include",
|
||||
"${workspaceFolder}/src/bun.js/bindings",
|
||||
"${workspaceFolder}/src/napi/*",
|
||||
"${workspaceFolder}/src/js/builtins/*",
|
||||
"${workspaceFolder}/src/bun.js/modules/*",
|
||||
"${workspaceFolder}/src/deps/*",
|
||||
"${workspaceFolder}/src/deps/boringssl/include/*",
|
||||
"${workspaceFolder}/packages/bun-usockets/*",
|
||||
"${workspaceFolder}/packages/bun-uws/*",
|
||||
"${workspaceFolder}/src/napi/*"
|
||||
],
|
||||
"limitSymbolsToIncludedHeaders": true,
|
||||
"databaseFilename": ".vscode/cppdb"
|
||||
},
|
||||
"defines": [
|
||||
"STATICALLY_LINKED_WITH_JavaScriptCore=1",
|
||||
"STATICALLY_LINKED_WITH_WTF=1",
|
||||
"BUILDING_WITH_CMAKE=1",
|
||||
"NOMINMAX",
|
||||
"ENABLE_INSPECTOR_ALTERNATE_DISPATCHERS=0",
|
||||
"BUILDING_JSCONLY__",
|
||||
"USE_FOUNDATION=1",
|
||||
"ASSERT_ENABLED=1",
|
||||
"DU_DISABLE_RENAMING=1"
|
||||
],
|
||||
"macFrameworkPath": [],
|
||||
"compilerPath": "${workspaceFolder}/.vscode/clang++",
|
||||
"cStandard": "c17",
|
||||
"cppStandard": "c++20"
|
||||
},
|
||||
{
|
||||
"name": "BunWithJSCDebug",
|
||||
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
|
||||
"includePath": [
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/WTF/Headers",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/bmalloc/Headers/",
|
||||
"${workspaceFolder}/../webkit-build/include/",
|
||||
"${workspaceFolder}/bun-webkit/include/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcore/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
|
||||
@@ -74,11 +26,13 @@
|
||||
],
|
||||
"browse": {
|
||||
"path": [
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/**",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/WTF/Headers/**",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/bmalloc/Headers/**",
|
||||
"${workspaceFolder}/../webkit-build/include/",
|
||||
"${workspaceFolder}/bun-webkit/include/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/ICU/Headers/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/**",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/WTF/Headers/**",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Release/bmalloc/Headers/**",
|
||||
"${workspaceFolder}/src/bun.js/bindings/*",
|
||||
"${workspaceFolder}/src/bun.js/bindings/*",
|
||||
"${workspaceFolder}/src/napi/*",
|
||||
@@ -95,7 +49,7 @@
|
||||
"${workspaceFolder}/src/napi"
|
||||
],
|
||||
"limitSymbolsToIncludedHeaders": true,
|
||||
"databaseFilename": ".vscode/cppdb_debug"
|
||||
"databaseFilename": ".vscode/cppdb"
|
||||
},
|
||||
"defines": [
|
||||
"STATICALLY_LINKED_WITH_JavaScriptCore=1",
|
||||
@@ -105,7 +59,7 @@
|
||||
"ENABLE_INSPECTOR_ALTERNATE_DISPATCHERS=0",
|
||||
"BUILDING_JSCONLY__",
|
||||
"USE_FOUNDATION=1",
|
||||
"ASSERT_ENABLED=1",
|
||||
"ASSERT_ENABLED=0",
|
||||
"DU_DISABLE_RENAMING=1"
|
||||
],
|
||||
"macFrameworkPath": [],
|
||||
|
||||
13
.vscode/launch.json
generated
vendored
@@ -82,7 +82,7 @@
|
||||
"request": "launch",
|
||||
"name": "bun test [*]",
|
||||
"program": "bun-debug",
|
||||
"args": ["test", "js/node"],
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
@@ -96,7 +96,7 @@
|
||||
"request": "launch",
|
||||
"name": "bun test [*] (fast)",
|
||||
"program": "bun-debug",
|
||||
"args": ["test", "js"],
|
||||
"args": ["test"],
|
||||
// The cwd here must be the same as in CI. Or you will cause test failures that only happen in CI.
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
@@ -124,7 +124,7 @@
|
||||
"request": "launch",
|
||||
"name": "bun run [file]",
|
||||
"program": "bun-debug",
|
||||
"args": ["run", "${file}"],
|
||||
"args": ["run", "${file}", "${file}"],
|
||||
"cwd": "${fileDirname}",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
@@ -307,10 +307,13 @@
|
||||
"name": "bun install",
|
||||
"program": "bun-debug",
|
||||
"args": ["install"],
|
||||
"cwd": "/Users/jarred/Build/worky",
|
||||
"cwd": "${fileDirname}",
|
||||
"console": "internalConsole",
|
||||
"env": {}
|
||||
"env": {
|
||||
"BUN_DEBUG_QUIET_LOGS": "1"
|
||||
}
|
||||
},
|
||||
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
|
||||
92
.vscode/settings.json
vendored
@@ -7,36 +7,44 @@
|
||||
"search.followSymlinks": false,
|
||||
"search.useIgnoreFiles": true,
|
||||
"zig.buildOnSave": false,
|
||||
"zig.formattingProvider": "zls",
|
||||
// We do this until we upgrade to latest Zig so that zls doesn't break our code.
|
||||
"zig.formattingProvider": "extension",
|
||||
"zig.buildArgs": ["obj", "-Dfor-editor"],
|
||||
"zig.buildOption": "build",
|
||||
"zig.buildFilePath": "${workspaceFolder}/build.zig",
|
||||
"zig.initialSetupDone": true,
|
||||
"editor.formatOnSave": true,
|
||||
"[zig]": {
|
||||
"editor.tabSize": 4,
|
||||
"editor.useTabStops": false,
|
||||
"editor.defaultFormatter": "ziglang.vscode-zig"
|
||||
"editor.defaultFormatter": "ziglang.vscode-zig",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[ts]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[js]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"zig.zls.enableInlayHints": false,
|
||||
"git.ignoreSubmodules": true,
|
||||
|
||||
"[jsx]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[tsx]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[yaml]": {
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"[yaml]": {},
|
||||
"[markdown]": {
|
||||
"editor.unicodeHighlight.ambiguousCharacters": false,
|
||||
"editor.unicodeHighlight.invisibleCharacters": false,
|
||||
"diffEditor.ignoreTrimWhitespace": false,
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.wordWrap": "on",
|
||||
"editor.quickSuggestions": {
|
||||
"comments": "off",
|
||||
@@ -56,6 +64,8 @@
|
||||
"**/*.xcscheme": true,
|
||||
"**/*.pem": true,
|
||||
"**/*.xcodeproj": true,
|
||||
"test/snapshots": true,
|
||||
"test/snapshots-no-hmr": true,
|
||||
"src/bun.js/WebKit": true,
|
||||
"src/deps/libarchive": true,
|
||||
"src/deps/mimalloc": true,
|
||||
@@ -68,26 +78,27 @@
|
||||
"src/deps/c-ares": true,
|
||||
"src/deps/tinycc": true,
|
||||
"src/deps/zstd": true,
|
||||
"**/*.i": true,
|
||||
"packages/bun-uws/fuzzing/seed-corpus/**/*": true
|
||||
"test/snippets/package-json-exports/_node_modules_copy": true,
|
||||
"src/js/out": true,
|
||||
"src/packages/bun-uws/fuzzing/seed-corpus/": true
|
||||
},
|
||||
"C_Cpp.files.exclude": {
|
||||
"**/.vscode": true,
|
||||
"WebKit/JSTests": true,
|
||||
"WebKit/Tools": true,
|
||||
"WebKit/WebDriverTests": true,
|
||||
"WebKit/WebKit.xcworkspace": true,
|
||||
"WebKit/WebKitLibraries": true,
|
||||
"WebKit/Websites": true,
|
||||
"WebKit/resources": true,
|
||||
"WebKit/LayoutTests": true,
|
||||
"WebKit/ManualTests": true,
|
||||
"WebKit/PerformanceTests": true,
|
||||
"WebKit/WebKitLegacy": true,
|
||||
"WebKit/WebCore": true,
|
||||
"WebKit/WebDriver": true,
|
||||
"WebKit/WebKitBuild": true,
|
||||
"WebKit/WebInspectorUI": true
|
||||
"src/bun.js/WebKit/JSTests": true,
|
||||
"src/bun.js/WebKit/Tools": true,
|
||||
"src/bun.js/WebKit/WebDriverTests": true,
|
||||
"src/bun.js/WebKit/WebKit.xcworkspace": true,
|
||||
"src/bun.js/WebKit/WebKitLibraries": true,
|
||||
"src/bun.js/WebKit/Websites": true,
|
||||
"src/bun.js/WebKit/resources": true,
|
||||
"src/bun.js/WebKit/LayoutTests": true,
|
||||
"src/bun.js/WebKit/ManualTests": true,
|
||||
"src/bun.js/WebKit/PerformanceTests": true,
|
||||
"src/bun.js/WebKit/WebKitLegacy": true,
|
||||
"src/bun.js/WebKit/WebCore": true,
|
||||
"src/bun.js/WebKit/WebDriver": true,
|
||||
"src/bun.js/WebKit/WebKitBuild": true,
|
||||
"src/bun.js/WebKit/WebInspectorUI": true
|
||||
},
|
||||
"[cpp]": {
|
||||
"editor.defaultFormatter": "xaver.clang-format"
|
||||
@@ -178,12 +189,20 @@
|
||||
"set": "cpp",
|
||||
"__memory": "cpp",
|
||||
"memory_resource": "cpp",
|
||||
"resource.h": "c",
|
||||
"sysinfo.h": "c",
|
||||
"*.tcc": "cpp",
|
||||
"list": "cpp",
|
||||
"shared_mutex": "cpp",
|
||||
"cinttypes": "cpp",
|
||||
"variant": "cpp",
|
||||
"sysctl.h": "c",
|
||||
"interface_adresses.h": "c",
|
||||
"interface_addresses.h": "c",
|
||||
"ctype.h": "c",
|
||||
"ethernet.h": "c",
|
||||
"inet.h": "c",
|
||||
"packet.h": "c",
|
||||
"queue": "cpp",
|
||||
"compare": "cpp",
|
||||
"concepts": "cpp",
|
||||
@@ -199,24 +218,9 @@
|
||||
"regex": "cpp",
|
||||
"span": "cpp",
|
||||
"valarray": "cpp",
|
||||
"codecvt": "cpp",
|
||||
"xtr1common": "cpp",
|
||||
"stop_token": "cpp",
|
||||
"xfacet": "cpp",
|
||||
"xhash": "cpp",
|
||||
"xiosbase": "cpp",
|
||||
"xlocale": "cpp",
|
||||
"xlocbuf": "cpp",
|
||||
"xlocinfo": "cpp",
|
||||
"xlocmes": "cpp",
|
||||
"xlocmon": "cpp",
|
||||
"xlocnum": "cpp",
|
||||
"xloctime": "cpp",
|
||||
"xmemory": "cpp",
|
||||
"xstring": "cpp",
|
||||
"xtree": "cpp",
|
||||
"xutility": "cpp"
|
||||
"codecvt": "cpp"
|
||||
},
|
||||
"cmake.configureOnOpen": false,
|
||||
"C_Cpp.errorSquiggles": "enabled",
|
||||
"eslint.workingDirectories": ["packages/bun-types"],
|
||||
"typescript.tsdk": "node_modules/typescript/lib"
|
||||
|
||||
29
.vscode/tasks.json
vendored
@@ -2,10 +2,33 @@
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Rebuild Debug",
|
||||
"command": "ninja",
|
||||
"args": ["-Cbuild"],
|
||||
"label": "build",
|
||||
"type": "process",
|
||||
"command": "zig",
|
||||
"args": ["build"],
|
||||
"presentation": {
|
||||
"echo": true,
|
||||
"reveal": "silent",
|
||||
"focus": false,
|
||||
"panel": "shared",
|
||||
"showReuseMessage": false,
|
||||
"clear": false
|
||||
},
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"label": "run",
|
||||
"type": "process",
|
||||
"command": "zig",
|
||||
"args": ["run", "${file}"],
|
||||
"group": "build",
|
||||
"presentation": {
|
||||
"showReuseMessage": false,
|
||||
"clear": true
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
1291
CMakeLists.txt
File diff suppressed because it is too large
@@ -10,7 +10,7 @@ Today (February 2023), Bun's codebase has five distinct parts:
|
||||
|
||||
- JavaScript, JSX, & TypeScript transpiler, module resolver, and related code
|
||||
- JavaScript runtime ([`src/bun.js/`](src/bun.js/))
|
||||
- JavaScript runtime bindings ([`src/bun.js/bindings/**/*.cpp`](src/bun.js/bindings/))
|
||||
- JavaScript runtime bindings ([`src/bun.zig/bindings/**/*.cpp`](src/bun.zig/bindings/))
|
||||
- Package manager ([`src/install/`](src/install/))
|
||||
- Shared utilities ([`src/string_immutable.zig`](src/string_immutable.zig))
|
||||
|
||||
@@ -18,7 +18,7 @@ The JavaScript transpiler & module resolver is mostly independent from the runti
|
||||
|
||||
## Getting started
|
||||
|
||||
Please refer to [Bun's Development Guide](https://bun.sh/docs/project/contributing) to get your dev environment setup!
|
||||
Please refer to [Bun's Development Guide](https://bun.sh/docs/project/development) to get your dev environment setup!
|
||||
|
||||
## Memory management in Bun
|
||||
|
||||
|
||||
872
Dockerfile
File diff suppressed because it is too large
113
Makefile
@@ -39,6 +39,7 @@ endif
|
||||
|
||||
MIN_MACOS_VERSION ?= $(DEFAULT_MIN_MACOS_VERSION)
|
||||
BUN_BASE_VERSION = 1.0
|
||||
|
||||
CI ?= false
|
||||
|
||||
AR=
|
||||
@@ -65,7 +66,7 @@ PACKAGE_JSON_VERSION = $(BUN_BASE_VERSION).$(BUILD_ID)
|
||||
BUN_BUILD_TAG = bun-v$(PACKAGE_JSON_VERSION)
|
||||
BUN_RELEASE_BIN = $(PACKAGE_DIR)/bun
|
||||
PRETTIER ?= $(shell which prettier 2>/dev/null || echo "./node_modules/.bin/prettier")
|
||||
ESBUILD = "$(shell which esbuild 2>/dev/null || echo "./node_modules/.bin/esbuild")"
|
||||
ESBUILD = $(shell which esbuild 2>/dev/null || echo "./node_modules/.bin/esbuild")
|
||||
DSYMUTIL ?= $(shell which dsymutil 2>/dev/null || which dsymutil-15 2>/dev/null)
|
||||
WEBKIT_DIR ?= $(realpath src/bun.js/WebKit)
|
||||
WEBKIT_RELEASE_DIR ?= $(WEBKIT_DIR)/WebKitBuild/Release
|
||||
@@ -73,7 +74,7 @@ WEBKIT_DEBUG_DIR ?= $(WEBKIT_DIR)/WebKitBuild/Debug
|
||||
WEBKIT_RELEASE_DIR_LTO ?= $(WEBKIT_DIR)/WebKitBuild/ReleaseLTO
|
||||
|
||||
|
||||
NPM_CLIENT = "$(shell which bun 2>/dev/null || which npm 2>/dev/null)"
|
||||
NPM_CLIENT ?= $(shell which bun 2>/dev/null || which npm 2>/dev/null)
|
||||
ZIG ?= $(shell which zig 2>/dev/null || echo -e "error: Missing zig. Please make sure zig is in PATH. Or set ZIG=/path/to-zig-executable")
|
||||
|
||||
# We must use the same compiler version for the JavaScriptCore bindings and JavaScriptCore
|
||||
@@ -186,6 +187,11 @@ BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-excep
|
||||
BUN_TMP_DIR := /tmp/make-bun
|
||||
CFLAGS=$(CFLAGS_WITHOUT_MARCH) $(MARCH_NATIVE)
|
||||
|
||||
DEFAULT_USE_BMALLOC := 1
|
||||
|
||||
|
||||
USE_BMALLOC ?= DEFAULT_USE_BMALLOC
|
||||
|
||||
# Set via postinstall
|
||||
ifeq (,$(realpath $(JSC_BASE_DIR)))
|
||||
JSC_BASE_DIR = $(realpath $(firstword $(wildcard bun-webkit)))
|
||||
@@ -374,7 +380,9 @@ ICU_FLAGS ?=
|
||||
# Ideally, we could just look up the linker search paths
|
||||
ifeq ($(OS_NAME),linux)
|
||||
LIB_ICU_PATH ?= $(JSC_LIB)
|
||||
ICU_FLAGS += $(LIB_ICU_PATH)/libicuuc.a $(LIB_ICU_PATH)/libicudata.a $(LIB_ICU_PATH)/libicui18n.a
|
||||
ICU_FLAGS += $(LIB_ICU_PATH)/libicuuc.a $(LIB_ICU_PATH)/libicudata.a $(LIB_ICU_PATH)/libicui18n.a
|
||||
else
|
||||
LIB_ICU_PATH ?= $(BUN_DEPS_DIR)
|
||||
endif
|
||||
|
||||
ifeq ($(OS_NAME),darwin)
|
||||
@@ -455,8 +463,7 @@ ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MINIMUM_ARCHIVE_FILES) \
|
||||
-lusockets \
|
||||
-lcares \
|
||||
-lzstd \
|
||||
$(BUN_DEPS_OUT_DIR)/libuwsockets.o \
|
||||
$(BUN_DEPS_OUT_DIR)/liblshpack.a
|
||||
$(BUN_DEPS_OUT_DIR)/libuwsockets.o
|
||||
|
||||
ARCHIVE_FILES = $(ARCHIVE_FILES_WITHOUT_LIBCRYPTO)
|
||||
|
||||
@@ -750,24 +757,14 @@ wasm: api mimalloc-wasm build-obj-wasm-small
|
||||
build-obj-safe:
|
||||
$(ZIG) build obj -Doptimize=ReleaseSafe -Dcpu="$(CPU_TARGET)"
|
||||
|
||||
UWS_CC_FLAGS = -pthread -DLIBUS_USE_OPENSSL=1 -DUWS_HTTPRESPONSE_NO_WRITEMARK=1 -DLIBUS_USE_BORINGSSL=1 -DWITH_BORINGSSL=1 -Wpedantic -Wall -Wextra -Wsign-conversion -Wconversion $(UWS_INCLUDE) -DUWS_WITH_PROXY
|
||||
UWS_CC_FLAGS = -pthread -DLIBUS_USE_OPENSSL=1 -DUWS_HTTPRESPONSE_NO_WRITEMARK=1 -DLIBUS_USE_BORINGSSL=1 -DWITH_BORINGSSL=1 -Wpedantic -Wall -Wextra -Wsign-conversion -Wconversion $(UWS_INCLUDE) -DUWS_WITH_PROXY
|
||||
UWS_CXX_FLAGS = $(UWS_CC_FLAGS) -std=$(CXX_VERSION) -fno-exceptions -fno-rtti
|
||||
UWS_LDFLAGS = -I$(BUN_DEPS_DIR)/boringssl/include -I$(ZLIB_INCLUDE_DIR)
|
||||
USOCKETS_DIR = $(BUN_DIR)/packages/bun-usockets
|
||||
USOCKETS_SRC_DIR = $(USOCKETS_DIR)/src
|
||||
|
||||
|
||||
LSHPACK_SRC_DIR = $(BUN_DEPS_DIR)/ls-hpack
|
||||
LSHPACK_CC_FLAGS = -DXXH_HEADER_NAME="<xxhash.h>"
|
||||
LSHPACK_LDFLAGS = -I$(LSHPACK_SRC_DIR) -I$(LSHPACK_SRC_DIR)/deps/xxhash
|
||||
|
||||
lshpack:
|
||||
rm -rf $(LSHPACK_SRC_DIR)/*.i $(LSHPACK_SRC_DIR)/*.bc $(LSHPACK_SRC_DIR)/*.o $(LSHPACK_SRC_DIR)/*.s $(LSHPACK_SRC_DIR)/*.ii $(LSHPACK_SRC_DIR)/*.s
|
||||
cd $(LSHPACK_SRC_DIR) && $(CC_WITH_CCACHE) -I$(LSHPACK_SRC_DIR) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CFLAGS) $(LSHPACK_CC_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/lshpack/src $(LSHPACK_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(LSHPACK_SRC_DIR)/lshpack.c) $(wildcard $(LSHPACK_SRC_DIR)/deps/**/*.c)
|
||||
cd $(LSHPACK_SRC_DIR) && $(AR) rcvs $(BUN_DEPS_OUT_DIR)/liblshpack.a $(LSHPACK_SRC_DIR)/*.{o,bc}
|
||||
|
||||
usockets:
|
||||
rm -rf $(USOCKETS_DIR)/*.i $(USOCKETS_DIR)/*.bc $(USOCKETS_DIR)/*.o $(USOCKETS_DIR)/*.s $(USOCKETS_DIR)/*.ii $(USOCKETS_DIR)/*.s $(BUN_DEPS_OUT_DIR)/libusockets.a
|
||||
rm -rf $(USOCKETS_DIR)/*.i $(USOCKETS_DIR)/*.bc $(USOCKETS_DIR)/*.o $(USOCKETS_DIR)/*.s $(USOCKETS_DIR)/*.ii $(USOCKETS_DIR)/*.s
|
||||
cd $(USOCKETS_DIR) && $(CC_WITH_CCACHE) -I$(USOCKETS_SRC_DIR) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CFLAGS) $(UWS_CC_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(USOCKETS_SRC_DIR)/*.c) $(wildcard $(USOCKETS_SRC_DIR)/**/*.c)
|
||||
cd $(USOCKETS_DIR) && $(CXX_WITH_CCACHE) -I$(USOCKETS_SRC_DIR) -fno-builtin-malloc -fno-builtin-free -fno-builtin-realloc $(EMIT_LLVM_FOR_RELEASE) $(MACOS_MIN_FLAG) -fPIC $(CXXFLAGS) $(UWS_CXX_FLAGS) -save-temps -I$(BUN_DEPS_DIR)/uws/uSockets/src $(UWS_LDFLAGS) -g $(DEFAULT_LINKER_FLAGS) $(PLATFORM_LINKER_FLAGS) $(OPTIMIZATION_LEVEL) -c $(wildcard $(USOCKETS_SRC_DIR)/*.cpp) $(wildcard $(USOCKETS_SRC_DIR)/**/*.cpp)
|
||||
cd $(USOCKETS_DIR) && $(AR) rcvs $(BUN_DEPS_OUT_DIR)/libusockets.a $(USOCKETS_DIR)/*.{o,bc}
|
||||
@@ -836,10 +833,10 @@ fallback_decoder:
|
||||
|
||||
.PHONY: runtime_js
|
||||
runtime_js:
|
||||
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime/index.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.js; cat src/runtime.footer.js >> src/runtime.out.js
|
||||
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime/index-with-refresh.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.refresh.js; cat src/runtime.footer.with-refresh.js >> src/runtime.out.refresh.js
|
||||
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.node.pre.out.js; cat src/runtime.node.pre.out.js src/runtime.footer.node.js > src/runtime.node.out.js
|
||||
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV=\"production\" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.bun.pre.out.js; cat src/runtime.bun.pre.out.js src/runtime.footer.bun.js > src/runtime.bun.out.js
|
||||
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.js; cat src/runtime.footer.js >> src/runtime.out.js
|
||||
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index-with-refresh.ts --format=iife --platform=browser --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.out.refresh.js; cat src/runtime.footer.with-refresh.js >> src/runtime.out.refresh.js
|
||||
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.node.pre.out.js; cat src/runtime.node.pre.out.js src/runtime.footer.node.js > src/runtime.node.out.js
|
||||
@NODE_ENV=production $(ESBUILD) --define:process.env.NODE_ENV="production" --target=esnext --bundle src/runtime/index-without-hmr.ts --format=iife --platform=node --global-name=BUN_RUNTIME --minify --external:/bun:* > src/runtime.bun.pre.out.js; cat src/runtime.bun.pre.out.js src/runtime.footer.bun.js > src/runtime.bun.out.js
|
||||
|
||||
.PHONY: runtime_js_dev
|
||||
runtime_js_dev:
|
||||
@@ -940,9 +937,6 @@ clone-submodules:
|
||||
|
||||
.PHONY: headers
|
||||
headers:
|
||||
echo please don't run the headers generator anymore. i don't think it works.
|
||||
echo if you really need it, run make headers2
|
||||
headers2:
|
||||
rm -f /tmp/build-jsc-headers src/bun.js/bindings/headers.zig
|
||||
touch src/bun.js/bindings/headers.zig
|
||||
$(ZIG) build headers-obj
|
||||
@@ -1257,7 +1251,6 @@ jsc-build-mac-compile-debug:
|
||||
-DENABLE_FTL_JIT=ON \
|
||||
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
|
||||
-DUSE_BUN_JSC_ADDITIONS=ON \
|
||||
-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON \
|
||||
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
|
||||
-G Ninja \
|
||||
$(CMAKE_FLAGS_WITHOUT_RELEASE) \
|
||||
@@ -1298,7 +1291,7 @@ jsc-build-linux-compile-config:
|
||||
jsc-build-linux-compile-build:
|
||||
mkdir -p $(WEBKIT_RELEASE_DIR) && \
|
||||
cd $(WEBKIT_RELEASE_DIR) && \
|
||||
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects -DUSE_BUN_JSC_ADDITIONS=ON" \
|
||||
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects" -DUSE_BUN_JSC_ADDITIONS=ON \
|
||||
cmake --build $(WEBKIT_RELEASE_DIR) --config relwithdebuginfo --target jsc
|
||||
|
||||
|
||||
@@ -1333,7 +1326,7 @@ release-bindings: $(OBJ_DIR) $(OBJ_FILES) $(WEBCORE_OBJ_FILES) $(SQLITE_OBJ_FILE
|
||||
# Do not add $(DEBUG_DIR) to this list
|
||||
# It will break caching, causing you to have to wait for every .cpp file to rebuild.
|
||||
.PHONY: bindings
|
||||
bindings-real: $(DEBUG_OBJ_DIR) $(DEBUG_OBJ_FILES) $(DEBUG_WEBCORE_OBJ_FILES) $(DEBUG_SQLITE_OBJ_FILES) $(DEBUG_NODE_OS_OBJ_FILES) $(DEBUG_BUILTINS_OBJ_FILES) $(DEBUG_IO_FILES) $(DEBUG_MODULES_OBJ_FILES) $(DEBUG_WEBCRYPTO_OBJ_FILES)
|
||||
bindings: $(DEBUG_OBJ_DIR) $(DEBUG_OBJ_FILES) $(DEBUG_WEBCORE_OBJ_FILES) $(DEBUG_SQLITE_OBJ_FILES) $(DEBUG_NODE_OS_OBJ_FILES) $(DEBUG_BUILTINS_OBJ_FILES) $(DEBUG_IO_FILES) $(DEBUG_MODULES_OBJ_FILES) $(DEBUG_WEBCRYPTO_OBJ_FILES)
|
||||
|
||||
.PHONY: jsc-bindings-mac
|
||||
jsc-bindings-mac: bindings
|
||||
@@ -1367,7 +1360,7 @@ mimalloc-debug:
|
||||
-GNinja \
|
||||
. \
|
||||
&& ninja
|
||||
cp $(BUN_DEPS_DIR)/mimalloc/$(_MIMALLOC_DEBUG_FILE) $(BUN_DEPS_OUT_DIR)/$(_MIMALLOC_DEBUG_FILE)
|
||||
cp $(BUN_DEPS_DIR)/mimalloc/$(_MIMALLOC_DEBUG_FILE) $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE)
|
||||
|
||||
|
||||
# mimalloc is built as object files so that it can overload the system malloc on linux
|
||||
@@ -1492,12 +1485,12 @@ wasm-return1:
|
||||
$(ZIG) build-lib -OReleaseSmall test/bun.js/wasm-return-1-test.zig -femit-bin=test/bun.js/wasm-return-1-test.wasm -target wasm32-freestanding
|
||||
|
||||
generate-classes:
|
||||
bun src/codegen/generate-classes.ts
|
||||
bun src/bun.js/scripts/generate-classes.ts
|
||||
$(ZIG) fmt src/bun.js/bindings/generated_classes.zig
|
||||
$(CLANG_FORMAT) -i src/bun.js/bindings/ZigGeneratedClasses.h src/bun.js/bindings/ZigGeneratedClasses.cpp
|
||||
|
||||
generate-sink:
|
||||
bun src/codegen/generate-jssink.js
|
||||
bun src/bun.js/scripts/generate-jssink.js
|
||||
$(CLANG_FORMAT) -i src/bun.js/bindings/JSSink.cpp src/bun.js/bindings/JSSink.h
|
||||
./src/bun.js/scripts/create_hash_table src/bun.js/bindings/JSSink.cpp > src/bun.js/bindings/JSSinkLookupTable.h
|
||||
$(SED) -i -e 's/#include "Lookup.h"//' src/bun.js/bindings/JSSinkLookupTable.h
|
||||
@@ -1907,7 +1900,7 @@ cold-jsc-start:
|
||||
misctools/cold-jsc-start.cpp -o cold-jsc-start
|
||||
|
||||
.PHONY: vendor-without-npm
|
||||
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws lshpack tinycc c-ares zstd base64
|
||||
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws tinycc c-ares zstd base64
|
||||
|
||||
|
||||
.PHONY: vendor-without-check
|
||||
@@ -1920,30 +1913,46 @@ vendor: assert-deps submodule vendor-without-check
|
||||
vendor-dev: assert-deps submodule npm-install-dev vendor-without-npm
|
||||
|
||||
.PHONY: bun
|
||||
bun:
|
||||
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
|
||||
@echo 'See https://bun.sh/docs/project/contributing for more details'
|
||||
bun: vendor identifier-cache build-obj bun-link-lld-release bun-codesign-release-local
|
||||
|
||||
cpp:
|
||||
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
|
||||
@echo 'See https://bun.sh/docs/project/contributing for more details'
|
||||
.PHONY: static-hash-table
|
||||
static-hash-table:
|
||||
bun src/js/_codegen/static-hash-tables.ts
|
||||
|
||||
zig:
|
||||
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
|
||||
@echo 'See https://bun.sh/docs/project/contributing for more details'
|
||||
.PHONY: cpp
|
||||
cpp: ## compile src/js/builtins + all c++ code then link
|
||||
@make clean-bindings js
|
||||
@make static-hash-table
|
||||
@make bindings -j$(CPU_COUNT)
|
||||
@make link
|
||||
|
||||
dev:
|
||||
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
|
||||
@echo 'See https://bun.sh/docs/project/contributing for more details'
|
||||
.PHONY: cpp
|
||||
cpp-no-link:
|
||||
@make clean-bindings js
|
||||
@make bindings -j$(CPU_COUNT)
|
||||
|
||||
setup:
|
||||
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
|
||||
@echo 'See https://bun.sh/docs/project/contributing for more details'
|
||||
.PHONY: zig
|
||||
zig: ## compile zig code then link
|
||||
@make mkdir-dev dev-obj link
|
||||
|
||||
bindings:
|
||||
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
|
||||
@echo 'See https://bun.sh/docs/project/contributing for more details'
|
||||
.PHONY: zig-no-link
|
||||
zig-no-link:
|
||||
@make mkdir-dev dev-obj
|
||||
|
||||
help:
|
||||
@echo 'makefile is deprecated - use `cmake` / `bun run build`'
|
||||
@echo 'See https://bun.sh/docs/project/contributing for more details'
|
||||
.PHONY: dev
|
||||
dev: # combo of `make cpp` and `make zig`
|
||||
@make cpp-no-link zig-no-link -j2
|
||||
@make link
|
||||
|
||||
.PHONY: setup
|
||||
setup: vendor-dev identifier-cache clean-bindings
|
||||
make jsc-check dev
|
||||
@echo ""
|
||||
@echo "First build complete!"
|
||||
@echo "\"bun-debug\" is available at $(DEBUG_BIN)/bun-debug"
|
||||
@echo ""
|
||||
|
||||
.PHONY: help
|
||||
help: ## to print this help
|
||||
@echo "For detailed build instructions, see https://bun.sh/docs/project/development"
|
||||
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z0-9_-]+:.*?## / {gsub("\\\\n",sprintf("\n%22c",""), $$2);printf "\033[36m%-20s\033[0m \t\t%s\n", $$1, $$2}' $(MAKEFILE_LIST)
|
||||
|
||||
@@ -31,7 +31,7 @@ Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a s
|
||||
At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
|
||||
|
||||
```bash
|
||||
bun run index.tsx # TS and JSX supported out-of-the-box
|
||||
bun run index.tsx # TS and JSX supported out of the box
|
||||
```
|
||||
|
||||
The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager. Instead of 1,000 node_modules for development, you only need `bun`. Bun's built-in tools are significantly faster than existing options and usable in existing Node.js projects with little to no changes.
|
||||
@@ -93,8 +93,7 @@ bun upgrade --canary
|
||||
- [`bun run`](https://bun.sh/docs/cli/run)
|
||||
- [`bun install`](https://bun.sh/docs/cli/install)
|
||||
- [`bun test`](https://bun.sh/docs/cli/test)
|
||||
- [`bun init`](https://bun.sh/docs/cli/init)
|
||||
- [`bun create`](https://bun.sh/docs/cli/bun-create)
|
||||
- [`bun create`](https://bun.sh/docs/cli/create)
|
||||
- [`bunx`](https://bun.sh/docs/cli/bunx)
|
||||
- Runtime
|
||||
- [Runtime](https://bun.sh/docs/runtime/index)
|
||||
@@ -128,7 +127,7 @@ bun upgrade --canary
|
||||
|
||||
## Contributing
|
||||
|
||||
Refer to the [Project > Contributing](https://bun.sh/docs/project/contributing) guide to start contributing to Bun.
|
||||
Refer to the [Project > Development](https://bun.sh/docs/project/development) guide to start contributing to Bun.
|
||||
|
||||
## License
|
||||
|
||||
|
||||
BIN
bench/bun.lockb
Binary file not shown.
@@ -1,19 +0,0 @@
|
||||
import micromatch from "micromatch";
|
||||
import { bench, run } from "mitata";
|
||||
|
||||
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
|
||||
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);
|
||||
|
||||
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "**/*.js", () => {
|
||||
doMatch("foo/bar.js", "**/*.js");
|
||||
});
|
||||
|
||||
bench((Glob ? "Bun.Glob - " : "micromatch - ") + "*.js", () => {
|
||||
doMatch("bar.js", "*.js");
|
||||
});
|
||||
|
||||
await run({
|
||||
avg: true,
|
||||
min_max: true,
|
||||
percentiles: true,
|
||||
});
|
||||
@@ -1,113 +0,0 @@
|
||||
import { run, bench, group } from "mitata";
|
||||
import fg from "fast-glob";
|
||||
import { fdir } from "fdir";
|
||||
|
||||
const normalPattern = "*.ts";
|
||||
const recursivePattern = "**/*.ts";
|
||||
const nodeModulesPattern = "**/node_modules/**/*.js";
|
||||
|
||||
const benchFdir = false;
|
||||
const cwd = undefined;
|
||||
|
||||
const bunOpts = {
|
||||
cwd,
|
||||
followSymlinks: false,
|
||||
absolute: true,
|
||||
};
|
||||
|
||||
const fgOpts = {
|
||||
cwd,
|
||||
followSymbolicLinks: false,
|
||||
onlyFiles: false,
|
||||
absolute: true,
|
||||
};
|
||||
|
||||
const Glob = "Bun" in globalThis ? globalThis.Bun.Glob : undefined;
|
||||
|
||||
group({ name: `async pattern="${normalPattern}"`, summary: true }, () => {
|
||||
bench("fast-glob", async () => {
|
||||
const entries = await fg.glob([normalPattern], fgOpts);
|
||||
});
|
||||
|
||||
if (Glob)
|
||||
bench("Bun.Glob", async () => {
|
||||
const entries = await Array.fromAsync(new Glob(normalPattern).scan(bunOpts));
|
||||
});
|
||||
|
||||
if (benchFdir)
|
||||
bench("fdir", async () => {
|
||||
const entries = await new fdir().withFullPaths().glob(normalPattern).crawl(process.cwd()).withPromise();
|
||||
});
|
||||
});
|
||||
|
||||
group({ name: `async-recursive pattern="${recursivePattern}"`, summary: true }, () => {
|
||||
bench("fast-glob", async () => {
|
||||
const entries = await fg.glob([recursivePattern], fgOpts);
|
||||
});
|
||||
|
||||
if (Glob)
|
||||
bench("Bun.Glob", async () => {
|
||||
const entries = await Array.fromAsync(new Glob(recursivePattern).scan(bunOpts));
|
||||
});
|
||||
|
||||
if (benchFdir)
|
||||
bench("fdir", async () => {
|
||||
const entries = await new fdir().withFullPaths().glob(recursivePattern).crawl(process.cwd()).withPromise();
|
||||
});
|
||||
});
|
||||
|
||||
group({ name: `sync pattern="${normalPattern}"`, summary: true }, () => {
|
||||
bench("fast-glob", () => {
|
||||
const entries = fg.globSync([normalPattern], fgOpts);
|
||||
});
|
||||
|
||||
if (Glob)
|
||||
bench("Bun.Glob", () => {
|
||||
const entries = [...new Glob(normalPattern).scanSync(bunOpts)];
|
||||
});
|
||||
|
||||
if (benchFdir)
|
||||
bench("fdir", async () => {
|
||||
const entries = new fdir().withFullPaths().glob(normalPattern).crawl(process.cwd()).sync();
|
||||
});
|
||||
});
|
||||
|
||||
group({ name: `sync-recursive pattern="${recursivePattern}"`, summary: true }, () => {
|
||||
bench("fast-glob", () => {
|
||||
const entries = fg.globSync([recursivePattern], fgOpts);
|
||||
});
|
||||
|
||||
if (Glob)
|
||||
bench("Bun.Glob", () => {
|
||||
const entries = [...new Glob(recursivePattern).scanSync(bunOpts)];
|
||||
});
|
||||
|
||||
if (benchFdir)
|
||||
bench("fdir", async () => {
|
||||
const entries = new fdir().withFullPaths().glob(recursivePattern).crawl(process.cwd()).sync();
|
||||
});
|
||||
});
|
||||
|
||||
group({ name: `node_modules pattern="${nodeModulesPattern}"`, summary: true }, () => {
|
||||
bench("fast-glob", async () => {
|
||||
const entries = await fg.glob([nodeModulesPattern], fgOpts);
|
||||
});
|
||||
|
||||
if (Glob)
|
||||
bench("Bun.Glob", async () => {
|
||||
const entries = await Array.fromAsync(new Glob(nodeModulesPattern).scan(bunOpts));
|
||||
});
|
||||
|
||||
if (benchFdir)
|
||||
bench("fdir", async () => {
|
||||
const entries = await new fdir().withFullPaths().glob(nodeModulesPattern).crawl(process.cwd()).withPromise();
|
||||
});
|
||||
});
|
||||
|
||||
await run({
|
||||
avg: true,
|
||||
colors: false,
|
||||
min_max: true,
|
||||
collect: true,
|
||||
percentiles: true,
|
||||
});
|
||||
@@ -7,8 +7,6 @@
|
||||
"benchmark": "^2.1.4",
|
||||
"esbuild": "^0.14.12",
|
||||
"eventemitter3": "^5.0.0",
|
||||
"fast-glob": "3.3.1",
|
||||
"fdir": "^6.1.0",
|
||||
"mitata": "^0.1.6"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -6,30 +6,23 @@ bench("await 1", async function () {
  return await 1;
});

if (typeof process !== "undefined") {
  bench("process.nextTick x 100", async function () {
    var remaining = 100;
    var cb, promise;
    promise = new Promise(resolve => {
      cb = resolve;
    });

    for (let i = 0; i < 100; i++) {
      process.nextTick(() => {
        if (--remaining === 0) cb();
      });
    }

    return promise;
  });

  bench("await 1 x 100", async function () {
    for (let i = 0; i < 100; i++) await 1;
  });
  function callnextTick(resolve) {
    process.nextTick(resolve);
  }

  function awaitNextTick() {
    return new Promise(callnextTick);
  }

  bench("promise.nextTick", async function () {
    return awaitNextTick();
  });

  bench("await new Promise(resolve => resolve())", async function () {
    await new Promise(resolve => resolve());
  });
  bench("Promise.all(Array.from({length: 100}, () => new Promise((resolve) => resolve())))", async function () {
    return Promise.all(Array.from({ length: 100 }, () => Promise.resolve(1)));
  });

await run();

@@ -4,9 +4,5 @@ var i = 0;
const server = createServer((req, res) => {
  res.writeHead(200);
  res.end("Hello, World!" + i);
  if (i++ === 200_000 - 1)
    setTimeout(() => {
      console.log("RSS", (process.memoryUsage().rss / 1024 / 1024) | 0, "MB");
      process.exit(0);
    }, 0);
  if (i++ === 200_000 - 1) queueMicrotask(() => process.exit(0));
}).listen(parseInt(process.env.PORT || "3000", 10));

@@ -1,52 +0,0 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { IncomingMessage } from "node:http";
|
||||
|
||||
const headers = {
|
||||
date: "Mon, 06 Nov 2023 05:12:49 GMT",
|
||||
expires: "-1",
|
||||
"cache-control": "private, max-age=0",
|
||||
"content-type": "text/html; charset=ISO-8859-1",
|
||||
"content-security-policy-report-only":
|
||||
"object-src 'none';base-uri 'self';script-src 'nonce-lcrU7l9xScCq4urW13K9gw' 'strict-dynamic' 'report-sample' 'unsafe-eval' 'unsafe-inline' https: http:;report-uri https://csp.withgoogle.com/csp/gws/other-hp",
|
||||
"x-xss-protection": "0",
|
||||
"x-frame-options": "SAMEORIGIN",
|
||||
"accept-ranges": "none",
|
||||
vary: "Accept-Encoding",
|
||||
"transfer-encoding": "chunked",
|
||||
"set-cookie": [
|
||||
"1P_JAR=2023-11-06-05; expires=Wed, 06-Dec-2023 05:12:49 GMT; path=/; domain=.google.com; Secure",
|
||||
"AEC=Ackid1TiuGtRsmu1yaDCAdL1u1J4eM4S67simzDHfWaMPQzH-UB4DZkRwm8; expires=Sat, 04-May-2024 05:12:49 GMT; path=/; domain=.google.com; Secure; HttpOnly; SameSite=lax",
|
||||
"NID=511=jQcg9cM7vjKawWnf6f3qhs3WDIIN2gaRq3i4bdMiVRWFkaFNYmiI-Xquf1kAmWGcmDN0skldS7uHheru3CMJrWjMt56VaaqO6Pilb54jFjQS_ZJRfG3Uc7dGV5WXGV-slUGE1Bicxlajdn0E_R8tZOoWiFzFDQW7YGmyfRqWQ2k; expires=Tue, 07-May-2024 05:12:49 GMT; path=/; domain=.google.com; HttpOnly",
|
||||
],
|
||||
p3p: 'CP="This is not a P3P policy! See g.co/p3phelp for more info."',
|
||||
server: "gws",
|
||||
"alt-svc": 'h3=":443"; ma=2592000,h3-29=":443"; ma=2592000',
|
||||
};
|
||||
|
||||
const request = new Request("https://www.google.com/", {
|
||||
headers: new Headers(headers),
|
||||
method: "GET",
|
||||
});
|
||||
|
||||
// const server = Bun.serve({
|
||||
// port: 8080,
|
||||
// async fetch(request) {
|
||||
// // bench("new IncomingMessage()", b => {
|
||||
// // for (let i = 0; i < 1000; i++) {
|
||||
// // new IncomingMessage(request);
|
||||
// // }
|
||||
// // });
|
||||
// const msg = new IncomingMessage(request);
|
||||
// console.log(msg.headers, msg.rawHeaders, msg.url);
|
||||
// // await run();
|
||||
// return new Response("Hello, world!");
|
||||
// },
|
||||
// });
|
||||
|
||||
bench("new IncomingMessage()", b => {
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
new IncomingMessage(request);
|
||||
}
|
||||
});
|
||||
|
||||
await run();
|
||||
@@ -77,24 +77,4 @@ bench("ConventionalPrivates", () => {
|
||||
new Foo().run();
|
||||
});
|
||||
|
||||
const _state = Symbol("state");
|
||||
const _inc = Symbol("inc");
|
||||
|
||||
bench("SymbolPrivates", () => {
|
||||
class Foo {
|
||||
[_state] = 1;
|
||||
[_inc] = 13;
|
||||
|
||||
run() {
|
||||
let n = 1000000;
|
||||
while (n-- > 0) {
|
||||
this[_state] += this[_inc];
|
||||
}
|
||||
return n;
|
||||
}
|
||||
}
|
||||
|
||||
new Foo().run();
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
@@ -1,50 +1,13 @@
|
||||
import { readdirSync, readdir as readdirCb } from "fs";
|
||||
import { readdir } from "fs/promises";
|
||||
import { readdirSync } from "fs";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { argv } from "process";
|
||||
import { fileURLToPath } from "url";
|
||||
import { relative, resolve } from "path";
|
||||
import { createHash } from "crypto";
|
||||
|
||||
let dir = resolve(argv.length > 2 ? argv[2] : fileURLToPath(new URL("../../node_modules", import.meta.url)));
|
||||
if (dir.includes(process.cwd())) {
|
||||
dir = relative(process.cwd(), dir);
|
||||
}
|
||||
const dir = argv.length > 2 ? argv[2] : "/tmp";
|
||||
|
||||
const result = await readdir(dir, { recursive: true });
|
||||
const count = result.length;
|
||||
const syncCount = readdirSync(dir, { recursive: true }).length;
|
||||
|
||||
const hash = createHash("sha256").update(result.sort().join("\n")).digest("hex");
|
||||
|
||||
bench(`await readdir("${dir}", {recursive: true})`, async () => {
|
||||
await readdir(dir, { recursive: true });
|
||||
});
|
||||
|
||||
bench(`await readdir("${dir}", {recursive: true}) x 10`, async () => {
|
||||
const promises = [
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
readdir(dir, { recursive: true }),
|
||||
];
|
||||
await Promise.all(promises);
|
||||
});
|
||||
|
||||
bench(`await readdir("${dir}", {recursive: false})`, async () => {
|
||||
await readdir(dir, { recursive: false });
|
||||
const count = readdirSync(dir).length;
|
||||
bench(`readdir("${dir}")`, () => {
|
||||
readdirSync(dir, { withFileTypes: true });
|
||||
});
|
||||
|
||||
await run();
|
||||
console.log("\n", count, "files/dirs in", dir, "\n", "SHA256:", hash, "\n");
|
||||
|
||||
if (count !== syncCount) {
|
||||
throw new Error(`Mismatched file counts: ${count} async !== ${syncCount} sync`);
|
||||
}
|
||||
console.log("\n\nFor", count, "files/dirs in", dir);
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { builtinModules } from "node:module";
|
||||
import { writeFile } from "node:fs/promises";
|
||||
import { spawnSync } from "child_process";
|
||||
|
||||
for (let builtin of builtinModules) {
|
||||
const path = `/tmp/require.${builtin.replaceAll("/", "_")}.cjs`;
|
||||
await writeFile(
|
||||
path,
|
||||
`
|
||||
const builtin = ${JSON.stringify(builtin)};
|
||||
const now = require("perf_hooks").performance.now();
|
||||
require(builtin);
|
||||
const end = require("perf_hooks").performance.now();
|
||||
process.stdout.write(JSON.stringify({builtin, time: end - now}) + "\\n");
|
||||
`,
|
||||
);
|
||||
const result = spawnSync(typeof Bun !== "undefined" ? "bun" : "node", [path], {
|
||||
stdio: ["inherit", "inherit", "inherit"],
|
||||
env: {
|
||||
...process.env,
|
||||
NODE_NO_WARNINGS: "1",
|
||||
},
|
||||
});
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
import { tmpdir } from "node:os";
|
||||
import { promises, existsSync, mkdirSync } from "node:fs";
|
||||
const count = 1024 * 12;
|
||||
|
||||
var queue = new Array(count);
|
||||
var paths = new Array(count);
|
||||
for (let i = 0; i < count; i++) {
|
||||
const path = `${tmpdir()}/${Date.now()}.rm.dir${i}`;
|
||||
try {
|
||||
mkdirSync(path);
|
||||
} catch (e) {}
|
||||
paths[i] = path;
|
||||
queue[i] = promises.rmdir(path);
|
||||
}
|
||||
|
||||
await Promise.all(queue);
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
if (existsSync(paths[i])) {
|
||||
throw new Error(`Path ${paths[i]} was not removed`);
|
||||
}
|
||||
}
|
||||
@@ -1,143 +0,0 @@
|
||||
import { satisfies } from "semver";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
const tests = [
|
||||
["~1.2.3", "1.2.3", true],
|
||||
["~1.2", "1.2.0", true],
|
||||
["~1", "1.0.0", true],
|
||||
["~1", "1.2.0", true],
|
||||
["~1", "1.2.999", true],
|
||||
["~0.2.3", "0.2.3", true],
|
||||
["~0.2", "0.2.0", true],
|
||||
["~0.2", "0.2.1", true],
|
||||
["~0 ", "0.0.0", true],
|
||||
|
||||
["~1.2.3", "1.3.0", false],
|
||||
["~1.2", "1.3.0", false],
|
||||
["~1", "2.0.0", false],
|
||||
["~0.2.3", "0.3.0", false],
|
||||
["~0.2.3", "1.0.0", false],
|
||||
["~0 ", "1.0.0", false],
|
||||
["~0.2", "0.1.0", false],
|
||||
["~0.2", "0.3.0", false],
|
||||
|
||||
["~3.0.5", "3.3.0", false],
|
||||
|
||||
["^1.1.4", "1.1.4", true],
|
||||
|
||||
[">=3", "3.5.0", true],
|
||||
[">=3", "2.999.999", false],
|
||||
[">=3", "3.5.1", true],
|
||||
[">=3.x.x", "3.x.x", false],
|
||||
|
||||
["<6 >= 5", "5.0.0", true],
|
||||
["<6 >= 5", "4.0.0", false],
|
||||
["<6 >= 5", "6.0.0", false],
|
||||
["<6 >= 5", "6.0.1", false],
|
||||
|
||||
[">2", "3", false],
|
||||
[">2", "2.1", false],
|
||||
[">2", "2", false],
|
||||
[">2", "1.0", false],
|
||||
[">1.3", "1.3.1", false],
|
||||
[">1.3", "2.0.0", true],
|
||||
[">2.1.0", "2.2.0", true],
|
||||
["<=2.2.99999", "2.2.0", true],
|
||||
[">=2.1.99999", "2.2.0", true],
|
||||
["<2.2.99999", "2.2.0", true],
|
||||
[">2.1.99999", "2.2.0", true],
|
||||
[">1.0.0", "2.0.0", true],
|
||||
["1.0.0", "1.0.0", true],
|
||||
["1.0.0", "2.0.0", false],
|
||||
|
||||
["1.0.0 || 2.0.0", "1.0.0", true],
|
||||
["2.0.0 || 1.0.0", "1.0.0", true],
|
||||
["1.0.0 || 2.0.0", "2.0.0", true],
|
||||
["2.0.0 || 1.0.0", "2.0.0", true],
|
||||
["2.0.0 || >1.0.0", "2.0.0", true],
|
||||
|
||||
[">1.0.0 <2.0.0 <2.0.1 >1.0.1", "1.0.2", true],
|
||||
|
||||
["2.x", "2.0.0", true],
|
||||
["2.x", "2.1.0", true],
|
||||
["2.x", "2.2.0", true],
|
||||
["2.x", "2.3.0", true],
|
||||
["2.x", "2.1.1", true],
|
||||
["2.x", "2.2.2", true],
|
||||
["2.x", "2.3.3", true],
|
||||
|
||||
["<2.0.1 >1.0.0", "2.0.0", true],
|
||||
["<=2.0.1 >=1.0.0", "2.0.0", true],
|
||||
|
||||
["^2", "2.0.0", true],
|
||||
["^2", "2.9.9", true],
|
||||
["~2", "2.0.0", true],
|
||||
["~2", "2.1.0", true],
|
||||
["~2.2", "2.2.1", true],
|
||||
|
||||
["2.1.0 || > 2.2 || >3", "2.1.0", true],
|
||||
[" > 2.2 || >3 || 2.1.0", "2.1.0", true],
|
||||
[" > 2.2 || 2.1.0 || >3", "2.1.0", true],
|
||||
["> 2.2 || 2.1.0 || >3", "2.3.0", true],
|
||||
["> 2.2 || 2.1.0 || >3", "2.2.1", false],
|
||||
["> 2.2 || 2.1.0 || >3", "2.2.0", false],
|
||||
["> 2.2 || 2.1.0 || >3", "2.3.0", true],
|
||||
["> 2.2 || 2.1.0 || >3", "3.0.1", true],
|
||||
["~2", "2.0.0", true],
|
||||
["~2", "2.1.0", true],
|
||||
|
||||
["1.2.0 - 1.3.0", "1.2.2", true],
|
||||
["1.2 - 1.3", "1.2.2", true],
|
||||
["1 - 1.3", "1.2.2", true],
|
||||
["1 - 1.3", "1.3.0", true],
|
||||
["1.2 - 1.3", "1.3.1", true],
|
||||
["1.2 - 1.3", "1.4.0", false],
|
||||
["1 - 1.3", "1.3.1", true],
|
||||
|
||||
["1.2 - 1.3 || 5.0", "6.4.0", false],
|
||||
["1.2 - 1.3 || 5.0", "1.2.1", true],
|
||||
["5.0 || 1.2 - 1.3", "1.2.1", true],
|
||||
["1.2 - 1.3 || 5.0", "5.0", false],
|
||||
["5.0 || 1.2 - 1.3", "5.0", false],
|
||||
["1.2 - 1.3 || 5.0", "5.0.2", true],
|
||||
["5.0 || 1.2 - 1.3", "5.0.2", true],
|
||||
["1.2 - 1.3 || 5.0", "5.0.2", true],
|
||||
["5.0 || 1.2 - 1.3", "5.0.2", true],
|
||||
["5.0 || 1.2 - 1.3 || >8", "9.0.2", true],
|
||||
];
|
||||
|
||||
bench("semver.satisfies x " + tests.length, () => {
|
||||
for (const [range, version, expected] of tests) {
|
||||
if (satisfies(version, range) !== expected) {
|
||||
throw new Error("Unexpected result for " + range + " " + version);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (typeof Bun !== "undefined") {
|
||||
const satisfies = Bun.semver.satisfies;
|
||||
bench("Bun.semver.satisfies x " + tests.length, () => {
|
||||
for (const [range, version, expected] of tests) {
|
||||
if (satisfies(version, range) !== expected) {
|
||||
throw new Error("Unexpected result for " + range + " " + version);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
bench("semver.satisfies", () => {
|
||||
const [range, version, expected] = tests[0];
|
||||
if (satisfies(version, range) !== expected) {
|
||||
throw new Error("Unexpected result for " + range + " " + version);
|
||||
}
|
||||
});
|
||||
if (typeof Bun !== "undefined") {
|
||||
const satisfies = Bun.semver.satisfies;
|
||||
bench("Bun.semver.satisfies", () => {
|
||||
const [range, version, expected] = tests[0];
|
||||
if (satisfies(version, range) !== expected) {
|
||||
throw new Error("Unexpected result for " + range + " " + version);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
await run();
|
||||
237 build.zig
@@ -1,22 +1,31 @@
|
||||
const std = @import("std");
|
||||
const pathRel = std.fs.path.relative;
|
||||
const builtin = @import("builtin");
|
||||
const Wyhash = @import("./src/wyhash.zig").Wyhash;
|
||||
|
||||
const zig_version = builtin.zig_version;
|
||||
|
||||
/// Do not rename this constant. It is scanned by some scripts to determine which zig version to install.
|
||||
const recommended_zig_version = "0.12.0-dev.1604+caae40c21";
|
||||
|
||||
var is_debug_build = false;
|
||||
|
||||
fn exists(path: []const u8) bool {
|
||||
_ = std.fs.openFileAbsolute(path, .{ .mode = .read_only }) catch return false;
|
||||
return true;
|
||||
fn moduleSource(comptime out: []const u8) FileSource {
|
||||
if (comptime std.fs.path.dirname(@src().file)) |base| {
|
||||
const outpath = comptime base ++ std.fs.path.sep_str ++ out;
|
||||
return FileSource.relative(outpath);
|
||||
} else {
|
||||
return FileSource.relative(out);
|
||||
}
|
||||
}
|
||||
|
||||
const color_map = std.ComptimeStringMap([]const u8, .{
|
||||
&.{ "black", "30m" },
|
||||
&.{ "blue", "34m" },
|
||||
&.{ "b", "1m" },
|
||||
&.{ "d", "2m" },
|
||||
&.{ "cyan", "36m" },
|
||||
&.{ "green", "32m" },
|
||||
&.{ "magenta", "35m" },
|
||||
&.{ "red", "31m" },
|
||||
&.{ "white", "37m" },
|
||||
&.{ "yellow", "33m" },
|
||||
});
|
||||
|
||||
fn addInternalPackages(b: *Build, step: *CompileStep, _: std.mem.Allocator, _: []const u8, target: anytype) !void {
|
||||
const io: *Module = brk: {
|
||||
var io: *Module = brk: {
|
||||
if (target.isDarwin()) {
|
||||
break :brk b.createModule(.{
|
||||
.source_file = FileSource.relative("src/io/io_darwin.zig"),
|
||||
@@ -37,38 +46,11 @@ fn addInternalPackages(b: *Build, step: *CompileStep, _: std.mem.Allocator, _: [
|
||||
};
|
||||
|
||||
step.addModule("async_io", io);
|
||||
|
||||
step.addModule("zlib-internal", brk: {
|
||||
if (target.isWindows()) {
|
||||
break :brk b.createModule(.{ .source_file = FileSource.relative("src/deps/zlib.win32.zig") });
|
||||
}
|
||||
|
||||
break :brk b.createModule(.{ .source_file = FileSource.relative("src/deps/zlib.posix.zig") });
|
||||
});
|
||||
|
||||
const async_: *Module = brk: {
|
||||
if (target.isDarwin() or target.isLinux() or target.isFreeBSD()) {
|
||||
break :brk b.createModule(.{
|
||||
.source_file = FileSource.relative("src/async/posix_event_loop.zig"),
|
||||
});
|
||||
} else if (target.isWindows()) {
|
||||
break :brk b.createModule(.{
|
||||
.source_file = FileSource.relative("src/async/windows_event_loop.zig"),
|
||||
});
|
||||
}
|
||||
|
||||
break :brk b.createModule(.{
|
||||
.source_file = FileSource.relative("src/async/stub_event_loop.zig"),
|
||||
});
|
||||
};
|
||||
step.addModule("async", async_);
|
||||
}
|
||||
|
||||
const BunBuildOptions = struct {
|
||||
is_canary: bool = false,
|
||||
canary_revision: u32 = 0,
|
||||
canary: bool = false,
|
||||
sha: [:0]const u8 = "",
|
||||
version: []const u8 = "",
|
||||
baseline: bool = false,
|
||||
bindgen: bool = false,
|
||||
sizegen: bool = false,
|
||||
@@ -77,8 +59,6 @@ const BunBuildOptions = struct {
|
||||
runtime_js_version: u64 = 0,
|
||||
fallback_html_version: u64 = 0,
|
||||
|
||||
tinycc: bool = true,
|
||||
|
||||
pub fn updateRuntime(this: *BunBuildOptions) anyerror!void {
|
||||
if (std.fs.cwd().openFile("src/runtime.out.js", .{ .mode = .read_only })) |file| {
|
||||
defer file.close();
|
||||
@@ -109,13 +89,7 @@ const BunBuildOptions = struct {
|
||||
|
||||
pub fn step(this: BunBuildOptions, b: anytype) *std.build.OptionsStep {
|
||||
var opts = b.addOptions();
|
||||
opts.addOption(@TypeOf(this.is_canary), "is_canary", this.is_canary);
|
||||
opts.addOption(@TypeOf(this.canary_revision), "canary_revision", this.canary_revision);
|
||||
opts.addOption(
|
||||
std.SemanticVersion,
|
||||
"version",
|
||||
std.SemanticVersion.parse(this.version) catch @panic(b.fmt("Invalid version: {s}", .{this.version})),
|
||||
);
|
||||
opts.addOption(@TypeOf(this.canary), "is_canary", this.canary);
|
||||
opts.addOption(@TypeOf(this.sha), "sha", this.sha);
|
||||
opts.addOption(@TypeOf(this.baseline), "baseline", this.baseline);
|
||||
opts.addOption(@TypeOf(this.bindgen), "bindgen", this.bindgen);
|
||||
@@ -123,15 +97,35 @@ const BunBuildOptions = struct {
|
||||
opts.addOption(@TypeOf(this.base_path), "base_path", this.base_path);
|
||||
opts.addOption(@TypeOf(this.runtime_js_version), "runtime_js_version", this.runtime_js_version);
|
||||
opts.addOption(@TypeOf(this.fallback_html_version), "fallback_html_version", this.fallback_html_version);
|
||||
opts.addOption(@TypeOf(this.tinycc), "tinycc", this.tinycc);
|
||||
return opts;
|
||||
}
|
||||
};
|
||||
|
||||
// relative to the prefix
|
||||
var output_dir: []const u8 = "";
|
||||
fn panicIfNotFound(comptime filepath: []const u8) []const u8 {
|
||||
var file = std.fs.cwd().openFile(filepath, .{ .optimize = .read_only }) catch |err| {
|
||||
std.debug.panic("error: {s} opening {s}. Please ensure you've downloaded git submodules, and ran `make vendor`, `make jsc`.", .{ filepath, @errorName(err) });
|
||||
};
|
||||
file.close();
|
||||
|
||||
var optimize: std.builtin.OptimizeMode = .Debug;
|
||||
return filepath;
|
||||
}
|
||||
|
||||
const fmt = struct {
|
||||
pub usingnamespace @import("std").fmt;
|
||||
|
||||
pub fn hexInt(value: anytype) @TypeOf(std.fmt.fmtSliceHexLower("")) {
|
||||
return std.fmt.fmtSliceHexLower(std.mem.asBytes(&value));
|
||||
}
|
||||
|
||||
pub fn hexIntUp(value: anytype) @TypeOf(std.fmt.fmtSliceHexUpper("")) {
|
||||
return std.fmt.fmtSliceHexUpper(std.mem.asBytes(&value));
|
||||
}
|
||||
};
|
||||
|
||||
var x64 = "x64";
|
||||
var optimize: std.builtin.OptimizeMode = undefined;
|
||||
|
||||
const Build = std.Build;
|
||||
const CrossTarget = std.zig.CrossTarget;
|
||||
@@ -152,26 +146,6 @@ pub fn build(b: *Build) !void {
|
||||
}
|
||||
|
||||
pub fn build_(b: *Build) !void {
|
||||
switch (comptime zig_version.order(std.SemanticVersion.parse(recommended_zig_version) catch unreachable)) {
|
||||
.eq => {},
|
||||
.lt => {
|
||||
@compileError("The minimum version of Zig required to compile Bun is " ++ recommended_zig_version ++ ", found " ++ @import("builtin").zig_version_string ++ ". Please follow the instructions at https://bun.sh/docs/project/contributing. You may need to re-run `bun setup`.");
|
||||
},
|
||||
.gt => {
|
||||
const colors = std.io.getStdErr().supportsAnsiEscapeCodes();
|
||||
std.debug.print(
|
||||
"{s}WARNING:\nBun recommends Zig version '{s}', but found '{s}', build may fail...\nMake sure you are following the instructions at https://bun.sh/docs/project/contributing\n{s}You can update to the right version using 'zigup {s}'\n\n",
|
||||
.{
|
||||
if (colors) "\x1b[1;33m" else "",
|
||||
recommended_zig_version,
|
||||
builtin.zig_version_string,
|
||||
if (colors) "\x1b[0m" else "",
|
||||
recommended_zig_version,
|
||||
},
|
||||
);
|
||||
},
|
||||
}
|
||||
|
||||
// Standard target options allows the person running `zig build` to choose
|
||||
// what target to build for. Here we do not override the defaults, which
|
||||
// means any target is allowed, and the default is native. Other options
|
||||
@@ -181,14 +155,8 @@ pub fn build_(b: *Build) !void {
|
||||
// between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall.
|
||||
optimize = b.standardOptimizeOption(.{});
|
||||
|
||||
var generated_code_directory = b.option([]const u8, "generated-code", "Set the generated code directory") orelse "";
|
||||
|
||||
if (generated_code_directory.len == 0) {
|
||||
generated_code_directory = b.pathFromRoot("build/codegen");
|
||||
}
|
||||
|
||||
var output_dir_buf = std.mem.zeroes([4096]u8);
|
||||
const bin_label = if (optimize == std.builtin.OptimizeMode.Debug) "packages/debug-bun-" else "packages/bun-";
|
||||
var bin_label = if (optimize == std.builtin.OptimizeMode.Debug) "packages/debug-bun-" else "packages/bun-";
|
||||
|
||||
var triplet_buf: [64]u8 = undefined;
|
||||
var os_tagname = @tagName(target.getOs().tag);
|
||||
@@ -206,7 +174,7 @@ pub fn build_(b: *Build) !void {
|
||||
&triplet_buf,
|
||||
os_tagname,
|
||||
);
|
||||
const osname = triplet_buf[0..os_tagname.len];
|
||||
var osname = triplet_buf[0..os_tagname.len];
|
||||
triplet_buf[osname.len] = '-';
|
||||
|
||||
std.mem.copy(u8, triplet_buf[osname.len + 1 ..], @tagName(target.getCpuArch()));
|
||||
@@ -217,19 +185,17 @@ pub fn build_(b: *Build) !void {
|
||||
cpuArchName = cpuArchName[0..3];
|
||||
}
|
||||
|
||||
const triplet = triplet_buf[0 .. osname.len + cpuArchName.len + 1];
|
||||
var triplet = triplet_buf[0 .. osname.len + cpuArchName.len + 1];
|
||||
|
||||
const outfile_maybe = b.option([]const u8, "output-file", "target to install to");
|
||||
|
||||
if (outfile_maybe) |outfile| {
|
||||
output_dir = try pathRel(b.allocator, b.install_prefix, std.fs.path.dirname(outfile) orelse "");
|
||||
if (b.option([]const u8, "output-dir", "target to install to") orelse std.os.getenv("OUTPUT_DIR")) |output_dir_| {
|
||||
output_dir = try pathRel(b.allocator, b.install_prefix, output_dir_);
|
||||
} else {
|
||||
const output_dir_base = try std.fmt.bufPrint(&output_dir_buf, "{s}{s}", .{ bin_label, triplet });
|
||||
output_dir = try pathRel(b.allocator, b.install_prefix, output_dir_base);
|
||||
}
|
||||
|
||||
is_debug_build = optimize == OptimizeMode.Debug;
|
||||
const bun_executable_name = if (outfile_maybe) |outfile| std.fs.path.basename(outfile[0 .. outfile.len - std.fs.path.extension(outfile).len]) else if (is_debug_build) "bun-debug" else "bun";
|
||||
const bun_executable_name = if (optimize == std.builtin.OptimizeMode.Debug) "bun-debug" else "bun";
|
||||
const root_src = if (target.getOsTag() == std.Target.Os.Tag.freestanding)
|
||||
"root_wasm.zig"
|
||||
else
|
||||
@@ -251,31 +217,10 @@ pub fn build_(b: *Build) !void {
|
||||
.root_source_file = FileSource.relative(root_src),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.main_mod_path = .{ .cwd_relative = b.pathFromRoot(".") },
|
||||
.main_pkg_path = .{ .cwd_relative = b.pathFromRoot(".") },
|
||||
});
|
||||
|
||||
if (!exists(b.pathFromRoot(try std.fs.path.join(b.allocator, &.{
|
||||
"src",
|
||||
"js_lexer",
|
||||
"id_continue_bitset.blob",
|
||||
})))) {
|
||||
const identifier_data = b.pathFromRoot(try std.fs.path.join(b.allocator, &.{ "src", "js_lexer", "identifier_data.zig" }));
|
||||
var run_step = b.addSystemCommand(&.{
|
||||
b.zig_exe,
|
||||
"run",
|
||||
identifier_data,
|
||||
});
|
||||
run_step.has_side_effects = true;
|
||||
obj.step.dependOn(&run_step.step);
|
||||
}
|
||||
|
||||
b.reference_trace = if (b.option(u32, "reference-trace", "Set the reference trace")) |trace|
|
||||
if (trace == 0)
|
||||
null
|
||||
else
|
||||
trace
|
||||
else
|
||||
16;
|
||||
b.reference_trace = 16;
|
||||
|
||||
var default_build_options: BunBuildOptions = brk: {
|
||||
const is_baseline = arch.isX86() and (target.cpu_model == .baseline or
|
||||
@@ -286,7 +231,7 @@ pub fn build_(b: *Build) !void {
|
||||
git_sha = b.allocator.dupeZ(u8, sha) catch unreachable;
|
||||
} else {
|
||||
sha: {
|
||||
const result = std.ChildProcess.run(.{
|
||||
const result = std.ChildProcess.exec(.{
|
||||
.allocator = b.allocator,
|
||||
.argv = &.{
|
||||
"git",
|
||||
@@ -301,17 +246,9 @@ pub fn build_(b: *Build) !void {
|
||||
}
|
||||
}
|
||||
|
||||
const is_canary, const canary_revision = if (b.option(u32, "canary", "Treat this as a canary build")) |rev|
|
||||
if (rev == 0)
|
||||
.{ false, 0 }
|
||||
else
|
||||
.{ true, rev }
|
||||
else
|
||||
.{ false, 0 };
|
||||
const is_canary = (std.os.getenvZ("BUN_CANARY") orelse "0")[0] == '1';
|
||||
break :brk .{
|
||||
.is_canary = is_canary,
|
||||
.canary_revision = canary_revision,
|
||||
.version = b.option([]const u8, "version", "Value of `Bun.version`") orelse "0.0.0",
|
||||
.canary = is_canary,
|
||||
.sha = git_sha,
|
||||
.baseline = is_baseline,
|
||||
.bindgen = false,
|
||||
@@ -346,8 +283,8 @@ pub fn build_(b: *Build) !void {
|
||||
min_version,
|
||||
max_version,
|
||||
obj.target.getCpuModel().name,
|
||||
}) catch {};
|
||||
std.io.getStdErr().writer().print("Zig v{s}\n", .{builtin.zig_version_string}) catch {};
|
||||
}) catch unreachable;
|
||||
std.io.getStdErr().writer().print("Output: {s}/{s}\n\n", .{ output_dir, bun_executable_name }) catch unreachable;
|
||||
|
||||
defer obj_step.dependOn(&obj.step);
|
||||
|
||||
@@ -366,23 +303,13 @@ pub fn build_(b: *Build) !void {
|
||||
|
||||
obj.addOptions("build_options", actual_build_options.step(b));
|
||||
|
||||
// Generated Code
|
||||
// TODO: exit with a better error early if these files do not exist. it is an indication someone ran `zig build` directly without the code generators.
|
||||
obj.addModule("ZigGeneratedClasses", b.createModule(.{
|
||||
.source_file = .{ .path = b.pathJoin(&.{ generated_code_directory, "ZigGeneratedClasses.zig" }) },
|
||||
}));
|
||||
obj.addModule("ResolvedSourceTag", b.createModule(.{
|
||||
.source_file = .{ .path = b.pathJoin(&.{ generated_code_directory, "ResolvedSourceTag.zig" }) },
|
||||
}));
|
||||
|
||||
obj.linkLibC();
|
||||
obj.dll_export_fns = true;
|
||||
obj.strip = false;
|
||||
obj.omit_frame_pointer = optimize != .Debug;
|
||||
obj.subsystem = .Console;
|
||||
|
||||
obj.strip = false;
|
||||
obj.bundle_compiler_rt = false;
|
||||
obj.omit_frame_pointer = optimize != .Debug;
|
||||
// Disable stack probing on x86 so we don't need to include compiler_rt
|
||||
if (target.getCpuArch().isX86() or target.isWindows()) obj.disable_stack_probing = true;
|
||||
if (target.getCpuArch().isX86()) obj.disable_stack_probing = true;
|
||||
|
||||
if (b.option(bool, "for-editor", "Do not emit bin, just check for errors") orelse false) {
|
||||
// obj.emit_bin = .no_emit;
|
||||
@@ -404,7 +331,7 @@ pub fn build_(b: *Build) !void {
|
||||
.root_source_file = FileSource.relative("src/bindgen.zig"),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.main_mod_path = obj.main_mod_path,
|
||||
.main_pkg_path = obj.main_pkg_path,
|
||||
});
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
|
||||
@@ -421,7 +348,7 @@ pub fn build_(b: *Build) !void {
|
||||
.root_source_file = FileSource.relative("root_wasm.zig"),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.main_mod_path = obj.main_mod_path,
|
||||
.main_pkg_path = obj.main_pkg_path,
|
||||
});
|
||||
defer wasm_step.dependOn(&wasm.step);
|
||||
wasm.strip = false;
|
||||
@@ -440,7 +367,7 @@ pub fn build_(b: *Build) !void {
|
||||
.root_source_file = FileSource.relative("misctools/http_bench.zig"),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.main_mod_path = obj.main_mod_path,
|
||||
.main_pkg_path = obj.main_pkg_path,
|
||||
});
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
|
||||
@@ -454,7 +381,7 @@ pub fn build_(b: *Build) !void {
|
||||
.root_source_file = FileSource.relative("misctools/machbench.zig"),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.main_mod_path = obj.main_mod_path,
|
||||
.main_pkg_path = obj.main_pkg_path,
|
||||
});
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
|
||||
@@ -468,7 +395,7 @@ pub fn build_(b: *Build) !void {
|
||||
.root_source_file = FileSource.relative("misctools/fetch.zig"),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.main_mod_path = obj.main_mod_path,
|
||||
.main_pkg_path = obj.main_pkg_path,
|
||||
});
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
|
||||
@@ -482,7 +409,7 @@ pub fn build_(b: *Build) !void {
|
||||
.root_source_file = FileSource.relative("src/bench/string-handling.zig"),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.main_mod_path = obj.main_mod_path,
|
||||
.main_pkg_path = obj.main_pkg_path,
|
||||
});
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
|
||||
@@ -496,7 +423,7 @@ pub fn build_(b: *Build) !void {
|
||||
.root_source_file = FileSource.relative("src/sha.zig"),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.main_mod_path = obj.main_mod_path,
|
||||
.main_pkg_path = obj.main_pkg_path,
|
||||
});
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
|
||||
@@ -510,7 +437,7 @@ pub fn build_(b: *Build) !void {
|
||||
.root_source_file = FileSource.relative("src/sourcemap/vlq_bench.zig"),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.main_mod_path = obj.main_mod_path,
|
||||
.main_pkg_path = obj.main_pkg_path,
|
||||
});
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
|
||||
@@ -524,7 +451,7 @@ pub fn build_(b: *Build) !void {
|
||||
.root_source_file = FileSource.relative("misctools/tgz.zig"),
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
.main_mod_path = obj.main_mod_path,
|
||||
.main_pkg_path = obj.main_pkg_path,
|
||||
});
|
||||
defer headers_step.dependOn(&headers_obj.step);
|
||||
try configureObjectStep(b, headers_obj, headers_step, @TypeOf(target), target);
|
||||
@@ -534,14 +461,14 @@ pub fn build_(b: *Build) !void {
|
||||
{
|
||||
const headers_step = b.step("test", "Build test");
|
||||
|
||||
const test_file = b.option([]const u8, "test-file", "Input file for test");
|
||||
const test_bin_ = b.option([]const u8, "test-bin", "Emit bin to");
|
||||
const test_filter = b.option([]const u8, "test-filter", "Filter for test");
|
||||
var test_file = b.option([]const u8, "test-file", "Input file for test");
|
||||
var test_bin_ = b.option([]const u8, "test-bin", "Emit bin to");
|
||||
var test_filter = b.option([]const u8, "test-filter", "Filter for test");
|
||||
|
||||
var headers_obj: *CompileStep = b.addTest(.{
|
||||
.root_source_file = FileSource.relative(test_file orelse "src/main.zig"),
|
||||
.target = target,
|
||||
.main_mod_path = obj.main_mod_path,
|
||||
.main_pkg_path = obj.main_pkg_path,
|
||||
});
|
||||
headers_obj.filter = test_filter;
|
||||
if (test_bin_) |test_bin| {
|
||||
@@ -563,19 +490,7 @@ pub fn build_(b: *Build) !void {
|
||||
headers_obj.addOptions("build_options", default_build_options.step(b));
|
||||
}
|
||||
|
||||
// Running `zig build` with no arguments is almost always a mistake.
|
||||
const mistake_message = b.addSystemCommand(&.{
|
||||
"echo",
|
||||
\\
|
||||
\\error: To build Bun from source, please use `bun run setup` instead of `zig build`"
|
||||
\\
|
||||
\\If you want to build the zig code only, run:
|
||||
\\ 'zig build obj -Dgenerated-code=./build/codegen [...opts]'
|
||||
\\
|
||||
\\For more info, see https://bun.sh/docs/project/contributing
|
||||
\\
|
||||
});
|
||||
b.default_step.dependOn(&mistake_message.step);
|
||||
b.default_step.dependOn(obj_step);
|
||||
}
|
||||
|
||||
pub var original_make_fn: ?*const fn (step: *std.build.Step) anyerror!void = null;
|
||||
|
||||
@@ -84,7 +84,7 @@ _bun_completions() {
|
||||
|
||||
local SUBCOMMANDS="dev bun create run install add remove upgrade completions discord help init pm x";
|
||||
|
||||
GLOBAL_OPTIONS[LONG_OPTIONS]="--use --cwd --bunfile --server-bunfile --config --disable-react-fast-refresh --disable-hmr --env-file --extension-order --jsx-factory --jsx-fragment --extension-order --jsx-factory --jsx-fragment --jsx-import-source --jsx-production --jsx-runtime --main-fields --no-summary --version --platform --public-dir --tsconfig-override --define --external --help --inject --loader --origin --port --dump-environment-variables --dump-limits --disable-bun-js";
|
||||
GLOBAL_OPTIONS[LONG_OPTIONS]="--use --cwd --bunfile --server-bunfile --config --disable-react-fast-refresh --disable-hmr --extension-order --jsx-factory --jsx-fragment --extension-order --jsx-factory --jsx-fragment --jsx-import-source --jsx-production --jsx-runtime --main-fields --no-summary --version --platform --public-dir --tsconfig-override --define --external --help --inject --loader --origin --port --dump-environment-variables --dump-limits --disable-bun-js";
|
||||
GLOBAL_OPTIONS[SHORT_OPTIONS]="-c -v -d -e -h -i -l -u -p";
|
||||
|
||||
PACKAGE_OPTIONS[ADD_OPTIONS_LONG]="--development --optional";
|
||||
|
||||
@@ -51,7 +51,7 @@ function __bun_last_cmd --argument-names n
|
||||
end
|
||||
|
||||
set -l bun_install_boolean_flags yarn production optional development no-save dry-run force no-cache silent verbose global
|
||||
set -l bun_install_boolean_flags_descriptions "Write a yarn.lock file (yarn v1)" "Don't install devDependencies" "Add dependency to optionalDependencies" "Add dependency to devDependencies" "Don't install devDependencies" "Don't install anything" "Always request the latest versions from the registry & reinstall all dependencies" "Ignore manifest cache entirely" "Don't output anything" "Excessively verbose logging" "Use global folder"
|
||||
set -l bun_install_boolean_flags_descriptions "Write a yarn.lock file (yarn v1)" "Don't install devDependencies" "Add dependency to optionalDependencies" "Add dependency to devDependencies" "Don't install devDependencies" "Don't install anything" "Always request the latest versions from the registry & reinstall all dependenices" "Ignore manifest cache entirely" "Don't output anything" "Excessively verbose logging" "Use global folder"
|
||||
|
||||
set -l bun_builtin_cmds dev create help bun upgrade discord run install remove add init link unlink pm x
|
||||
set -l bun_builtin_cmds_without_run dev create help bun upgrade discord install remove add init pm x
|
||||
|
||||
1305 completions/bun.zsh
(File diff suppressed because it is too large)
@@ -78,9 +78,6 @@ subcommands:
|
||||
- name: server-bunfile
|
||||
type: string
|
||||
summary: "Use a specific .bun file for SSR in bun dev (default: node_modules.server.bun)"
|
||||
- name: env-file
|
||||
type: string
|
||||
summary: "Load environment variables from the specified file(s)"
|
||||
- name: extension-order
|
||||
type: string
|
||||
summary: "defaults to: .tsx,.ts,.jsx,.js,.json"
|
||||
@@ -121,7 +118,7 @@ subcommands:
|
||||
- frozen-lockfile -- "Disallow changes to lockfile"
|
||||
- no-save --
|
||||
- dry-run -- "Don't install anything"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependencies"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
|
||||
- name: cache-dir
|
||||
type: string
|
||||
summary: "Store & load cached data from a specific directory path"
|
||||
@@ -156,7 +153,7 @@ subcommands:
|
||||
- frozen-lockfile -- "Disallow changes to lockfile"
|
||||
- no-save --
|
||||
- dry-run -- "Don't install anything"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependencies"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
|
||||
- no-cache -- "Ignore manifest cache entirely"
|
||||
- silent -- "Don't output anything"
|
||||
- verbose -- "Excessively verbose logging"
|
||||
@@ -194,7 +191,7 @@ subcommands:
|
||||
- frozen-lockfile -- "Disallow changes to lockfile"
|
||||
- no-save --
|
||||
- dry-run -- "Don't install anything"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependencies"
|
||||
- force -- "Always request the latest versions from the registry & reinstall all dependenices"
|
||||
- name: cache-dir
|
||||
type: string
|
||||
summary: "Store & load cached data from a specific directory path"
|
||||
|
||||
29 dockerhub/Dockerfile-alpine Normal file
@@ -0,0 +1,29 @@
|
||||
# bun:alpine
|
||||
# Not officially supported (yet)
|
||||
|
||||
ARG GLIBC_RELEASE=2.35-r0
|
||||
|
||||
FROM alpine:latest AS build
|
||||
|
||||
WORKDIR /tmp
|
||||
RUN apk --no-cache add unzip
|
||||
|
||||
ARG GLIBC_RELEASE
|
||||
RUN wget https://alpine-pkgs.sgerrand.com/sgerrand.rsa.pub && \
|
||||
wget https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_RELEASE}/glibc-${GLIBC_RELEASE}.apk
|
||||
|
||||
ADD https://github.com/oven-sh/bun/releases/latest/download/bun-linux-x64.zip bun-linux-x64.zip
|
||||
RUN unzip bun-linux-x64.zip
|
||||
|
||||
FROM alpine:latest
|
||||
|
||||
ARG GLIBC_RELEASE
|
||||
COPY --from=build /tmp/sgerrand.rsa.pub /etc/apk/keys
|
||||
COPY --from=build /tmp/glibc-${GLIBC_RELEASE}.apk /tmp
|
||||
COPY --from=build /tmp/bun-linux-x64/bun /usr/local/bin
|
||||
|
||||
RUN apk --no-cache --force-overwrite add /tmp/glibc-${GLIBC_RELEASE}.apk \
|
||||
&& rm /etc/apk/keys/sgerrand.rsa.pub \
|
||||
&& rm /tmp/glibc-${GLIBC_RELEASE}.apk
|
||||
|
||||
RUN bun --version
|
||||
77 dockerhub/Dockerfile-debian Normal file
@@ -0,0 +1,77 @@
|
||||
FROM debian:bullseye-slim AS build
|
||||
|
||||
# https://github.com/oven-sh/bun/releases
|
||||
ARG BUN_VERSION=latest
|
||||
|
||||
RUN apt-get update -qq \
|
||||
&& apt-get install -qq --no-install-recommends \
|
||||
ca-certificates \
|
||||
curl \
|
||||
dirmngr \
|
||||
gpg \
|
||||
gpg-agent \
|
||||
unzip \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& arch="$(dpkg --print-architecture)" \
|
||||
&& case "${arch##*-}" in \
|
||||
amd64) build="x64-baseline";; \
|
||||
arm64) build="aarch64";; \
|
||||
*) echo "error: unsupported architecture: ($arch)"; exit 1 ;; \
|
||||
esac \
|
||||
&& version="$BUN_VERSION" \
|
||||
&& case "$version" in \
|
||||
latest | canary | bun-v*) tag="$version"; ;; \
|
||||
v*) tag="bun-$version"; ;; \
|
||||
*) tag="bun-v$version"; ;; \
|
||||
esac \
|
||||
&& case "$tag" in \
|
||||
latest) release="latest/download"; ;; \
|
||||
*) release="download/$tag"; ;; \
|
||||
esac \
|
||||
&& curl "https://github.com/oven-sh/bun/releases/$release/bun-linux-$build.zip" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
|| (echo "error: unknown release: ($tag)" && exit 1) \
|
||||
&& for key in \
|
||||
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
|
||||
; do \
|
||||
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|
||||
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
|
||||
done \
|
||||
&& gpg --update-trustdb \
|
||||
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|
||||
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
|
||||
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|
||||
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
|
||||
&& unzip "bun-linux-$build.zip" \
|
||||
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
|
||||
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
|
||||
&& chmod +x /usr/local/bin/bun \
|
||||
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
|
||||
&& which bun \
|
||||
&& which bunx \
|
||||
&& bun --version
|
||||
|
||||
FROM debian:bullseye-slim
|
||||
|
||||
RUN groupadd bun \
|
||||
--gid 1000 \
|
||||
&& useradd bun \
|
||||
--uid 1000 \
|
||||
--gid bun \
|
||||
--shell /bin/sh \
|
||||
--create-home
|
||||
|
||||
COPY docker-entrypoint.sh /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bunx /usr/local/bin
|
||||
|
||||
WORKDIR /home/bun/app
|
||||
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
|
||||
CMD ["/usr/local/bin/bun"]
|
||||
@@ -1,4 +1,5 @@
|
||||
FROM debian:bullseye-slim AS build
|
||||
# Not officially supported (yet)
|
||||
|
||||
# https://github.com/oven-sh/bun/releases
|
||||
ARG BUN_VERSION=latest
|
||||
@@ -17,7 +18,7 @@ RUN apt-get update -qq \
|
||||
&& case "${arch##*-}" in \
|
||||
amd64) build="x64-baseline";; \
|
||||
arm64) build="aarch64";; \
|
||||
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
|
||||
*) echo "error: unsupported architecture: ($arch)"; exit 1 ;; \
|
||||
esac \
|
||||
&& version="$BUN_VERSION" \
|
||||
&& case "$version" in \
|
||||
@@ -33,42 +34,36 @@ RUN apt-get update -qq \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
|| (echo "error: failed to download: $tag" && exit 1) \
|
||||
|| (echo "error: unknown release: ($tag)" && exit 1) \
|
||||
&& for key in \
|
||||
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
|
||||
; do \
|
||||
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|
||||
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
|
||||
done \
|
||||
&& gpg --update-trustdb \
|
||||
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|
||||
|| (echo "error: failed to verify: $tag" && exit 1) \
|
||||
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
|
||||
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|
||||
|| (echo "error: failed to verify: $tag" && exit 1) \
|
||||
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
|
||||
&& unzip "bun-linux-$build.zip" \
|
||||
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
|
||||
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
|
||||
&& chmod +x /usr/local/bin/bun \
|
||||
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
|
||||
&& which bun \
|
||||
&& which bunx \
|
||||
&& bun --version
|
||||
|
||||
FROM gcr.io/distroless/base-nossl-debian11
|
||||
|
||||
# Disable the runtime transpiler cache by default inside Docker containers.
|
||||
# On ephemeral containers, the cache is not useful
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/
|
||||
|
||||
# Temporarily use the `build`-stage image binaries to create a symlink:
|
||||
RUN --mount=type=bind,from=build,source=/usr/bin,target=/usr/bin \
|
||||
--mount=type=bind,from=build,source=/bin,target=/bin <<EOF
|
||||
ln -s /usr/local/bin/bun /usr/local/bin/bunx
|
||||
which bunx
|
||||
EOF
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bunx /usr/local/bin
|
||||
|
||||
WORKDIR /app
|
||||
ENTRYPOINT ["/usr/local/bin/bun"]
|
||||
CMD ["/usr/local/bin/bun"]
|
||||
@@ -1,118 +0,0 @@
|
||||
FROM alpine:3.18 AS build
|
||||
|
||||
# https://github.com/oven-sh/bun/releases
|
||||
ARG BUN_VERSION=latest
|
||||
|
||||
# TODO: Instead of downloading glibc from a third-party source, we should
|
||||
# build it from source. This is a temporary solution.
|
||||
# See: https://github.com/sgerrand/alpine-pkg-glibc
|
||||
|
||||
# https://github.com/sgerrand/alpine-pkg-glibc/releases
|
||||
# https://github.com/sgerrand/alpine-pkg-glibc/issues/176
|
||||
ARG GLIBC_VERSION=2.34-r0
|
||||
|
||||
# https://github.com/oven-sh/bun/issues/5545#issuecomment-1722461083
|
||||
ARG GLIBC_VERSION_AARCH64=2.26-r1
|
||||
|
||||
RUN apk --no-cache add \
|
||||
ca-certificates \
|
||||
curl \
|
||||
dirmngr \
|
||||
gpg \
|
||||
gpg-agent \
|
||||
unzip \
|
||||
&& arch="$(apk --print-arch)" \
|
||||
&& case "${arch##*-}" in \
|
||||
x86_64) build="x64-baseline";; \
|
||||
aarch64) build="aarch64";; \
|
||||
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
|
||||
esac \
|
||||
&& version="$BUN_VERSION" \
|
||||
&& case "$version" in \
|
||||
latest | canary | bun-v*) tag="$version"; ;; \
|
||||
v*) tag="bun-$version"; ;; \
|
||||
*) tag="bun-v$version"; ;; \
|
||||
esac \
|
||||
&& case "$tag" in \
|
||||
latest) release="latest/download"; ;; \
|
||||
*) release="download/$tag"; ;; \
|
||||
esac \
|
||||
&& curl "https://github.com/oven-sh/bun/releases/$release/bun-linux-$build.zip" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
|| (echo "error: failed to download: $tag" && exit 1) \
|
||||
&& for key in \
|
||||
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
|
||||
; do \
|
||||
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|
||||
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
|
||||
done \
|
||||
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|
||||
|| (echo "error: failed to verify: $tag" && exit 1) \
|
||||
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|
||||
|| (echo "error: failed to verify: $tag" && exit 1) \
|
||||
&& unzip "bun-linux-$build.zip" \
|
||||
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
|
||||
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
|
||||
&& chmod +x /usr/local/bin/bun \
|
||||
&& cd /tmp \
|
||||
&& case "${arch##*-}" in \
|
||||
x86_64) curl "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-${GLIBC_VERSION}.apk" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
|| (echo "error: failed to download: glibc v${GLIBC_VERSION}" && exit 1) \
|
||||
&& mv "glibc-${GLIBC_VERSION}.apk" glibc.apk \
|
||||
&& curl "https://github.com/sgerrand/alpine-pkg-glibc/releases/download/${GLIBC_VERSION}/glibc-bin-${GLIBC_VERSION}.apk" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
|| (echo "error: failed to download: glibc-bin v${GLIBC_VERSION}" && exit 1) \
|
||||
&& mv "glibc-bin-${GLIBC_VERSION}.apk" glibc-bin.apk ;; \
|
||||
aarch64) curl "https://raw.githubusercontent.com/squishyu/alpine-pkg-glibc-aarch64-bin/master/glibc-${GLIBC_VERSION_AARCH64}.apk" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
|| (echo "error: failed to download: glibc v${GLIBC_VERSION_AARCH64}" && exit 1) \
|
||||
&& mv "glibc-${GLIBC_VERSION_AARCH64}.apk" glibc.apk \
|
||||
&& curl "https://raw.githubusercontent.com/squishyu/alpine-pkg-glibc-aarch64-bin/master/glibc-bin-${GLIBC_VERSION_AARCH64}.apk" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
|| (echo "error: failed to download: glibc-bin v${GLIBC_VERSION_AARCH64}" && exit 1) \
|
||||
&& mv "glibc-bin-${GLIBC_VERSION_AARCH64}.apk" glibc-bin.apk ;; \
|
||||
*) echo "error: unsupported architecture '$arch'"; exit 1 ;; \
|
||||
esac
|
||||
|
||||
FROM alpine:3.18
|
||||
|
||||
# Disable the runtime transpiler cache by default inside Docker containers.
|
||||
# On ephemeral containers, the cache is not useful
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
COPY --from=build /tmp/glibc.apk /tmp/
|
||||
COPY --from=build /tmp/glibc-bin.apk /tmp/
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/
|
||||
COPY docker-entrypoint.sh /usr/local/bin/
|
||||
|
||||
RUN addgroup -g 1000 bun \
|
||||
&& adduser -u 1000 -G bun -s /bin/sh -D bun \
|
||||
&& apk --no-cache --force-overwrite --allow-untrusted add \
|
||||
/tmp/glibc.apk \
|
||||
/tmp/glibc-bin.apk \
|
||||
&& rm /tmp/glibc.apk \
|
||||
&& rm /tmp/glibc-bin.apk \
|
||||
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
|
||||
&& which bun \
|
||||
&& which bunx \
|
||||
&& bun --version
|
||||
|
||||
WORKDIR /home/bun/app
|
||||
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
|
||||
CMD ["/usr/local/bin/bun"]
|
||||
@@ -1,7 +1,7 @@
|
||||
FROM debian:bullseye-slim AS build
|
||||
|
||||
# https://github.com/oven-sh/bun/releases
|
||||
ARG BUN_VERSION=latest
|
||||
ARG BUN_VERSION=0.5.7
|
||||
|
||||
RUN apt-get update -qq \
|
||||
&& apt-get install -qq --no-install-recommends \
|
||||
@@ -17,7 +17,7 @@ RUN apt-get update -qq \
|
||||
&& case "${arch##*-}" in \
|
||||
amd64) build="x64-baseline";; \
|
||||
arm64) build="aarch64";; \
|
||||
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
|
||||
*) echo "error: unsupported architecture: ($arch)"; exit 1 ;; \
|
||||
esac \
|
||||
&& version="$BUN_VERSION" \
|
||||
&& case "$version" in \
|
||||
@@ -33,49 +33,44 @@ RUN apt-get update -qq \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
|| (echo "error: failed to download: $tag" && exit 1) \
|
||||
|| (echo "error: unknown release: ($tag)" && exit 1) \
|
||||
&& for key in \
|
||||
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
|
||||
; do \
|
||||
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|
||||
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
|
||||
done \
|
||||
&& gpg --update-trustdb \
|
||||
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|
||||
|| (echo "error: failed to verify: $tag" && exit 1) \
|
||||
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
|
||||
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|
||||
|| (echo "error: failed to verify: $tag" && exit 1) \
|
||||
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
|
||||
&& unzip "bun-linux-$build.zip" \
|
||||
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
|
||||
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
|
||||
&& chmod +x /usr/local/bin/bun \
|
||||
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
|
||||
&& which bun \
|
||||
&& which bunx \
|
||||
&& bun --version
|
||||
|
||||
FROM debian:bullseye-slim
|
||||
|
||||
# Disable the runtime transpiler cache by default inside Docker containers.
|
||||
# On ephemeral containers, the cache is not useful
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
COPY docker-entrypoint.sh /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
|
||||
|
||||
RUN groupadd bun \
|
||||
--gid 1000 \
|
||||
&& useradd bun \
|
||||
--uid 1000 \
|
||||
--gid bun \
|
||||
--shell /bin/sh \
|
||||
--create-home \
|
||||
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
|
||||
&& which bun \
|
||||
&& which bunx \
|
||||
&& bun --version
|
||||
--create-home
|
||||
|
||||
COPY docker-entrypoint.sh /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bunx /usr/local/bin
|
||||
|
||||
WORKDIR /home/bun/app
|
||||
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
FROM debian:bullseye-slim AS build
|
||||
|
||||
# https://github.com/oven-sh/bun/releases
|
||||
ARG BUN_VERSION=latest
|
||||
ARG BUN_VERSION=0.5.7
|
||||
|
||||
RUN apt-get update -qq \
|
||||
&& apt-get install -qq --no-install-recommends \
|
||||
@@ -17,7 +17,7 @@ RUN apt-get update -qq \
|
||||
&& case "${arch##*-}" in \
|
||||
amd64) build="x64-baseline";; \
|
||||
arm64) build="aarch64";; \
|
||||
*) echo "error: unsupported architecture: $arch"; exit 1 ;; \
|
||||
*) echo "error: unsupported architecture: ($arch)"; exit 1 ;; \
|
||||
esac \
|
||||
&& version="$BUN_VERSION" \
|
||||
&& case "$version" in \
|
||||
@@ -33,47 +33,44 @@ RUN apt-get update -qq \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
|| (echo "error: failed to download: $tag" && exit 1) \
|
||||
|| (echo "error: unknown release: ($tag)" && exit 1) \
|
||||
&& for key in \
|
||||
"F3DCC08A8572C0749B3E18888EAB4D40A7B22B59" \
|
||||
; do \
|
||||
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys "$key" \
|
||||
|| gpg --batch --keyserver keyserver.ubuntu.com --recv-keys "$key" ; \
|
||||
done \
|
||||
&& gpg --update-trustdb \
|
||||
&& curl "https://github.com/oven-sh/bun/releases/$release/SHASUMS256.txt.asc" \
|
||||
-fsSLO \
|
||||
--compressed \
|
||||
--retry 5 \
|
||||
&& gpg --batch --decrypt --output SHASUMS256.txt SHASUMS256.txt.asc \
|
||||
|| (echo "error: failed to verify: $tag" && exit 1) \
|
||||
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
|
||||
&& grep " bun-linux-$build.zip\$" SHASUMS256.txt | sha256sum -c - \
|
||||
|| (echo "error: failed to verify: $tag" && exit 1) \
|
||||
|| (echo "error: failed to verify release: ($tag)" && exit 1) \
|
||||
&& unzip "bun-linux-$build.zip" \
|
||||
&& mv "bun-linux-$build/bun" /usr/local/bin/bun \
|
||||
&& rm -f "bun-linux-$build.zip" SHASUMS256.txt.asc SHASUMS256.txt \
|
||||
&& chmod +x /usr/local/bin/bun
|
||||
&& chmod +x /usr/local/bin/bun \
|
||||
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
|
||||
&& which bun \
|
||||
&& which bunx \
|
||||
&& bun --version
|
||||
|
||||
FROM debian:bullseye
|
||||
|
||||
COPY docker-entrypoint.sh /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin/bun
|
||||
|
||||
# Disable the runtime transpiler cache by default inside Docker containers.
|
||||
# On ephemeral containers, the cache is not useful
|
||||
ARG BUN_RUNTIME_TRANSPILER_CACHE_PATH=0
|
||||
ENV BUN_RUNTIME_TRANSPILER_CACHE_PATH=${BUN_RUNTIME_TRANSPILER_CACHE_PATH}
|
||||
|
||||
RUN groupadd bun \
|
||||
--gid 1000 \
|
||||
&& useradd bun \
|
||||
--uid 1000 \
|
||||
--gid bun \
|
||||
--shell /bin/sh \
|
||||
--create-home \
|
||||
&& ln -s /usr/local/bin/bun /usr/local/bin/bunx \
|
||||
&& which bun \
|
||||
&& which bunx \
|
||||
&& bun --version
|
||||
--create-home
|
||||
|
||||
COPY docker-entrypoint.sh /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bun /usr/local/bin
|
||||
COPY --from=build /usr/local/bin/bunx /usr/local/bin
|
||||
|
||||
WORKDIR /home/bun/app
|
||||
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
|
||||
|
||||
@@ -26,10 +26,10 @@ Below is a quick "cheat sheet" that doubles as a table of contents. Click an ite

---

- [`File`](#file)
  - A subclass of `Blob` that represents a file. Has a `name` and `lastModified` timestamp. There is experimental support in Node.js v20.
<!-- - [`File`](#file)
  - _Browser only_. A subclass of `Blob` that represents a file. Has a `name` and `lastModified` timestamp. There is experimental support in Node.js v20; Bun does not support `File` yet; most of its functionality is provided by `BunFile`.

---

--- -->

- [`BunFile`](#bunfile)
  - _Bun only_. A subclass of `Blob` that represents a lazily-loaded file on disk. Created with `Bun.file(path)`.

@@ -300,7 +300,7 @@ interface BunFile {
  readonly type: string;

  text(): Promise<string>;
  stream(): ReadableStream;
  stream(): Promise<ReadableStream>;
  arrayBuffer(): Promise<ArrayBuffer>;
  json(): Promise<any>;
  writer(params: { highWaterMark?: number }): FileSink;

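(Editor's note, not part of the diff: a minimal usage sketch of the `BunFile` members listed in this hunk. The file paths are hypothetical.)

```ts
// Hypothetical sketch of the BunFile API shown above.
const file = Bun.file("./data.json"); // lazy reference to a file on disk
console.log(file.type); // MIME type, e.g. "application/json;charset=utf-8"

const text = await file.text(); // read contents as a string
const data = await file.json(); // read and parse as JSON

// Incremental writes through the FileSink returned by writer().
const sink = Bun.file("./out.txt").writer({ highWaterMark: 1024 * 1024 });
sink.write("hello\n");
await sink.end();
```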
@@ -93,7 +93,6 @@ interface Bun {
    style: "nextjs";
    origin?: string;
    assetPrefix?: string;
    fileExtensions?: string[];
  });

  reload(): void;

110 docs/api/glob.md
@@ -1,110 +0,0 @@
|
||||
Bun includes a fast native implementation of file globbing.
|
||||
|
||||
## Quickstart
|
||||
|
||||
**Scan a directory for files matching `*.ts`**:
|
||||
|
||||
```ts
|
||||
import { Glob } from "bun";
|
||||
|
||||
const glob = new Glob("*.ts");
|
||||
|
||||
for await (const file of glob.scan(".")) {
|
||||
console.log(file); // => "index.ts"
|
||||
}
|
||||
```
|
||||
|
||||
**Match a string against a glob pattern**:
|
||||
|
||||
```ts
|
||||
import { Glob } from "bun";
|
||||
|
||||
const glob = new Glob("*.ts");
|
||||
|
||||
glob.match("index.ts"); // => true
|
||||
glob.match("index.js"); // => false
|
||||
```
|
||||
|
||||
`Glob` is a class which implements the following interface:
|
||||
|
||||
```ts
|
||||
class Glob {
|
||||
scan(root: string | ScanOptions): AsyncIterable<string>;
|
||||
scanSync(root: string | ScanOptions): Iterable<string>;
|
||||
|
||||
match(path: string): boolean;
|
||||
}
|
||||
|
||||
interface ScanOptions {
|
||||
/**
|
||||
* The root directory to start matching from. Defaults to `process.cwd()`
|
||||
*/
|
||||
cwd?: string;
|
||||
|
||||
/**
|
||||
* Allow patterns to match entries that begin with a period (`.`).
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
dot?: boolean;
|
||||
|
||||
/**
|
||||
* Return the absolute path for entries.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
absolute?: boolean;
|
||||
|
||||
/**
|
||||
* Indicates whether to traverse descendants of symbolic link directories.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
followSymlinks?: boolean;
|
||||
|
||||
/**
|
||||
* Throw an error when symbolic link is broken
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
throwErrorOnBrokenSymlink?: boolean;
|
||||
|
||||
/**
|
||||
* Return only files.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
onlyFiles?: boolean;
|
||||
}
|
||||
```
|
||||
|
||||
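For example, a minimal sketch passing `ScanOptions` to `scan` (the directory name here is illustrative):

```ts
import { Glob } from "bun";

const glob = new Glob("**/*.ts");

// scan "./src", returning absolute paths and skipping dotfiles
for await (const path of glob.scan({ cwd: "./src", absolute: true, dot: false })) {
  console.log(path);
}
```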
## Supported Glob Patterns
|
||||
|
||||
Bun supports the following glob patterns:
|
||||
|
||||
### `*` - Match any number of characters except `/`
|
||||
|
||||
```ts
|
||||
const glob = new Glob("*.ts");
|
||||
glob.match("index.ts"); // => true
|
||||
glob.match("src/index.ts"); // => false
|
||||
```
|
||||
|
||||
### `**` - Match any number of characters including `/`
|
||||
|
||||
```ts
|
||||
const glob = new Glob("**/*.ts");
|
||||
glob.match("index.ts"); // => true
|
||||
glob.match("src/index.ts"); // => true
|
||||
glob.match("src/index.js"); // => false
|
||||
```
|
||||
|
||||
### `{a,b,c}` - Match any of the given patterns
|
||||
|
||||
```ts
|
||||
const glob = new Glob("{a,b,c}.ts");
|
||||
glob.match("a.ts"); // => true
|
||||
glob.match("b.ts"); // => true
|
||||
glob.match("c.ts"); // => true
|
||||
glob.match("d.ts"); // => false
|
||||
```
|
||||
@@ -38,11 +38,6 @@ import.meta.resolveSync("zod")
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.env`
|
||||
- An alias to `process.env`.
|
||||
|
||||
---
|
||||
|
||||
- `import.meta.resolve{Sync}`
|
||||
- Resolve a module specifier (e.g. `"zod"` or `"./file.tsx"`) to an absolute path. Which file would be imported if the specifier were imported from this file?
|
||||
|
||||
|
||||
@@ -1,52 +0,0 @@
|
||||
Bun implements a semantic versioning API which can be used to compare versions and determine if a version is compatible with another range of versions. The versions and ranges are designed to be compatible with `node-semver`, which is used by npm clients.
|
||||
|
||||
It's about 20x faster than `node-semver`.
|
||||
|
||||

|
||||
|
||||
Currently, this API is two functions.
|
||||
|
||||
#### `Bun.semver.satisfies(version: string, range: string): boolean`
|
||||
|
||||
Returns `true` if `version` satisfies `range`, otherwise `false`.
|
||||
|
||||
Example:
|
||||
|
||||
```typescript
|
||||
import { semver } from "bun";
|
||||
|
||||
semver.satisfies("1.0.0", "^1.0.0"); // true
|
||||
semver.satisfies("1.0.0", "^1.0.1"); // false
|
||||
semver.satisfies("1.0.0", "~1.0.0"); // true
|
||||
semver.satisfies("1.0.0", "~1.0.1"); // false
|
||||
semver.satisfies("1.0.0", "1.0.0"); // true
|
||||
semver.satisfies("1.0.0", "1.0.1"); // false
|
||||
semver.satisfies("1.0.1", "1.0.0"); // false
|
||||
semver.satisfies("1.0.0", "1.0.x"); // true
|
||||
semver.satisfies("1.0.0", "1.x.x"); // true
|
||||
semver.satisfies("1.0.0", "x.x.x"); // true
|
||||
semver.satisfies("1.0.0", "1.0.0 - 2.0.0"); // true
|
||||
semver.satisfies("1.0.0", "1.0.0 - 1.0.1"); // true
|
||||
```
|
||||
|
||||
If `range` is invalid, it returns false. If `version` is invalid, it returns false.
|
||||
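For example:

```ts
import { semver } from "bun";

semver.satisfies("not a version", "^1.0.0"); // false (invalid version)
semver.satisfies("1.0.0", "not a range");    // false (invalid range)
```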
|
||||
#### `Bun.semver.order(versionA: string, versionB: string): 0 | 1 | -1`
|
||||
|
||||
Returns `0` if `versionA` and `versionB` are equal, `1` if `versionA` is greater than `versionB`, and `-1` if `versionA` is less than `versionB`.
|
||||
|
||||
Example:
|
||||
|
||||
```typescript
|
||||
import { semver } from "bun";
|
||||
|
||||
semver.order("1.0.0", "1.0.0"); // 0
|
||||
semver.order("1.0.0", "1.0.1"); // -1
|
||||
semver.order("1.0.1", "1.0.0"); // 1
|
||||
|
||||
const unsorted = ["1.0.0", "1.0.1", "1.0.0-alpha", "1.0.0-beta", "1.0.0-rc"];
|
||||
unsorted.sort(semver.order); // ["1.0.0-alpha", "1.0.0-beta", "1.0.0-rc", "1.0.0", "1.0.1"]
|
||||
console.log(unsorted);
|
||||
```
|
||||
|
||||
If you need other semver functions, feel free to open an issue or pull request.
|
||||
@@ -183,60 +183,6 @@ const proc = Bun.spawn(["echo", "hello"]);
|
||||
proc.unref();
|
||||
```
|
||||
|
||||
## Inter-process communication (IPC)
|
||||
|
||||
Bun supports direct inter-process communication channel between two `bun` processes. To receive messages from a spawned Bun subprocess, specify an `ipc` handler.
|
||||
{%callout%}
|
||||
**Note** — This API is only compatible with other `bun` processes. Use `process.execPath` to get a path to the currently running `bun` executable.
|
||||
{%/callout%}
|
||||
|
||||
```ts#parent.ts
|
||||
const child = Bun.spawn(["bun", "child.ts"], {
|
||||
ipc(message) {
|
||||
/**
|
||||
* The message received from the sub process
|
||||
**/
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
The parent process can send messages to the subprocess using the `.send()` method on the returned `Subprocess` instance. A reference to the sending subprocess is also available as the second argument in the `ipc` handler.
|
||||
|
||||
```ts#parent.ts
|
||||
const childProc = Bun.spawn(["bun", "child.ts"], {
|
||||
ipc(message, childProc) {
|
||||
/**
|
||||
* The message received from the sub process
|
||||
**/
|
||||
childProc.send("Respond to child")
|
||||
},
|
||||
});
|
||||
|
||||
childProc.send("I am your father"); // The parent can send messages to the child as well
|
||||
```
|
||||
|
||||
Meanwhile, the child process can send messages to its parent with `process.send()` and receive messages with `process.on("message")`. This is the same API used for `child_process.fork()` in Node.js.
|
||||
|
||||
```ts#child.ts
|
||||
process.send("Hello from child as string");
|
||||
process.send({ message: "Hello from child as object" });
|
||||
|
||||
process.on("message", (message) => {
|
||||
// print message from parent
|
||||
console.log(message);
|
||||
});
|
||||
```
|
||||
|
||||
All messages are serialized using the JSC `serialize` API, which allows for the same set of [transferrable types](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects) supported by `postMessage` and `structuredClone`, including strings, typed arrays, streams, and objects.
|
||||
|
||||
```ts#child.ts
|
||||
// send a string
|
||||
process.send("Hello from child as string");
|
||||
|
||||
// send an object
|
||||
process.send({ message: "Hello from child as object" });
|
||||
```
|
||||
|
||||
## Blocking API (`Bun.spawnSync()`)
|
||||
|
||||
Bun provides a synchronous equivalent of `Bun.spawn` called `Bun.spawnSync`. This is a blocking API that supports the same inputs and parameters as `Bun.spawn`. It returns a `SyncSubprocess` object, which differs from `Subprocess` in a few ways.
|
||||
|
||||
@@ -99,20 +99,6 @@ const query = db.prepare("SELECT * FROM foo WHERE bar = ?");
|
||||
|
||||
{% /callout %}
|
||||
|
||||
## WAL mode
|
||||
|
||||
SQLite supports [write-ahead log mode](https://www.sqlite.org/wal.html) (WAL) which dramatically improves performance, especially in situations with many concurrent writes. It's broadly recommended to enable WAL mode for most typical applications.
|
||||
|
||||
To enable WAL mode, run this pragma query at the beginning of your application:
|
||||
|
||||
```ts
|
||||
db.exec("PRAGMA journal_mode = WAL;");
|
||||
```
|
||||
|
||||
{% details summary="What is WAL mode" %}
|
||||
In WAL mode, writes to the database are written directly to a separate file called the "WAL file" (write-ahead log). This file will be later integrated into the main database file. Think of it as a buffer for pending writes. Refer to the [SQLite docs](https://www.sqlite.org/wal.html) for a more detailed overview.
|
||||
{% /details %}
|
||||
|
||||
## Statements
|
||||
|
||||
A `Statement` is a _prepared query_, which means it's been parsed and compiled into an efficient binary form. It can be executed multiple times in a performant way.
|
||||
|
||||
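A minimal sketch of reusing a prepared statement with `bun:sqlite` (the table and column names are illustrative):

```ts
import { Database } from "bun:sqlite";

const db = new Database("mydb.sqlite");
const stmt = db.prepare("SELECT * FROM foo WHERE bar = ?");

stmt.get("hello"); // first matching row, if any
stmt.all("world"); // all matching rows
```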
@@ -398,7 +398,7 @@ buf; // => Uint8Array(25)
|
||||
compressed; // => Uint8Array(10)
|
||||
```
|
||||
|
||||
The second argument supports the same set of configuration options as [`Bun.gzipSync`](#bungzipsync).
|
||||
The second argument supports the same set of configuration options as [`Bun.gzipSync`](#bun.gzipSync).
|
||||
|
||||
## `Bun.inflateSync()`
|
||||
|
||||
|
||||
@@ -161,7 +161,7 @@ socket.addEventListener("message", event => {
|
||||
|
||||
### Pub/Sub
|
||||
|
||||
Bun's `ServerWebSocket` implementation implements a native publish-subscribe API for topic-based broadcasting. Individual sockets can `.subscribe()` to a topic (specified with a string identifier) and `.publish()` messages to all other subscribers to that topic (excluding itself). This topic-based broadcast API is similar to [MQTT](https://en.wikipedia.org/wiki/MQTT) and [Redis Pub/Sub](https://redis.io/topics/pubsub).
|
||||
Bun's `ServerWebSocket` implementation implements a native publish-subscribe API for topic-based broadcasting. Individual sockets can `.subscribe()` to a topic (specified with a string identifier) and `.publish()` messages to all other subscribers to that topic. This topic-based broadcast API is similar to [MQTT](https://en.wikipedia.org/wiki/MQTT) and [Redis Pub/Sub](https://redis.io/topics/pubsub).
|
||||
|
||||
```ts
|
||||
const server = Bun.serve<{ username: string }>({
|
||||
@@ -192,7 +192,7 @@ const server = Bun.serve<{ username: string }>({
|
||||
close(ws) {
|
||||
const msg = `${ws.data.username} has left the chat`;
|
||||
ws.unsubscribe("the-group-chat");
|
||||
server.publish("the-group-chat", msg);
|
||||
ws.publish("the-group-chat", msg);
|
||||
},
|
||||
},
|
||||
});
|
||||
@@ -200,18 +200,7 @@ const server = Bun.serve<{ username: string }>({
|
||||
console.log(`Listening on ${server.hostname}:${server.port}`);
|
||||
```
|
||||
|
||||
Calling `.publish(data)` will send the message to all subscribers of a topic _except_ the socket that called `.publish()`. To send a message to all subscribers of a topic, use the `.publish()` method on the `Server` instance.
|
||||
|
||||
```ts
|
||||
const server = Bun.serve({
|
||||
websocket: {
|
||||
// ...
|
||||
},
|
||||
});
|
||||
|
||||
// listen for some external event
|
||||
server.publish("the-group-chat", "Hello world");
|
||||
```
|
||||
Calling `.publish(data)` will send the message to all subscribers of a topic _except_ the socket that called `.publish()`.
|
||||
|
||||
### Compression
|
||||
|
||||
|
||||
@@ -328,7 +328,7 @@ Depending on the target, Bun will apply different module resolution rules and op
|
||||
|
||||
All bundles generated with `target: "bun"` are marked with a special `// @bun` pragma, which indicates to the Bun runtime that there's no need to re-transpile the file before execution.
|
||||
|
||||
If any entrypoints contains a Bun shebang (`#!/usr/bin/env bun`) the bundler will default to `target: "bun"` instead of `"browser"`.
|
||||
If any entrypoints contains a Bun shebang (`#!/usr/bin/env bun`) the bundler will default to `target: "bun"` instead of `"browser`.
|
||||
|
||||
---
|
||||
|
||||
@@ -1052,7 +1052,7 @@ $ bun build ./index.tsx --outdir ./out --define 'STRING="value"' --define "neste
|
||||
|
||||
### `loader`
|
||||
|
||||
A map of file extensions to [built-in loader names](https://bun.sh/docs/bundler/loaders#built-in-loaders). This can be used to quickly customize how certain files are loaded.
|
||||
A map of file extensions to [built-in loader names](https://bun.sh/docs/bundler/loaders#built-in-loaders). This can be used to quickly customize how certain file files are loaded.
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
@@ -1096,7 +1096,7 @@ interface BuildArtifact extends Blob {
|
||||
The `outputs` array contains all the files that were generated by the build. Each artifact implements the `Blob` interface.
|
||||
|
||||
```ts
|
||||
const build = await Bun.build({
|
||||
const build = Bun.build({
|
||||
/* */
|
||||
});
|
||||
|
||||
@@ -1140,7 +1140,7 @@ Each artifact also contains the following properties:
|
||||
Similar to `BunFile`, `BuildArtifact` objects can be passed directly into `new Response()`.
|
||||
|
||||
```ts
|
||||
const build = await Bun.build({
|
||||
const build = Bun.build({
|
||||
/* */
|
||||
});
|
||||
|
||||
@@ -1156,7 +1156,7 @@ The Bun runtime implements special pretty-printing of `BuildArtifact` object to
|
||||
|
||||
```ts#Build_script
|
||||
// build.ts
|
||||
const build = await Bun.build({/* */});
|
||||
const build = Bun.build({/* */});
|
||||
|
||||
const artifact = build.outputs[0];
|
||||
console.log(artifact);
|
||||
|
||||
docs/cli/add.md (155 lines)
@@ -1,155 +0,0 @@
|
||||
To add a particular package:
|
||||
|
||||
```bash
|
||||
$ bun add preact
|
||||
```
|
||||
|
||||
To specify a version, version range, or tag:
|
||||
|
||||
```bash
|
||||
$ bun add zod@3.20.0
|
||||
$ bun add zod@^3.0.0
|
||||
$ bun add zod@latest
|
||||
```
|
||||
|
||||
## `--dev`
|
||||
|
||||
{% callout %}
|
||||
**Alias** — `--development`, `-d`, `-D`
|
||||
{% /callout %}
|
||||
|
||||
To add a package as a dev dependency (`"devDependencies"`):
|
||||
|
||||
```bash
|
||||
$ bun add --dev @types/react
|
||||
$ bun add -d @types/react
|
||||
```
|
||||
|
||||
## `--optional`
|
||||
|
||||
To add a package as an optional dependency (`"optionalDependencies"`):
|
||||
|
||||
```bash
|
||||
$ bun add --optional lodash
|
||||
```
|
||||
|
||||
## `--exact`
|
||||
|
||||
To add a package and pin to the resolved version, use `--exact`. This will resolve the version of the package and add it to your `package.json` with an exact version number instead of a version range.
|
||||
|
||||
```bash
|
||||
$ bun add react --exact
|
||||
$ bun add react -E
|
||||
```
|
||||
|
||||
This will add the following to your `package.json`:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"dependencies": {
|
||||
// without --exact
|
||||
"react": "^18.2.0", // this matches >= 18.2.0 < 19.0.0
|
||||
|
||||
// with --exact
|
||||
"react": "18.2.0" // this matches only 18.2.0 exactly
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
To view a complete list of options for this command:
|
||||
|
||||
```bash
|
||||
$ bun add --help
|
||||
```
|
||||
|
||||
## `--global`
|
||||
|
||||
{% callout %}
|
||||
**Note** — This will not modify the `package.json` of your current project folder.
|
||||
**Alias** - `bun add --global`, `bun add -g`, `bun install --global` and `bun install -g`
|
||||
{% /callout %}
|
||||
|
||||
To install a package globally, use the `-g`/`--global` flag. This will not modify the `package.json` of your current project. Typically this is used for installing command-line tools.
|
||||
|
||||
```bash
|
||||
$ bun add --global cowsay # or `bun add -g cowsay`
|
||||
$ cowsay "Bun!"
|
||||
______
|
||||
< Bun! >
|
||||
------
|
||||
\ ^__^
|
||||
\ (oo)\_______
|
||||
(__)\ )\/\
|
||||
||----w |
|
||||
|| ||
|
||||
```
|
||||
|
||||
{% details summary="Configuring global installation behavior" %}
|
||||
|
||||
```toml
|
||||
[install]
|
||||
# where `bun add --global` installs packages
|
||||
globalDir = "~/.bun/install/global"
|
||||
|
||||
# where globally-installed package bins are linked
|
||||
globalBinDir = "~/.bun/bin"
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
## Trusted dependencies
|
||||
|
||||
Unlike other npm clients, Bun does not execute arbitrary lifecycle scripts for installed dependencies, such as `postinstall`. These scripts represent a potential security risk, as they can execute arbitrary code on your machine.
|
||||
|
||||
To tell Bun to allow lifecycle scripts for a particular package, add the package to `trustedDependencies` in your package.json.
|
||||
|
||||
```json-diff
|
||||
{
|
||||
"name": "my-app",
|
||||
"version": "1.0.0",
|
||||
+ "trustedDependencies": ["my-trusted-package"]
|
||||
}
|
||||
```
|
||||
|
||||
Bun reads this field and will run lifecycle scripts for `my-trusted-package`.
|
||||
|
||||
<!-- Bun maintains an allow-list of popular packages containing `postinstall` scripts that are known to be safe. To run lifecycle scripts for packages that aren't on this list, add the package to `trustedDependencies` in your package.json. -->
|
||||
|
||||
## Git dependencies
|
||||
|
||||
To add a dependency from a git repository:
|
||||
|
||||
```bash
|
||||
$ bun add git@github.com:moment/moment.git
|
||||
```
|
||||
|
||||
Bun supports a variety of protocols, including [`github`](https://docs.npmjs.com/cli/v9/configuring-npm/package-json#github-urls), [`git`](https://docs.npmjs.com/cli/v9/configuring-npm/package-json#git-urls-as-dependencies), `git+ssh`, `git+https`, and many more.
|
||||
|
||||
```json
|
||||
{
|
||||
"dependencies": {
|
||||
"dayjs": "git+https://github.com/iamkun/dayjs.git",
|
||||
"lodash": "git+ssh://github.com/lodash/lodash.git#4.17.21",
|
||||
"moment": "git@github.com:moment/moment.git",
|
||||
"zod": "github:colinhacks/zod"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Tarball dependencies
|
||||
|
||||
A package name can correspond to a publicly hosted `.tgz` file. During installation, Bun will download and install the package from the specified tarball URL, rather than from the package registry.
|
||||
|
||||
```sh
|
||||
$ bun add zod@https://registry.npmjs.org/zod/-/zod-3.21.4.tgz
|
||||
```
|
||||
|
||||
This will add the following line to your `package.json`:
|
||||
|
||||
```json#package.json
|
||||
{
|
||||
"dependencies": {
|
||||
"zod": "https://registry.npmjs.org/zod/-/zod-3.21.4.tgz"
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -1,27 +1,3 @@
|
||||
Scaffold an empty Bun project with the interactive `bun init` command.
|
||||
|
||||
```bash
|
||||
$ bun init
|
||||
bun init helps you get started with a minimal project and tries to
|
||||
guess sensible defaults. Press ^C anytime to quit.
|
||||
|
||||
package name (quickstart):
|
||||
entry point (index.ts):
|
||||
|
||||
Done! A package.json file was saved in the current directory.
|
||||
+ index.ts
|
||||
+ .gitignore
|
||||
+ tsconfig.json (for editor auto-complete)
|
||||
+ README.md
|
||||
|
||||
To get started, run:
|
||||
bun run index.ts
|
||||
```
|
||||
|
||||
Press `enter` to accept the default answer for each prompt, or pass the `-y` flag to auto-accept the defaults.
|
||||
|
||||
{% details summary="How `bun init` works" %}
|
||||
|
||||
`bun init` is a quick way to start a blank project with Bun. It guesses with sane defaults and is non-destructive when run multiple times.
|
||||
|
||||

|
||||
@@ -37,4 +13,6 @@ If you pass `-y` or `--yes`, it will assume you want to continue without asking
|
||||
|
||||
At the end, it runs `bun install` to install `bun-types`.
|
||||
|
||||
{% /details %}
|
||||
#### How is `bun init` different than `bun create`?
|
||||
|
||||
`bun init` is for blank projects. `bun create` applies templates.
|
||||
@@ -21,7 +21,7 @@ Configuring with `bunfig.toml` is optional. Bun tries to be zero configuration i
|
||||
|
||||
# Scope name. The value can be a URL string or an object
|
||||
"@mybigcompany" = { token = "123456", url = "https://registry.mybigcompany.com" }
|
||||
# URL is optional and falls back to the default registry
|
||||
# URL is optional and fallsback to the default registry
|
||||
|
||||
# The "@" in the scope is optional
|
||||
mybigcompany2 = { token = "123456" }
|
||||
@@ -59,8 +59,8 @@ optional = true
|
||||
# Install local devDependencies (default: true)
|
||||
dev = true
|
||||
|
||||
# Install peerDependencies (default: true)
|
||||
peer = true
|
||||
# Install peerDependencies (default: false)
|
||||
peer = false
|
||||
|
||||
# When using `bun install -g`, install packages here
|
||||
globalDir = "~/.bun/install/global"
|
||||
@@ -170,7 +170,7 @@ bun stores normalized `cpu` and `os` values from npm in the lockfile, along with
|
||||
|
||||
## Peer dependencies?
|
||||
|
||||
Peer dependencies are handled similarly to yarn. `bun install` will automatically install peer dependencies. If the dependency is marked optional in `peerDependenciesMeta`, an existing dependency will be chosen if possible.
|
||||
Peer dependencies are handled similarly to yarn. `bun install` does not automatically install peer dependencies and will try to choose an existing dependency.
|
||||
|
||||
## Lockfile
|
||||
|
||||
|
||||
docs/cli/create.md (256 lines, new file)
@@ -0,0 +1,256 @@
|
||||
## `bun init`
|
||||
|
||||
Scaffold an empty project with `bun init`. It's an interactive tool.
|
||||
|
||||
```bash
|
||||
$ bun init
|
||||
bun init helps you get started with a minimal project and tries to
|
||||
guess sensible defaults. Press ^C anytime to quit.
|
||||
|
||||
package name (quickstart):
|
||||
entry point (index.ts):
|
||||
|
||||
Done! A package.json file was saved in the current directory.
|
||||
+ index.ts
|
||||
+ .gitignore
|
||||
+ tsconfig.json (for editor auto-complete)
|
||||
+ README.md
|
||||
|
||||
To get started, run:
|
||||
bun run index.ts
|
||||
```
|
||||
|
||||
Press `enter` to accept the default answer for each prompt, or pass the `-y` flag to auto-accept the defaults.
|
||||
|
||||
## `bun create`
|
||||
|
||||
Template a new Bun project with `bun create`.
|
||||
|
||||
```bash
|
||||
$ bun create <template> <destination>
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
**Note** — You don’t need `bun create` to use Bun. You don’t need any configuration at all. This command exists to make getting started a bit quicker and easier.
|
||||
{% /callout %}
|
||||
|
||||
A template can take a number of forms:
|
||||
|
||||
```bash
|
||||
$ bun create <template> # an official template (remote)
|
||||
$ bun create <username>/<repo> # a GitHub repo (remote)
|
||||
$ bun create <local-template> # a custom template (local)
|
||||
```
|
||||
|
||||
Running `bun create` performs the following steps:
|
||||
|
||||
- Download the template (remote templates only)
|
||||
- Copy all template files into the destination folder. By default Bun will _not overwrite_ any existing files. Use the `--force` flag to overwrite existing files.
|
||||
- Install dependencies with `bun install`.
|
||||
- Initialize a fresh Git repo. Opt out with the `--no-git` flag.
|
||||
- Run the template's configured `start` script, if defined.
|
||||
|
||||
<!-- ## Official templates
|
||||
|
||||
The following official templates are available.
|
||||
|
||||
```bash
|
||||
bun create next ./myapp
|
||||
bun create react ./myapp
|
||||
bun create svelte-kit ./myapp
|
||||
bun create elysia ./myapp
|
||||
bun create hono ./myapp
|
||||
bun create kingworld ./myapp
|
||||
```
|
||||
|
||||
Each of these corresponds to a directory in the [bun-community/create-templates](https://github.com/bun-community/create-templates) repo. If you think a major framework is missing, please open a PR there. This list will change over time as additional examples are added. To see an up-to-date list, run `bun create` with no arguments.
|
||||
|
||||
```bash
|
||||
$ bun create
|
||||
Welcome to bun! Create a new project by pasting any of the following:
|
||||
<list of templates>
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
⚡️ **Speed** — At the time of writing, `bun create react app` runs ~11x faster on a M1 Macbook Pro than `yarn create react-app app`.
|
||||
{% /callout %} -->
|
||||
|
||||
## GitHub repos
|
||||
|
||||
A template of the form `<username>/<repo>` will be downloaded from GitHub.
|
||||
|
||||
```bash
|
||||
$ bun create ahfarmer/calculator ./myapp
|
||||
```
|
||||
|
||||
Complete GitHub URLs will also work:
|
||||
|
||||
```bash
|
||||
$ bun create github.com/ahfarmer/calculator ./myapp
|
||||
$ bun create https://github.com/ahfarmer/calculator ./myapp
|
||||
```
|
||||
|
||||
Bun installs the files as they currently exist on the repository's default branch (usually `main`). Unlike `git clone`, it doesn't download the commit history or configure a remote.
|
||||
|
||||
## Local templates
|
||||
|
||||
{% callout %}
|
||||
**⚠️ Warning** — Unlike remote templates, running `bun create` with a local template will delete the entire destination folder if it already exists! Be careful.
|
||||
{% /callout %}
|
||||
Bun's templater can be extended to support custom templates defined on your local file system. These templates should live in one of the following directories:
|
||||
|
||||
- `$HOME/.bun-create/<name>`: global templates
|
||||
- `<project root>/.bun-create/<name>`: project-specific templates
|
||||
|
||||
{% callout %}
|
||||
**Note** — You can customize the global template path by setting the `BUN_CREATE_DIR` environment variable.
|
||||
{% /callout %}
|
||||
|
||||
To create a local template, navigate to `$HOME/.bun-create` and create a new directory with the desired name of your template.
|
||||
|
||||
```bash
|
||||
$ cd $HOME/.bun-create
|
||||
$ mkdir foo
|
||||
$ cd foo
|
||||
```
|
||||
|
||||
Then, create a `package.json` file in that directory with the following contents:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "foo"
|
||||
}
|
||||
```
|
||||
|
||||
You can run `bun create foo` elsewhere on your file system to verify that Bun is correctly finding your local template.
|
||||
|
||||
{% table %}
|
||||
|
||||
---
|
||||
|
||||
- `postinstall`
|
||||
- runs after installing dependencies
|
||||
|
||||
---
|
||||
|
||||
- `preinstall`
|
||||
- runs before installing dependencies
|
||||
|
||||
<!-- ---
|
||||
|
||||
- `start`
|
||||
- a command to auto-start the application -->
|
||||
|
||||
{% /table %}
|
||||
|
||||
Each of these can correspond to a string or array of strings. An array of commands will be executed in order. Here is an example:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "@bun-examples/simplereact",
|
||||
"version": "0.0.1",
|
||||
"main": "index.js",
|
||||
"dependencies": {
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2"
|
||||
},
|
||||
"bun-create": {
|
||||
"preinstall": "echo 'Installing...'", // a single command
|
||||
"postinstall": ["echo 'Done!'"], // an array of commands
|
||||
"start": "bun run echo 'Hello world!'"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
When cloning a template, `bun create` will automatically remove the `"bun-create"` section from `package.json` before writing it to the destination folder.
|
||||
|
||||
## Reference
|
||||
|
||||
### CLI flags
|
||||
|
||||
{% table %}
|
||||
|
||||
- Flag
|
||||
- Description
|
||||
|
||||
---
|
||||
|
||||
- `--force`
|
||||
- Overwrite existing files
|
||||
|
||||
---
|
||||
|
||||
- `--no-install`
|
||||
- Skip installing `node_modules` & tasks
|
||||
|
||||
---
|
||||
|
||||
- `--no-git`
|
||||
- Don’t initialize a git repository
|
||||
|
||||
---
|
||||
|
||||
- `--open`
|
||||
- Start & open in-browser after finish
|
||||
|
||||
{% /table %}
|
||||
|
||||
### Environment variables
|
||||
|
||||
{% table %}
|
||||
|
||||
- Name
|
||||
- Description
|
||||
|
||||
---
|
||||
|
||||
- `GITHUB_API_DOMAIN`
|
||||
- If you’re using a GitHub enterprise or a proxy, you can customize the GitHub domain Bun pings for downloads
|
||||
|
||||
---
|
||||
|
||||
- `GITHUB_API_TOKEN`
|
||||
- This lets `bun create` work with private repositories or if you get rate-limited
|
||||
|
||||
{% /table %}
|
||||
|
||||
{% details summary="How `bun create` works" %}
|
||||
|
||||
When you run `bun create ${template} ${destination}`, here’s what happens:
|
||||
|
||||
IF remote template
|
||||
|
||||
1. GET `registry.npmjs.org/@bun-examples/${template}/latest` and parse it
|
||||
2. GET `registry.npmjs.org/@bun-examples/${template}/-/${template}-${latestVersion}.tgz`
|
||||
3. Decompress & extract `${template}-${latestVersion}.tgz` into `${destination}`
|
||||
|
||||
- If there are files that would overwrite, warn and exit unless `--force` is passed
|
||||
|
||||
IF GitHub repo
|
||||
|
||||
1. Download the tarball from GitHub’s API
|
||||
2. Decompress & extract into `${destination}`
|
||||
|
||||
- If there are files that would overwrite, warn and exit unless `--force` is passed
|
||||
|
||||
ELSE IF local template
|
||||
|
||||
1. Open local template folder
|
||||
2. Delete destination directory recursively
|
||||
3. Copy files recursively using the fastest system calls available (on macOS `fcopyfile` and on Linux, `copy_file_range`). Do not copy or traverse into the `node_modules` folder if it exists (this alone makes it faster than `cp`)
|
||||
|
||||
4. Parse the `package.json` (again!), update `name` to be `${basename(destination)}`, remove the `bun-create` section from the `package.json` and save the updated `package.json` to disk.
|
||||
- IF Next.js is detected, add `bun-framework-next` to the list of dependencies
|
||||
- IF Create React App is detected, add the entry point in /src/index.{js,jsx,ts,tsx} to `public/index.html`
|
||||
- IF Relay is detected, add `bun-macro-relay` so that Relay works
|
||||
5. Auto-detect the npm client, preferring `pnpm`, `yarn` (v1), and lastly `npm`
|
||||
6. Run any tasks defined in `"bun-create": { "preinstall" }` with the npm client
|
||||
7. Run `${npmClient} install` unless `--no-install` is passed OR no dependencies are in package.json
|
||||
8. Run any tasks defined in `"bun-create": { "postinstall" }` with the npm client
|
||||
9. Run `git init; git add -A .; git commit -am "Initial Commit";`
|
||||
|
||||
- Rename `gitignore` to `.gitignore`. NPM automatically removes `.gitignore` files from appearing in packages.
|
||||
- If there are dependencies, this runs in a separate thread concurrently while node_modules are being installed
|
||||
- Using libgit2 (when available) was tested, but it performed 3x slower in microbenchmarks
|
||||
|
||||
{% /details %}
|
||||
@@ -9,7 +9,7 @@ The `bun` CLI contains a Node.js-compatible package manager designed to be a dra
|
||||
{% /callout %}
|
||||
|
||||
{% details summary="For Linux users" %}
|
||||
The recommended minimum Linux Kernel version is 5.6. If you're on Linux kernel 5.1 - 5.5, `bun install` will work, but HTTP requests will be slow due to a lack of support for io_uring's `connect()` operation.
|
||||
The minimum Linux Kernel version is 5.1. If you're on Linux kernel 5.1 - 5.5, `bun install` should still work, but HTTP requests will be slow due to a lack of support for io_uring's `connect()` operation.
|
||||
|
||||
If you're using Ubuntu 20.04, here's how to install a [newer kernel](https://wiki.ubuntu.com/Kernel/LTSEnablementStack):
|
||||
|
||||
@@ -23,19 +23,43 @@ sudo apt install --install-recommends linux-generic-hwe-20.04
|
||||
|
||||
{% /details %}
|
||||
|
||||
## Manage dependencies
|
||||
|
||||
### `bun install`
|
||||
|
||||
To install all dependencies of a project:
|
||||
|
||||
```bash
|
||||
$ bun install
|
||||
```
|
||||
|
||||
On Linux, `bun install` tends to install packages 20-100x faster than `npm install`. On macOS, it's more like 4-80x.
|
||||
|
||||

|
||||
|
||||
Running `bun install` will:
|
||||
|
||||
- **Install** all `dependencies`, `devDependencies`, and `optionalDependencies`. Bun will install `peerDependencies` by default.
|
||||
- **Install** all `dependencies`, `devDependencies`, and `optionalDependencies`. Bun does not install `peerDependencies` by default.
|
||||
- **Run** your project's `{pre|post}install` and `{pre|post}prepare` scripts at the appropriate time. For security reasons Bun _does not execute_ lifecycle scripts of installed dependencies.
|
||||
- **Write** a `bun.lockb` lockfile to the project root.
|
||||
|
||||
## Logging
|
||||
To install in production mode (i.e. without `devDependencies` or `optionalDependencies`):
|
||||
|
||||
```bash
|
||||
$ bun install --production
|
||||
```
|
||||
|
||||
To install with reproducible dependencies, use `--frozen-lockfile`. If your `package.json` disagrees with `bun.lockb`, Bun will exit with an error. This is useful for production builds and CI environments.
|
||||
|
||||
```bash
|
||||
$ bun install --frozen-lockfile
|
||||
```
|
||||
|
||||
To perform a dry run (i.e. don't actually install anything):
|
||||
|
||||
```bash
|
||||
$ bun install --dry-run
|
||||
```
|
||||
|
||||
To modify logging verbosity:
|
||||
|
||||
@@ -44,113 +68,8 @@ $ bun install --verbose # debug logging
|
||||
$ bun install --silent # no logging
|
||||
```
|
||||
|
||||
## Lifecycle scripts
|
||||
|
||||
Unlike other npm clients, Bun does not execute arbitrary lifecycle scripts like `postinstall` for installed dependencies. Executing arbitrary scripts represents a potential security risk.
|
||||
|
||||
To tell Bun to allow lifecycle scripts for a particular package, add the package to `trustedDependencies` in your package.json.
|
||||
|
||||
```json-diff
|
||||
{
|
||||
"name": "my-app",
|
||||
"version": "1.0.0",
|
||||
+ "trustedDependencies": ["my-trusted-package"]
|
||||
}
|
||||
```
|
||||
|
||||
Then re-install the package. Bun will read this field and run lifecycle scripts for `my-trusted-package`.
|
||||
|
||||
## Workspaces
|
||||
|
||||
Bun supports `"workspaces"` in package.json. For complete documentation refer to [Package manager > Workspaces](/docs/install/workspaces).
|
||||
|
||||
```json#package.json
|
||||
{
|
||||
"name": "my-app",
|
||||
"version": "1.0.0",
|
||||
"workspaces": ["packages/*"],
|
||||
"dependencies": {
|
||||
"preact": "^10.5.13"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Overrides and resolutions
|
||||
|
||||
Bun supports npm's `"overrides"` and Yarn's `"resolutions"` in `package.json`. These are mechanisms for specifying a version range for _metadependencies_—the dependencies of your dependencies. Refer to [Package manager > Overrides and resolutions](/docs/install/overrides) for complete documentation.
|
||||
|
||||
```json-diff#package.json
|
||||
{
|
||||
"name": "my-app",
|
||||
"dependencies": {
|
||||
"foo": "^2.0.0"
|
||||
},
|
||||
+ "overrides": {
|
||||
+ "bar": "~4.4.0"
|
||||
+ }
|
||||
}
|
||||
```
|
||||
|
||||
## Global packages
|
||||
|
||||
To install a package globally, use the `-g`/`--global` flag. Typically this is used for installing command-line tools.
|
||||
|
||||
```bash
|
||||
$ bun install --global cowsay # or `bun install -g cowsay`
|
||||
$ cowsay "Bun!"
|
||||
______
|
||||
< Bun! >
|
||||
------
|
||||
\ ^__^
|
||||
\ (oo)\_______
|
||||
(__)\ )\/\
|
||||
||----w |
|
||||
|| ||
|
||||
```
|
||||
|
||||
## Production mode
|
||||
|
||||
To install in production mode (i.e. without `devDependencies` or `optionalDependencies`):
|
||||
|
||||
```bash
|
||||
$ bun install --production
|
||||
```
|
||||
|
||||
For reproducible installs, use `--frozen-lockfile`. This will install the exact versions of each package specified in the lockfile. If your `package.json` disagrees with `bun.lockb`, Bun will exit with an error. The lockfile will not be updated.
|
||||
|
||||
```bash
|
||||
$ bun install --frozen-lockfile
|
||||
```
|
||||
|
||||
For more information on Bun's binary lockfile `bun.lockb`, refer to [Package manager > Lockfile](/docs/install/lockfile).
|
||||
|
||||
## Dry run
|
||||
|
||||
To perform a dry run (i.e. don't actually install anything):
|
||||
|
||||
```bash
|
||||
$ bun install --dry-run
|
||||
```
|
||||
|
||||
## Non-npm dependencies
|
||||
|
||||
Bun supports installing dependencies from Git, GitHub, and local or remotely-hosted tarballs. For complete documentation refer to [Package manager > Git, GitHub, and tarball dependencies](/docs/cli/add).
|
||||
|
||||
```json#package.json
|
||||
{
|
||||
"dependencies": {
|
||||
"dayjs": "git+https://github.com/iamkun/dayjs.git",
|
||||
"lodash": "git+ssh://github.com/lodash/lodash.git#4.17.21",
|
||||
"moment": "git@github.com:moment/moment.git",
|
||||
"zod": "github:colinhacks/zod",
|
||||
"react": "https://registry.npmjs.org/react/-/react-18.2.0.tgz"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
The default behavior of `bun install` can be configured in `bunfig.toml`. The default values are shown below.
|
||||
{% details summary="Configuring behavior" %}
|
||||
The default behavior of `bun install` can be configured in `bunfig.toml`:
|
||||
|
||||
```toml
|
||||
[install]
|
||||
@@ -162,7 +81,7 @@ optional = true
|
||||
dev = true
|
||||
|
||||
# whether to install peerDependencies
|
||||
peer = true
|
||||
peer = false
|
||||
|
||||
# equivalent to `--production` flag
|
||||
production = false
|
||||
@@ -174,9 +93,215 @@ frozenLockfile = false
|
||||
dryRun = false
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
### `bun add`
|
||||
|
||||
To add a particular package:
|
||||
|
||||
```bash
|
||||
$ bun add preact
|
||||
```
|
||||
|
||||
To specify a version, version range, or tag:
|
||||
|
||||
```bash
|
||||
$ bun add zod@3.20.0
|
||||
$ bun add zod@^3.0.0
|
||||
$ bun add zod@latest
|
||||
```
|
||||
|
||||
To add a package as a dev dependency (`"devDependencies"`):
|
||||
|
||||
```bash
|
||||
$ bun add --dev @types/react
|
||||
$ bun add -d @types/react
|
||||
```
|
||||
|
||||
To add a package as an optional dependency (`"optionalDependencies"`):
|
||||
|
||||
```bash
|
||||
$ bun add --optional lodash
|
||||
```
|
||||
|
||||
To add a package and pin to the resolved version, use `--exact`. This will resolve the version of the package and add it to your `package.json` with an exact version number instead of a version range.
|
||||
|
||||
```bash
|
||||
$ bun add react --exact
|
||||
```
|
||||
|
||||
This will add the following to your `package.json`:
|
||||
|
||||
```jsonc
|
||||
{
|
||||
"dependencies": {
|
||||
// without --exact
|
||||
"react": "^18.2.0", // this matches >= 18.2.0 < 19.0.0
|
||||
|
||||
// with --exact
|
||||
"react": "18.2.0" // this matches only 18.2.0 exactly
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
To install a package globally:
|
||||
|
||||
```bash
|
||||
$ bun add --global cowsay # or `bun add -g cowsay`
|
||||
$ cowsay "Bun!"
|
||||
______
|
||||
< Bun! >
|
||||
------
|
||||
\ ^__^
|
||||
\ (oo)\_______
|
||||
(__)\ )\/\
|
||||
||----w |
|
||||
|| ||
|
||||
```
|
||||
|
||||
{% details summary="Configuring global installation behavior" %}
|
||||
|
||||
```toml
|
||||
[install]
|
||||
# where `bun install --global` installs packages
|
||||
globalDir = "~/.bun/install/global"
|
||||
|
||||
# where globally-installed package bins are linked
|
||||
globalBinDir = "~/.bun/bin"
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
To view a complete list of options for a given command:
|
||||
|
||||
```bash
|
||||
$ bun add --help
|
||||
```
|
||||
|
||||
### `bun remove`
|
||||
|
||||
To remove a dependency:
|
||||
|
||||
```bash
|
||||
$ bun remove preact
|
||||
```
|
||||
|
||||
## Local packages (`bun link`)
|
||||
|
||||
Use `bun link` in a local directory to register the current package as a "linkable" package.
|
||||
|
||||
```bash
|
||||
$ cd /path/to/cool-pkg
|
||||
$ cat package.json
|
||||
{
|
||||
"name": "cool-pkg",
|
||||
"version": "1.0.0"
|
||||
}
|
||||
$ bun link
|
||||
bun link v1.x (7416672e)
|
||||
Success! Registered "cool-pkg"
|
||||
|
||||
To use cool-pkg in a project, run:
|
||||
bun link cool-pkg
|
||||
|
||||
Or add it in dependencies in your package.json file:
|
||||
"cool-pkg": "link:cool-pkg"
|
||||
```
|
||||
|
||||
This package can now be "linked" into other projects using `bun link cool-pkg`. This will create a symlink in the `node_modules` directory of the target project, pointing to the local directory.
|
||||
|
||||
```bash
|
||||
$ cd /path/to/my-app
|
||||
$ bun link cool-pkg
|
||||
```
|
||||
|
||||
In addition, the `--save` flag can be used to add `cool-pkg` to the `dependencies` field of your app's package.json with a special version specifier that tells Bun to load from the registered local directory instead of installing from `npm`:
|
||||
|
||||
```json-diff
|
||||
{
|
||||
"name": "my-app",
|
||||
"version": "1.0.0",
|
||||
"dependencies": {
|
||||
+ "cool-pkg": "link:cool-pkg"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Trusted dependencies
|
||||
|
||||
Unlike other npm clients, Bun does not execute arbitrary lifecycle scripts for installed dependencies, such as `postinstall`. These scripts represent a potential security risk, as they can execute arbitrary code on your machine.
|
||||
|
||||
<!-- Bun maintains an allow-list of popular packages containing `postinstall` scripts that are known to be safe. To run lifecycle scripts for packages that aren't on this list, add the package to `trustedDependencies` in your package.json. -->
|
||||
|
||||
To tell Bun to allow lifecycle scripts for a particular package, add the package to `trustedDependencies` in your package.json.
|
||||
|
||||
<!-- ```json-diff
|
||||
{
|
||||
"name": "my-app",
|
||||
"version": "1.0.0",
|
||||
+ "trustedDependencies": {
|
||||
+ "my-trusted-package": "*"
|
||||
+ }
|
||||
}
|
||||
``` -->
|
||||
|
||||
```json-diff
|
||||
{
|
||||
"name": "my-app",
|
||||
"version": "1.0.0",
|
||||
+ "trustedDependencies": ["my-trusted-package"]
|
||||
}
|
||||
```
|
||||
|
||||
Bun reads this field and will run lifecycle scripts for `my-trusted-package`.
|
||||
|
||||
<!-- If you specify a version range, Bun will only execute lifecycle scripts if the resolved package version matches the range. -->
|
||||
<!--
|
||||
```json
|
||||
{
|
||||
"name": "my-app",
|
||||
"version": "1.0.0",
|
||||
"trustedDependencies": {
|
||||
"my-trusted-package": "^1.0.0"
|
||||
}
|
||||
}
|
||||
``` -->
|
||||
|
||||
## Git dependencies
|
||||
|
||||
To add a dependency from a git repository:
|
||||
|
||||
```bash
|
||||
$ bun install git@github.com:moment/moment.git
|
||||
```
|
||||
|
||||
Bun supports a variety of protocols, including [`github`](https://docs.npmjs.com/cli/v9/configuring-npm/package-json#github-urls), [`git`](https://docs.npmjs.com/cli/v9/configuring-npm/package-json#git-urls-as-dependencies), `git+ssh`, `git+https`, and many more.
|
||||
|
||||
```json
|
||||
{
|
||||
"dependencies": {
|
||||
"dayjs": "git+https://github.com/iamkun/dayjs.git",
|
||||
"lodash": "git+ssh://github.com/lodash/lodash.git#4.17.21",
|
||||
"moment": "git@github.com:moment/moment.git",
|
||||
"zod": "github:colinhacks/zod"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Tarball dependencies
|
||||
|
||||
A package name can correspond to a publicly hosted `.tgz` file. During `bun install`, Bun will download and install the package from the specified tarball URL, rather than from the package registry.
|
||||
|
||||
```json#package.json
|
||||
{
|
||||
"dependencies": {
|
||||
"zod": "https://registry.npmjs.org/zod/-/zod-3.21.4.tgz"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## CI/CD
|
||||
|
||||
Looking to speed up your CI? Use the official [`oven-sh/setup-bun`](https://github.com/oven-sh/setup-bun) action to install `bun` in a GitHub Actions pipeline.
|
||||
Looking to speed up your CI? Use the official `oven-sh/setup-bun` action to install `bun` in a GitHub Actions pipeline.
|
||||
|
||||
```yaml#.github/workflows/release.yml
|
||||
name: bun-types
|
||||
|
||||
@@ -1,46 +0,0 @@
|
||||
Use `bun link` in a local directory to register the current package as a "linkable" package.
|
||||
|
||||
```bash
|
||||
$ cd /path/to/cool-pkg
|
||||
$ cat package.json
|
||||
{
|
||||
"name": "cool-pkg",
|
||||
"version": "1.0.0"
|
||||
}
|
||||
$ bun link
|
||||
bun link v1.x (7416672e)
|
||||
Success! Registered "cool-pkg"
|
||||
|
||||
To use cool-pkg in a project, run:
|
||||
bun link cool-pkg
|
||||
|
||||
Or add it in dependencies in your package.json file:
|
||||
"cool-pkg": "link:cool-pkg"
|
||||
```
|
||||
|
||||
This package can now be "linked" into other projects using `bun link cool-pkg`. This will create a symlink in the `node_modules` directory of the target project, pointing to the local directory.
|
||||
|
||||
```bash
|
||||
$ cd /path/to/my-app
|
||||
$ bun link cool-pkg
|
||||
```
|
||||
|
||||
In addition, the `--save` flag can be used to add `cool-pkg` to the `dependencies` field of your app's package.json with a special version specifier that tells Bun to load from the registered local directory instead of installing from `npm`:
|
||||
|
||||
```json-diff
|
||||
{
|
||||
"name": "my-app",
|
||||
"version": "1.0.0",
|
||||
"dependencies": {
|
||||
+ "cool-pkg": "link:cool-pkg"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
To _unregister_ a local package, navigate to the package's root directory and run `bun unlink`.
|
||||
|
||||
```bash
|
||||
$ cd /path/to/cool-pkg
|
||||
$ bun unlink
|
||||
bun unlink v1.x (7416672e)
|
||||
```
|
||||
@@ -1,5 +0,0 @@
|
||||
To remove a dependency:
|
||||
|
||||
```bash
|
||||
$ bun remove ts-node
|
||||
```
|
||||
@@ -72,7 +72,7 @@ $ bun --watch run dev # ✔️ do this
|
||||
$ bun run dev --watch # ❌ don't do this
|
||||
```
|
||||
|
||||
Flags that occur at the end of the command will be ignored and passed through to the `"dev"` script itself.
|
||||
Flags that occur at the end of the command will be ignores and passed through to the `"dev"` script itself.
|
||||
{% /callout %}
|
||||
|
||||
### `--smol`
|
||||
@@ -89,10 +89,6 @@ $ bun --smol run index.tsx
|
||||
Compare to `npm run <script>` or `yarn <script>`
|
||||
{% /note %}
|
||||
|
||||
```sh
|
||||
$ bun [bun flags] run <script> [script flags]
|
||||
```
|
||||
|
||||
Your `package.json` can define a number of named `"scripts"` that correspond to shell commands.
|
||||
|
||||
```jsonc
|
||||
@@ -105,10 +101,10 @@ Your `package.json` can define a number of named `"scripts"` that correspond to
|
||||
}
|
||||
```
|
||||
|
||||
Use `bun run <script>` to execute these scripts.
|
||||
Use `bun <script>` or `bun run <script>` to execute these scripts.
|
||||
|
||||
```bash
|
||||
$ bun run clean
|
||||
$ bun clean
|
||||
$ rm -rf dist && echo 'Done.'
|
||||
Cleaning...
|
||||
Done.
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
To update all dependencies to the latest version _that's compatible with the version range specified in your `package.json`_:
|
||||
|
||||
```sh
|
||||
$ bun update
|
||||
```
|
||||
|
||||
## `--force`
|
||||
|
||||
{% callout %}
|
||||
**Alias** — `-f`
|
||||
{% /callout %}
|
||||
|
||||
By default, Bun respects the version ranges defined in your `package.json`. To ignore them and update each dependency to the latest version, pass the `--force` flag.
|
||||
|
||||
```sh
|
||||
$ bun update --force
|
||||
```
|
||||
@@ -49,7 +49,7 @@ This is useful for preventing flash of unstyled content.
|
||||
|
||||
## With `bun bun`
|
||||
|
||||
Bun bundles `.css` files imported via `@import` into a single file. It doesn’t auto-prefix or minify CSS today. Multiple `.css` files imported in one JavaScript file will _not_ be bundled into one file. You’ll have to import those from a `.css` file.
|
||||
Bun bundles `.css` files imported via `@import` into a single file. It doesn’t autoprefix or minify CSS today. Multiple `.css` files imported in one JavaScript file will _not_ be bundled into one file. You’ll have to import those from a `.css` file.
|
||||
|
||||
This input:
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ $ bun create astro
|
||||
│ ◠ ◡ ◠ We're glad to have you on board.
|
||||
╰─────╯
|
||||
|
||||
astro v3.1.4 Launch sequence initiated.
|
||||
astro v2.10.5 Launch sequence initiated.
|
||||
|
||||
dir Where should we create your new project?
|
||||
./fumbling-field
|
||||
@@ -55,17 +55,21 @@ By default, Bun will run the dev server with Node.js. To use the Bun runtime ins
|
||||
|
||||
```sh
|
||||
$ bunx --bun astro dev
|
||||
🚀 astro v3.1.4 started in 200ms
|
||||
🚀 astro v2.10.5 started in 200ms
|
||||
|
||||
┃ Local http://localhost:4321/
|
||||
┃ Local http://localhost:3000/
|
||||
┃ Network use --host to expose
|
||||
|
||||
01:48:34 PM [content] Watching src/content/ for changes
|
||||
01:48:34 PM [content] Types generated
|
||||
01:48:34 PM [astro] update /.astro/types.d.ts
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Open [http://localhost:4321](http://localhost:4321) with your browser to see the result. Astro will hot-reload your app as you edit your source files.
|
||||
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. Astro will hot-reload your app as you edit your source files.
|
||||
|
||||
{% image src="https://i.imgur.com/Dswiu6w.png" caption="An Astro v3 starter app running on Bun" %}
|
||||
{% image src="https://github.com/vitejs/vite/assets/3084745/bb1d5063-32f4-4598-b33e-50b44a1c4e8a" caption="An Astro starter app running on Bun" %}
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -1,140 +0,0 @@
|
||||
---
|
||||
name: Containerize a Bun application with Docker
|
||||
---
|
||||
|
||||
{% callout %}
|
||||
This guide assumes you already have [Docker Desktop](https://www.docker.com/products/docker-desktop/) installed.
|
||||
{% /callout %}
|
||||
|
||||
[Docker](https://www.docker.com) is a platform for packaging and running an application as a lightweight, portable _container_ that encapsulates all the necessary dependencies.
|
||||
|
||||
---
|
||||
|
||||
To _containerize_ our application, we define a `Dockerfile`. This file contains a list of instructions to initialize the container, copy our local project files into it, install dependencies, and start the application.
|
||||
|
||||
```docker#Dockerfile
|
||||
# use the official Bun image
|
||||
# see all versions at https://hub.docker.com/r/oven/bun/tags
|
||||
FROM oven/bun:1 as base
|
||||
WORKDIR /usr/src/app
|
||||
|
||||
# install dependencies into temp directory
|
||||
# this will cache them and speed up future builds
|
||||
FROM base AS install
|
||||
RUN mkdir -p /temp/dev
|
||||
COPY package.json bun.lockb /temp/dev/
|
||||
RUN cd /temp/dev && bun install --frozen-lockfile
|
||||
|
||||
# install with --production (exclude devDependencies)
|
||||
RUN mkdir -p /temp/prod
|
||||
COPY package.json bun.lockb /temp/prod/
|
||||
RUN cd /temp/prod && bun install --frozen-lockfile --production
|
||||
|
||||
# copy node_modules from temp directory
|
||||
# then copy all (non-ignored) project files into the image
|
||||
FROM install AS prerelease
|
||||
COPY --from=install /temp/dev/node_modules node_modules
|
||||
COPY . .
|
||||
|
||||
# [optional] tests & build
|
||||
ENV NODE_ENV=production
|
||||
RUN bun test
|
||||
RUN bun run build
|
||||
|
||||
# copy production dependencies and source code into final image
|
||||
FROM base AS release
|
||||
COPY --from=install /temp/prod/node_modules node_modules
|
||||
COPY --from=prerelease /usr/src/app/index.ts .
|
||||
COPY --from=prerelease /usr/src/app/package.json .
|
||||
|
||||
# run the app
|
||||
USER bun
|
||||
EXPOSE 3000/tcp
|
||||
ENTRYPOINT [ "bun", "run", "index.ts" ]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Now that you have your `Dockerfile`, let's look at `.dockerignore`, which has the same syntax as `.gitignore`. Here you specify the files and directories that must not be included in any stage of the Docker build. An example ignore file:
|
||||
|
||||
```txt#.dockerignore
|
||||
node_modules
|
||||
Dockerfile*
|
||||
docker-compose*
|
||||
.dockerignore
|
||||
.git
|
||||
.gitignore
|
||||
README.md
|
||||
LICENSE
|
||||
.vscode
|
||||
Makefile
|
||||
helm-charts
|
||||
.env
|
||||
.editorconfig
|
||||
.idea
|
||||
coverage*
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
We'll now use `docker build` to convert this `Dockerfile` into a _Docker image_, a self-contained template containing all the dependencies and configuration required to run the application.
|
||||
|
||||
The `-t` flag lets us specify a name for the image, and `--pull` tells Docker to automatically download the latest version of the base image (`oven/bun`). The initial build will take longer, as Docker will download all the base images and dependencies.
|
||||
|
||||
```bash
|
||||
$ docker build --pull -t bun-hello-world .
|
||||
[+] Building 0.9s (21/21) FINISHED
|
||||
=> [internal] load build definition from Dockerfile 0.0s
|
||||
=> => transferring dockerfile: 37B 0.0s
|
||||
=> [internal] load .dockerignore 0.0s
|
||||
=> => transferring context: 35B 0.0s
|
||||
=> [internal] load metadata for docker.io/oven/bun:1 0.8s
|
||||
=> [auth] oven/bun:pull token for registry-1.docker.io 0.0s
|
||||
=> [base 1/2] FROM docker.io/oven/bun:1@sha256:373265748d3cd3624cb3f3ee6004f45b1fc3edbd07a622aeeec17566d2756997 0.0s
|
||||
=> [internal] load build context 0.0s
|
||||
=> => transferring context: 155B 0.0s
|
||||
# ...lots of commands...
|
||||
=> exporting to image 0.0s
|
||||
=> => exporting layers 0.0s
|
||||
=> => writing image sha256:360663f7fdcd6f11e8e94761d5592e2e4dfc8d167f034f15cd5a863d5dc093c4 0.0s
|
||||
=> => naming to docker.io/library/bun-hello-world 0.0s
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
We've built a new _Docker image_. Now let's use that image to spin up an actual, running _container_.
|
||||
|
||||
We'll use `docker run` to start a new container using the `bun-hello-world` image. It will be run in _detached_ mode (`-d`) and we'll map the container's port 3000 to our local machine's port 3000 (`-p 3000:3000`).
|
||||
|
||||
The `run` command prints a string representing the _container ID_.
|
||||
|
||||
```sh
|
||||
$ docker run -d -p 3000:3000 bun-hello-world
|
||||
7f03e212a15ede8644379bce11a13589f563d3909a9640446c5bbefce993678d
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
The container is now running in the background. Visit [localhost:3000](http://localhost:3000). You should see a `Hello, World!` message.
|
||||
|
||||
---
|
||||
|
||||
To stop the container, we'll use `docker stop <container-id>`.
|
||||
|
||||
```sh
|
||||
$ docker stop 7f03e212a15ede8644379bce11a13589f563d3909a9640446c5bbefce993678d
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
If you can't find the container ID, you can use `docker ps` to list all running containers.
|
||||
|
||||
```sh
|
||||
$ docker ps
|
||||
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
|
||||
7f03e212a15e bun-hello-world "bun run index.ts" 2 minutes ago Up 2 minutes 0.0.0.0:3000->3000/tcp flamboyant_cerf
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
That's it! Refer to the [Docker documentation](https://docs.docker.com/) for more advanced usage.
|
||||
@@ -1,185 +0,0 @@
|
||||
---
|
||||
name: Use Drizzle ORM with Bun
|
||||
---
|
||||
|
||||
Drizzle is an ORM that supports both a SQL-like "query builder" API and an ORM-like [Queries API](https://orm.drizzle.team/docs/rqb). It supports the `bun:sqlite` built-in module.
|
||||
|
||||
---
|
||||
|
||||
Let's get started by creating a fresh project with `bun init` and installing Drizzle.
|
||||
|
||||
```sh
|
||||
$ bun init -y
|
||||
$ bun add drizzle-orm
|
||||
$ bun add -D drizzle-kit
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Then we'll connect to a SQLite database using the `bun:sqlite` module and create the Drizzle database instance.
|
||||
|
||||
```ts#db.ts
|
||||
import { drizzle } from "drizzle-orm/bun-sqlite";
|
||||
import { Database } from "bun:sqlite";
|
||||
|
||||
const sqlite = new Database("sqlite.db");
|
||||
export const db = drizzle(sqlite);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
To see the database in action, add these lines to `index.ts`.
|
||||
|
||||
```ts#index.ts
|
||||
import { db } from "./db";
|
||||
import { sql } from "drizzle-orm";
|
||||
|
||||
const query = sql`select "hello world" as text`;
|
||||
const result = db.get<{ text: string }>(query);
|
||||
console.log(result);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Then run `index.ts` with Bun. Bun will automatically create `sqlite.db` and execute the query.
|
||||
|
||||
```sh
|
||||
$ bun run index.ts
|
||||
{
|
||||
text: "hello world"
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Let's give our database a proper schema. Create a `schema.ts` file and define a `movies` table.
|
||||
|
||||
```ts#schema.ts
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core";

export const movies = sqliteTable("movies", {
  id: integer("id").primaryKey(),
  title: text("name"),
  releaseYear: integer("release_year"),
});
```

---

We can use the `drizzle-kit` CLI to generate an initial SQL migration.

```sh
$ bunx drizzle-kit generate:sqlite --schema ./schema.ts
```

---

This creates a new `drizzle` directory containing a `.sql` migration file and `meta` directory.

```txt
drizzle
├── 0000_ordinary_beyonder.sql
└── meta
    ├── 0000_snapshot.json
    └── _journal.json
```

---

We can execute these migrations with a simple `migrate.ts` script.

This script creates a new connection to a SQLite database that writes to `sqlite.db`, then executes all unexecuted migrations in the `drizzle` directory.

```ts#migrate.ts
import { migrate } from "drizzle-orm/bun-sqlite/migrator";

import { drizzle } from "drizzle-orm/bun-sqlite";
import { Database } from "bun:sqlite";

const sqlite = new Database("sqlite.db");
const db = drizzle(sqlite);
await migrate(db, { migrationsFolder: "./drizzle" });
```

---

We can run this script with `bun` to execute the migration.

```sh
$ bun run migrate.ts
```

---

Now that we have a database, let's add some data to it. Create a `seed.ts` file with the following contents.

```ts#seed.ts
import { db } from "./db";
import * as schema from "./schema";

await db.insert(schema.movies).values([
  {
    title: "The Matrix",
    releaseYear: 1999,
  },
  {
    title: "The Matrix Reloaded",
    releaseYear: 2003,
  },
  {
    title: "The Matrix Revolutions",
    releaseYear: 2003,
  },
]);

console.log(`Seeding complete.`);
```

---

Then run this file.

```sh
$ bun run seed.ts
Seeding complete.
```

---

We finally have a database with a schema and some sample data. Let's use Drizzle to query it. Replace the contents of `index.ts` with the following.

```ts#index.ts
import * as schema from "./schema";
import { db } from "./db";

const result = await db.select().from(schema.movies);
console.log(result);
```

---

Then run the file. You should see the three movies we inserted.

```sh
$ bun run index.ts
bun run index.ts
[
  {
    id: 1,
    title: "The Matrix",
    releaseYear: 1999
  }, {
    id: 2,
    title: "The Matrix Reloaded",
    releaseYear: 2003
  }, {
    id: 3,
    title: "The Matrix Revolutions",
    releaseYear: 2003
  }
]
```

---

Refer to the [Drizzle website](https://orm.drizzle.team/docs/overview) for complete documentation.
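
---

If you only want a subset of rows, Drizzle's `eq` helper (exported from `drizzle-orm`) can be combined with `.where()`. A minimal sketch, reusing the `db` instance and `movies` table defined above:

```ts
import { eq } from "drizzle-orm";
import * as schema from "./schema";
import { db } from "./db";

// select only the movies released in 2003
const sequels = await db
  .select()
  .from(schema.movies)
  .where(eq(schema.movies.releaseYear, 2003));
console.log(sequels);
```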
@@ -1,227 +0,0 @@
---
name: Use EdgeDB with Bun
---

EdgeDB is a graph-relational database powered by Postgres under the hood. It provides a declarative schema language, migrations system, and object-oriented query language, in addition to supporting raw SQL queries. It solves the object-relational mapping problem at the database layer, eliminating the need for an ORM library in your application code.

---

First, [install EdgeDB](https://www.edgedb.com/install) if you haven't already.

{% codetabs %}

```sh#Linux/macOS
$ curl --proto '=https' --tlsv1.2 -sSf https://sh.edgedb.com | sh
```

```sh#Windows
$ iwr https://ps1.edgedb.com -useb | iex
```

{% /codetabs %}

---

Use `bun init` to create a fresh project.

```sh
$ mkdir my-edgedb-app
$ cd my-edgedb-app
$ bun init -y
```

---

We'll use the EdgeDB CLI to initialize an EdgeDB instance for our project. This creates an `edgedb.toml` file in our project root.

```sh
$ edgedb project init
No `edgedb.toml` found in `/Users/colinmcd94/Documents/bun/fun/examples/my-edgedb-app` or above
Do you want to initialize a new project? [Y/n]
> Y
Specify the name of EdgeDB instance to use with this project [default: my_edgedb_app]:
> my_edgedb_app
Checking EdgeDB versions...
Specify the version of EdgeDB to use with this project [default: x.y]:
> x.y
┌─────────────────────┬────────────────────────────────────────────────────────────────────────┐
│ Project directory   │ /Users/colinmcd94/Documents/bun/fun/examples/my-edgedb-app             │
│ Project config      │ /Users/colinmcd94/Documents/bun/fun/examples/my-edgedb-app/edgedb.toml │
│ Schema dir (empty)  │ /Users/colinmcd94/Documents/bun/fun/examples/my-edgedb-app/dbschema    │
│ Installation method │ portable package                                                       │
│ Version             │ x.y+6d5921b                                                            │
│ Instance name       │ my_edgedb_app                                                          │
└─────────────────────┴────────────────────────────────────────────────────────────────────────┘
Version x.y+6d5921b is already downloaded
Initializing EdgeDB instance...
Applying migrations...
Everything is up to date. Revision initial
Project initialized.
To connect to my_edgedb_app, run `edgedb`
```

---

To see if the database is running, let's open a REPL and run a simple query.

Then run `\quit` to exit the REPL.

```sh
$ edgedb
edgedb> select 1 + 1;
2
edgedb> \quit
```

---

With the project initialized, we can define a schema. The `edgedb project init` command already created a `dbschema/default.esdl` file to contain our schema.

```txt
dbschema
├── default.esdl
└── migrations
```

---

Open that file and paste the following contents.

```txt
module default {
  type Movie {
    title: str;
    releaseYear: int64;
  }
};
```

---

Then generate and apply an initial migration.

```sh
$ edgedb migration create
Created /Users/colinmcd94/Documents/bun/fun/examples/my-edgedb-app/dbschema/migrations/00001.edgeql, id: m1uwekrn4ni4qs7ul7hfar4xemm5kkxlpswolcoyqj3xdhweomwjrq
$ edgedb migrate
Applied m1uwekrn4ni4qs7ul7hfar4xemm5kkxlpswolcoyqj3xdhweomwjrq (00001.edgeql)
```

---

With our schema applied, let's execute some queries using EdgeDB's JavaScript client library. We'll install the client library and EdgeDB's codegen CLI, and create a `seed.ts` file.

```sh
$ bun add edgedb
$ bun add -D @edgedb/generate
$ touch seed.ts
```

---

Paste the following code into `seed.ts`.

The client auto-connects to the database. We insert a couple movies using the `.execute()` method.

```ts
import { createClient } from "edgedb";

const client = createClient();

const INSERT_MOVIE = `
  insert Movie {
    title := <str>$title,
    releaseYear := <int64>$year,
  }
`;

const movies = [
  { title: "The Matrix", year: 1999 },
  { title: "The Matrix Reloaded", year: 2003 },
  { title: "The Matrix Revolutions", year: 2003 },
];

for (const movie of movies) {
  await client.execute(INSERT_MOVIE, movie);
}

console.log(`Seeding complete.`);
process.exit();
```

---

Then run this file with Bun.

```sh
$ bun run seed.ts
Seeding complete.
```

---

EdgeDB implements a number of code generation tools for TypeScript. To query our newly seeded database in a typesafe way, we'll use `@edgedb/generate` to code-generate the EdgeQL query builder.

```sh
$ bunx @edgedb/generate edgeql-js
Generating query builder...
Detected tsconfig.json, generating TypeScript files.
To override this, use the --target flag.
Run `npx @edgedb/generate --help` for full options.
Introspecting database schema...
Writing files to ./dbschema/edgeql-js
Generation complete! 🤘
Checking the generated query builder into version control
is not recommended. Would you like to update .gitignore to ignore
the query builder directory? The following line will be added:

dbschema/edgeql-js

[y/n] (leave blank for "y")
> y
```

---

In `index.ts`, we can import the generated query builder from `./dbschema/edgeql-js` and write a simple select query.

```ts
import { createClient } from "edgedb";
import e from "./dbschema/edgeql-js";

const client = createClient();

const query = e.select(e.Movie, () => ({
  title: true,
  releaseYear: true,
}));

const results = await query.run(client);
console.log(results);

results; // { title: string, releaseYear: number | null }[]
```

---

Running the file with Bun, we can see the list of movies we inserted.

```sh
$ bun run index.ts
[
  {
    title: "The Matrix",
    releaseYear: 1999
  }, {
    title: "The Matrix Reloaded",
    releaseYear: 2003
  }, {
    title: "The Matrix Revolutions",
    releaseYear: 2003
  }
]
```

---

For complete documentation, refer to the [EdgeDB docs](https://www.edgedb.com/docs).
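
---

The query builder can also express filters. A minimal sketch, assuming the same generated `./dbschema/edgeql-js` output and `Movie` type from above; `e.op` builds an EdgeQL operator expression:

```ts
import { createClient } from "edgedb";
import e from "./dbschema/edgeql-js";

const client = createClient();

// select only the movies released in 2003
const sequels = e.select(e.Movie, movie => ({
  title: true,
  releaseYear: true,
  filter: e.op(movie.releaseYear, "=", 2003),
}));

console.log(await sequels.run(client));
```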
@@ -1,54 +0,0 @@
---
name: Run Bun as a daemon with PM2
---

[PM2](https://pm2.keymetrics.io/) is a popular process manager that manages and runs your applications as daemons (background processes).

It offers features like process monitoring, automatic restarts, and easy scaling. Using a process manager is common when deploying a Bun application on a cloud-hosted virtual private server (VPS), as it:

- Keeps your application running continuously.
- Ensures high availability and reliability of your application.
- Monitors and manages multiple processes with ease.
- Simplifies the deployment process.

---

You can use PM2 with Bun in two ways: as a CLI option or in a configuration file.

### With `--interpreter`

---

To start your application with PM2 and Bun as the interpreter, open your terminal and run the following command:

```bash
pm2 start --interpreter ~/.bun/bin/bun index.ts
```

---

### With a configuration file

---

Alternatively, you can create a PM2 configuration file. Create a file named `pm2.config.js` in your project directory and add the following content.

```javascript
module.exports = {
  name: "app", // Name of your application
  script: "index.ts", // Entry point of your application
  interpreter: "~/.bun/bin/bun", // Path to the Bun interpreter
};
```

---

After saving the file, you can start your application with PM2.

```bash
pm2 start pm2.config.js
```

---

That's it! Your JavaScript/TypeScript web server is now running as a daemon with PM2 using Bun as the interpreter.
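
---

Once the process is running, the standard PM2 commands apply. A few common ones, assuming the `name: "app"` field from the config above:

```bash
pm2 status        # list managed processes
pm2 logs app      # tail the app's logs
pm2 restart app   # restart the daemon
pm2 save          # persist the current process list
```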
@@ -1,27 +1,21 @@
---
name: Use Prisma with Bun
---

{% callout %}
**Note** — At the moment Prisma needs Node.js to be installed to run certain generation code. Make sure Node.js is installed in the environment where you're running `bunx prisma` commands.
{% /callout %}

name: Get started using Prisma
---

Prisma works out of the box with Bun. First, create a directory and initialize it with `bun init`.

```bash
$ mkdir prisma-app
$ cd prisma-app
$ bun init
mkdir prisma-app
cd prisma-app
bun init
```

---

Then install the Prisma CLI (`prisma`) and Prisma Client (`@prisma/client`) as dependencies.
Then add Prisma as a dependency.

```bash
$ bun add prisma @prisma/client
bun add prisma
```

---

@@ -29,7 +23,7 @@ $ bun add prisma @prisma/client

We'll use the Prisma CLI with `bunx` to initialize our schema and migration directory. For simplicity we'll be using an in-memory SQLite database.

```bash
$ bunx prisma init --datasource-provider sqlite
bunx prisma init --datasource-provider sqlite
```

---

@@ -60,37 +54,14 @@ Then generate and run initial migration.

This will generate a `.sql` migration file in `prisma/migrations`, create a new SQLite instance, and execute the migration against the new instance.

```bash
$ bunx prisma migrate dev --name init
Environment variables loaded from .env
Prisma schema loaded from prisma/schema.prisma
Datasource "db": SQLite database "dev.db" at "file:./dev.db"

SQLite database dev.db created at file:./dev.db

Applying migration `20230928182242_init`

The following migration(s) have been created and applied from new schema changes:

migrations/
  └─ 20230928182242_init/
    └─ migration.sql

Your database is now in sync with your schema.

✔ Generated Prisma Client (v5.3.1) to ./node_modules/@prisma/client in 41ms
bunx prisma migrate dev --name init
```

---

As indicated in the output, Prisma re-generates our _Prisma client_ whenever we execute a new migration. The client provides a fully typed API for reading and writing from our database. You can manually re-generate the client with the Prisma CLI.
Prisma automatically generates our _Prisma client_ whenever we execute a new migration. The client provides a fully typed API for reading and writing from our database.

```sh
$ bunx prisma generate
```

---

We can import the generated client from `@prisma/client`.
It can be imported from `@prisma/client`.

```ts#src/index.ts
import {PrismaClient} from "@prisma/client";

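The hunk above cuts off at the import. As a hedged sketch of what typically follows, assuming a hypothetical `User` model with a `name` field in `schema.prisma` (the schema itself is not shown in this diff), the client is used like this:

```ts
import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

// assumes a hypothetical `User { id, name }` model in schema.prisma
const user = await prisma.user.create({
  data: { name: "Ada" },
});
console.log(user);
```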
@@ -4,7 +4,7 @@ name: Use React and JSX

React just works with Bun. Bun supports `.jsx` and `.tsx` files out of the box.

Remember that JSX is just a special syntax for including HTML-like syntax in JavaScript files. React uses JSX syntax, as do alternatives like [Preact](https://preactjs.com/) and [Solid](https://www.solidjs.com/). Bun's internal transpiler converts JSX syntax into vanilla JavaScript before execution.
Remember that JSX is just a special syntax for including HTML-like syntax in JavaScript files. It's commonReact uses JSX syntax, as do other React alternatives like [Preact](https://preactjs.com/) and [Solid](https://www.solidjs.com/). Bun's internal transpiler converts JSX syntax into vanilla JavaScript before execution.

---

@@ -27,7 +27,7 @@ const element = jsx("h1", { children: "Hello, world!" });

---

This code requires `react` to run, so make sure you've installed React.
This code requires `react` to run, so make sure you you've installed React.

```bash
$ bun install react

@@ -18,7 +18,7 @@ $ bun create remix
dir Where should we create your new project?
./my-app

◼ Using basic template See https://remix.run/docs/en/main/guides/templates#templates for more
◼ Using basic template See https://remix.run/docs/pages/templates for more
✔ Template copied

git Initialize a new git repository?

@@ -1,113 +0,0 @@
---
name: Run Bun as a daemon with systemd
---

[systemd](https://systemd.io) is an init system and service manager for Linux operating systems that manages the startup and control of system processes and services.

<!-- systemd provides aggressive parallelization capabilities, uses socket and D-Bus activation for starting services, offers on-demand starting of daemons, keeps track of processes using Linux control groups, maintains mount and auto mount points, and implements an elaborate transactional dependency-based service control logic. systemd supports SysV and LSB init scripts and works as a replacement for sysvinit. -->

<!-- Other parts include a logging daemon, utilities to control basic system configuration like the hostname, date, locale, maintain a list of logged-in users and running containers and virtual machines, system accounts, runtime directories and settings, and daemons to manage simple network configuration, network time synchronization, log forwarding, and name resolution. -->

---

To run a Bun application as a daemon using **systemd**, you'll need to create a _service file_ in `/lib/systemd/system/`.

```sh
$ cd /lib/systemd/system
$ touch my-app.service
```

---

Here is a typical service file that runs an application on system start. You can use this as a template for your own service. Replace `YOUR_USER` with the name of the user you want to run the application as. To run as `root`, replace `YOUR_USER` with `root`, though this is generally not recommended for security reasons.

Refer to the [systemd documentation](https://www.freedesktop.org/software/systemd/man/systemd.service.html) for more information on each setting.

```ini#my-app.service
[Unit]
# describe the app
Description=My App
# start the app after the network is available
After=network.target

[Service]
# usually you'll use 'simple'
# one of https://www.freedesktop.org/software/systemd/man/systemd.service.html#Type=
Type=simple
# which user to use when starting the app
User=YOUR_USER
# path to your application's root directory
WorkingDirectory=/home/YOUR_USER/path/to/my-app
# the command to start the app
# requires absolute paths
ExecStart=/home/YOUR_USER/.bun/bin/bun run index.ts
# restart policy
# one of {no|on-success|on-failure|on-abnormal|on-watchdog|on-abort|always}
Restart=always

[Install]
# start the app automatically
WantedBy=multi-user.target
```

---

If your application starts a webserver, note that non-`root` users are not able to listen on ports 80 or 443 by default. To permanently allow Bun to listen on these ports when executed by a non-`root` user, use the following command. This step isn't necessary when running as `root`.

```bash
$ sudo setcap CAP_NET_BIND_SERVICE=+eip ~/.bun/bin/bun
```

---

With the service file configured, you can now _enable_ the service. Once enabled, it will start automatically on reboot. This requires `sudo` permissions.

```bash
$ sudo systemctl enable my-app
```

---

To start the service without rebooting, you can manually _start_ it.

```bash
$ sudo systemctl start my-app
```

---

Check the status of your application with `systemctl status`. If you've started your app successfully, you should see something like this:

```bash
$ sudo systemctl status my-app
● my-app.service - My App
     Loaded: loaded (/lib/systemd/system/my-app.service; enabled; preset: enabled)
     Active: active (running) since Thu 2023-10-12 11:34:08 UTC; 1h 8min ago
   Main PID: 309641 (bun)
      Tasks: 3 (limit: 503)
     Memory: 40.9M
        CPU: 1.093s
     CGroup: /system.slice/my-app.service
             └─309641 /home/YOUR_USER/.bun/bin/bun run /home/YOUR_USER/application/index.ts
```

---

To update the service, edit the contents of the service file, then reload the daemon.

```bash
$ sudo systemctl daemon-reload
```

---

For a complete guide on the service unit configuration, you can check [this page](https://www.freedesktop.org/software/systemd/man/systemd.service.html). Or refer to this cheatsheet of common commands:

```bash
$ sudo systemctl daemon-reload # tell systemd that some files got changed
$ sudo systemctl enable my-app # enable the app (to allow auto-start)
$ sudo systemctl disable my-app # disable the app (turns off auto-start)
$ sudo systemctl start my-app # start the app if it is stopped
$ sudo systemctl stop my-app # stop the app
$ sudo systemctl restart my-app # restart the app
```
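
---

To follow the app's output while it runs under systemd, `journalctl` is the standard tool; a small addition to the cheatsheet above:

```bash
$ sudo journalctl -u my-app -f   # follow the service's log output
```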
@@ -30,7 +30,8 @@ bun install

Start the development server with the `vite` CLI using `bunx`.

The `--bun` flag tells Bun to run Vite's CLI using `bun` instead of `node`; by default Bun respects Vite's `#!/usr/bin/env node` [shebang line](<https://en.wikipedia.org/wiki/Shebang_(Unix)>).
The `--bun` flag tells Bun to run Vite's CLI using `bun` instead of `node`; by default Bun respects Vite's `#!/usr/bin/env node` [shebang line](<https://en.wikipedia.org/wiki/Shebang_(Unix)>). After Bun 1.0 this flag will no longer be necessary.

```bash
bunx --bun vite
```

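The same flag applies to Vite's other subcommands. For example, a production build can also run through Bun; this is a hedged convenience sketch, not something the diff above adds:

```bash
bunx --bun vite build   # production build, executed with bun instead of node
```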
@@ -2,7 +2,7 @@
name: Add a peer dependency
---

To add an npm package as a peer dependency, directly modify the `peerDependencies` object in your package.json. Running `bun install` will install peer dependencies by default, unless marked optional in `peerDependenciesMeta`.
To add an npm package as a peer dependency, directly modify the `peerDependencies` object in your package.json. Running `bun install` will not install peer dependencies.

```json-diff
{

@@ -12,7 +12,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      # ...
      - uses: actions/checkout@v4
      - uses: actions/checkout@v3
+     - uses: oven-sh/setup-bun@v1

      # run any `bun` or `bunx` command

@@ -47,4 +47,4 @@ Note that this only allows lifecycle scripts for the specific package listed in
---

See [Docs > Package manager > Trusted dependencies](/docs/install/lifecycle) for complete documentation of trusted dependencies.
See [Docs > Package manager > Trusted dependencies](/docs/cli/install#trusted-dependencies) for complete documentation of trusted dependencies.

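For reference, trusting a package is done through a top-level `trustedDependencies` array in `package.json`. A minimal sketch with a placeholder package name:

```json
{
  "name": "my-app",
  "trustedDependencies": ["better-sqlite3"]
}
```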
@@ -1,66 +0,0 @@
---
name: Spawn a child process and communicate using IPC
---

Use [`Bun.spawn()`](/docs/api/spawn) to spawn a child process. When spawning a second `bun` process, you can open a direct inter-process communication (IPC) channel between the two processes.

{%callout%}
**Note** — This API is only compatible with other `bun` processes. Use `process.execPath` to get a path to the currently running `bun` executable.
{%/callout%}

```ts#parent.ts
const child = Bun.spawn(["bun", "child.ts"], {
  ipc(message) {
    /**
     * The message received from the sub process
     **/
  },
});
```

---

The parent process can send messages to the subprocess using the `.send()` method on the returned `Subprocess` instance. A reference to the sending subprocess is also available as the second argument in the `ipc` handler.

```ts#parent.ts
const childProc = Bun.spawn(["bun", "child.ts"], {
  ipc(message, childProc) {
    /**
     * The message received from the sub process
     **/
    childProc.send("Respond to child");
  },
});

childProc.send("I am your father"); // The parent can send messages to the child as well
```

---

Meanwhile the child process can send messages to its parent using `process.send()` and receive messages with `process.on("message")`. This is the same API used for `child_process.fork()` in Node.js.

```ts#child.ts
process.send("Hello from child as string");
process.send({ message: "Hello from child as object" });

process.on("message", (message) => {
  // print message from parent
  console.log(message);
});
```

---

All messages are serialized using the JSC `serialize` API, which allows for the same set of [transferrable types](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Transferable_objects) supported by `postMessage` and `structuredClone`, including strings, typed arrays, streams, and objects.

```ts#child.ts
// send a string
process.send("Hello from child as string");

// send an object
process.send({ message: "Hello from child as object" });
```

---

See [Docs > API > Child processes](/docs/api/spawn) for complete documentation.
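
---

One detail the guide doesn't show: the parent can also wait for the child to finish. A minimal sketch using the `Subprocess.exited` promise, assuming the same `parent.ts`/`child.ts` layout as above:

```ts#parent.ts
const child = Bun.spawn(["bun", "child.ts"], {
  ipc(message) {
    console.log("from child:", message);
  },
});

child.send("hello");

// wait for the child process to exit before shutting down
await child.exited;
```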
@@ -8,7 +8,7 @@ The `Bun.file()` function accepts a path and returns a `BunFile` instance. The `
const path = "/path/to/package.json";
const file = Bun.file(path);

const stream = file.stream();
const stream = await file.stream();
```

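Whichever variant the diff settles on, the resulting `ReadableStream` can be consumed with `for await`; a small hedged sketch:

```ts
const file = Bun.file("/path/to/package.json");

// in Bun, ReadableStream is async-iterable, so chunks can be read in a loop
for await (const chunk of file.stream()) {
  console.log(`read ${chunk.length} bytes`);
}
```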
---

@@ -35,7 +35,7 @@ jobs:
      # ...
      - uses: oven-sh/setup-bun@v1
+       with:
+         bun-version: 1.0.11 # or "latest", "canary", <sha>
+         version: 0.7.0 # or "canary"
```

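For context, a minimal complete workflow using the action might look like this; a hedged sketch whose `bun-version` key matches the snippet above:

```yaml
name: CI
on: [push]

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: oven-sh/setup-bun@v1
        with:
          bun-version: latest
      - run: bun install
      - run: bun test
```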
---

@@ -2,12 +2,6 @@
name: Debugging Bun with the VS Code extension
---

{% note %}

VSCode extension support is currently buggy. We recommend the [Web Debugger](https://bun.sh/guides/runtime/web-debugger) for now.

{% /note %}

Bun speaks the [WebKit Inspector Protocol](https://github.com/oven-sh/bun/blob/main/packages/bun-vscode/types/jsc.d.ts) so you can debug your code with an interactive debugger.

---

@@ -32,6 +32,7 @@ Some notable missing features:
- `expect.extend()`
- `expect().toMatchInlineSnapshot()`
- `expect().toHaveBeenCalledWith()`
- `expect().toHaveReturned()`

---

@@ -20,7 +20,7 @@ test("party like it's 1999", () => {
---

The `setSystemTime` function is commonly used in conjunction with [Lifecycle Hooks](/docs/test/lifecycle) to configure a testing environment with a deterministic "fake clock".
The `setSystemTime` function is commonly used in conjunction with [Lifecycle Hooks](/docs/test/lifecycle) to configure a testing environment with a determinstic "fake clock".

```ts
import { test, expect, beforeAll, setSystemTime } from "bun:test";

@@ -64,7 +64,7 @@ Ran 2 tests across 1 files. [15.00ms]
All tests have a name, defined using the first parameter to the `test` function. Tests can also be grouped into suites with `describe`.

```ts
import { test, expect, describe } from "bun:test";
import { test, expect } from "bun:test";

describe("math", () => {
  test("add", () => {

@@ -2,7 +2,7 @@
name: Get the file name of the current file
---

Bun provides a handful of module-specific utilities on the [`import.meta`](/docs/api/import-meta) object. Use `import.meta.file` to retrieve the name of the current file.
Bun provides a handful of module-specific utilities on the [`import.meta`](/docs/api/import-meta) object. Use `import.meta.file` to retreive the name of the current file.

```ts#/a/b/c.ts
import.meta.file; // => "c.ts"

@@ -2,7 +2,7 @@
name: Get the absolute path of the current file
---

Bun provides a handful of module-specific utilities on the [`import.meta`](/docs/api/import-meta) object. Use `import.meta.path` to retrieve the absolute path of the current file.
Bun provides a handful of module-specific utilities on the [`import.meta`](/docs/api/import-meta) object. Use `import.meta.path` to retreive the absolute path of the current file.

```ts#/a/b/c.ts
import.meta.path; // => "/a/b/c.ts"

@@ -47,7 +47,7 @@ Bun.serve<WebSocketData>({
    // use a library to parse cookies
    const cookies = parseCookies(req.headers.get("Cookie"));
    const token = cookies["X-Token"];
    const user = await getUserFromToken(token);
    const user = await getUserFromToken(ws.data.authToken);

    const upgraded = server.upgrade(req, {
      data: {

Some files were not shown because too many files have changed in this diff.