Compare commits

..

68 Commits

Author SHA1 Message Date
Meghan Denny
27f4389b4c wip: implement CompressionStream and DecompressionStream globals 2024-09-30 20:33:38 -07:00
dave caruso
9ab51983b8 chore: make watcher use an anyopaque pointer context (#14224) 2024-09-28 01:44:55 -07:00
Jarred Sumner
af82a446df Support hmac in Bun.CryptoHasher (#14210) 2024-09-27 23:33:22 -07:00
190n
dd12715071 Propagate exceptions in napi_run_script (#14222) 2024-09-27 22:27:57 -07:00
dave caruso
514d37b3d2 kit: implement server components dev server (#14025) 2024-09-27 20:53:39 -07:00
Jarred Sumner
d09df1af47 Deflake a test 2024-09-27 14:22:59 -07:00
Meghan Denny
05afe42f31 fix Buffer.fill with a non-null empty fill including uninitialized bytes (#14199) 2024-09-27 02:50:32 -07:00
Meghan Denny
123b5219e0 include code in detached buffer error for Buffer.isUtf8 and Buffer.isAscii (#14197) 2024-09-27 02:49:38 -07:00
Meghan Denny
7113206a7d fix MAX_STRING_LENGTH constant value (#14196) 2024-09-27 02:48:04 -07:00
Andres Kalle
89fc3ef34d types: clarified parameter name (#14209) 2024-09-27 02:09:37 -07:00
Meghan Denny
392a58b0ed remove duplicate root.h includes (#14194) 2024-09-26 22:01:05 -07:00
Ciro Spaciari
02fb802b25 add req.json leak test (#14191) 2024-09-26 22:00:40 -07:00
Meghan Denny
69d33bb1d0 fix small leak in node:process.execArgv getter (#14154) 2024-09-26 17:10:14 -07:00
190n
4e51f7d85b Refactoring and bug fixes in the V8 API (#13754)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-09-26 16:54:37 -07:00
Robert Shuford
5e97fb8d97 Support reading from $HOME/.npmrc (#13990) 2024-09-26 14:41:28 -07:00
Meghan Denny
d42c032eec cleanup some error handling in BunProcess (#14178) 2024-09-26 14:20:35 -07:00
Jarred Sumner
afe974a175 Update color.md 2024-09-26 13:47:09 -07:00
Zack Radisic
274e5a2022 CSS Parser (#14122)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-09-26 13:39:26 -07:00
Jarred Sumner
18822b9f45 Support AbortSignal in Bun.spawn (#14180) 2024-09-26 10:54:54 -07:00
Meghan Denny
7b058e24ff fix memory leak in Bun.shellEscape return value (#14130) 2024-09-25 18:13:28 -07:00
Meghan Denny
ec7078a006 dont leak the address string in UDPSocket.addressToString (#14127) 2024-09-25 18:12:32 -07:00
snwy
af12ff104a fix utf8 handling when importing json (#14168) 2024-09-25 17:50:11 -07:00
Meghan Denny
80db770521 rework node:zlib to match internal js api and properly support async writes (#14079)
Co-authored-by: nektro <nektro@users.noreply.github.com>
2024-09-25 15:55:53 -07:00
Meghan Denny
c4c3019cb0 no need to cache protocol in NewServer when its statically known (#14128) 2024-09-25 14:47:50 -07:00
Ashcon Partovi
1f0f666210 Fix zig build again (#14165) 2024-09-25 13:02:56 -07:00
Ashcon Partovi
73f90c3359 Remove unused .docker directory 2024-09-25 12:43:47 -07:00
Ashcon Partovi
291a50aff5 Fix zig build (#14163) 2024-09-25 12:42:47 -07:00
Ashcon Partovi
128c658f91 Use ephemeral vendor path for now 2024-09-25 11:19:42 -07:00
Ashcon Partovi
a87341b239 Fix download zig script 2024-09-25 10:55:22 -07:00
Ashcon Partovi
3ab990e615 cmake: Fix zig build issue 2024-09-25 10:45:43 -07:00
Ashcon Partovi
ecf5d79e01 bun run clang-tidy (#14162) 2024-09-25 10:31:38 -07:00
Ashcon Partovi
b9a56a6087 cmake: Add target to download Node.js headers 2024-09-25 09:39:06 -07:00
Jarred Sumner
5722ae8d04 Make prototype pollution attacks harder in most Bun APIs that accept objects (#14119) 2024-09-25 01:16:29 -07:00
Meghan Denny
2856267fda add missing defers to JSBundler.Plugin.hasAnyMatches (#14129) 2024-09-25 01:15:32 -07:00
Meghan Denny
da70c891df dont leak the message when node:util.getSystemErrorName is passed an invalid code (#14126) 2024-09-25 01:13:22 -07:00
Wilmer Paulino
6f27b5559d Switch RSA asymmetric sign implementation to BoringSSL (#14125) 2024-09-25 01:12:50 -07:00
Ashcon Partovi
117e1b3883 bun run prettier (#14153)
Co-authored-by: Electroid <Electroid@users.noreply.github.com>
2024-09-24 22:46:18 -07:00
Ashcon Partovi
1e1025ca37 bun run zig-format (#14152) 2024-09-24 22:10:12 -07:00
Ashcon Partovi
30dc72c17b bun run clang-format (#14148)
Co-authored-by: Electroid <Electroid@users.noreply.github.com>
2024-09-24 20:39:29 -07:00
Jarred Sumner
17d719fa4e Make server.stop return a Promise that fulfills when all opened connections are closed (#14120) 2024-09-24 14:07:29 -07:00
Meghan Denny
0ac2a7da0a dont leak return value in crash_handler jsGetFeaturesAsVLQ (#14131) 2024-09-24 12:26:58 -07:00
Dylan Conway
9d23ce16ec fix(install): relative paths to tarballs in workspaces (#14121) 2024-09-23 22:44:24 -07:00
Ashcon Partovi
7d94c59545 Publish VSCode extension 2024-09-23 16:05:12 -07:00
Meghan Denny
33075394a4 cpp: always return empty JSValue value after throwing exception (#13935)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-09-23 13:53:08 -07:00
Jarred Sumner
ff9560c82a Fix unbalanced ref count involving file descriptors passed to Bun.connect (#14107) 2024-09-23 10:28:50 -07:00
Jarred Sumner
2f8c20ef82 Implement --registry CLI flag in bun install (#14090) 2024-09-22 21:27:33 -07:00
Jarred Sumner
d05070dbfd Fix regression from #13414 (#14092) 2024-09-22 16:02:49 -07:00
Jarred Sumner
1244907a92 Bump 2024-09-22 02:53:21 -07:00
Jarred Sumner
81e5ee26bd Don't re-create the FIFO in streams every time (#14088) 2024-09-22 02:13:11 -07:00
Jarred Sumner
27e7aa7923 Update from-npm-install-to-bun-install.md 2024-09-22 00:33:41 -07:00
Jarred Sumner
f89623aa5e Update from-npm-install-to-bun-install.md 2024-09-22 00:31:01 -07:00
Jarred Sumner
3cc51ceb98 Update from-npm-install-to-bun-install.md 2024-09-22 00:30:08 -07:00
Jarred Sumner
e944bb3638 Update from-npm-install-to-bun-install.md 2024-09-22 00:29:23 -07:00
Jarred Sumner
797750ef42 Update from-npm-install-to-bun-install.md 2024-09-22 00:27:47 -07:00
Jarred Sumner
c267d76f05 Update from-npm-install-to-bun-install.md 2024-09-22 00:25:47 -07:00
Jarred Sumner
c5c1e8ff3a Update from-npm-install-to-bun-install.md 2024-09-22 00:24:56 -07:00
Jarred Sumner
1eab8ec107 Update from-npm-install-to-bun-install.md 2024-09-22 00:23:44 -07:00
Jarred Sumner
60d8c8ad4c Update from-npm-install-to-bun-install.md 2024-09-22 00:17:41 -07:00
Jarred Sumner
dba108f8c4 Update from-npm-install-to-bun-install.md 2024-09-21 23:32:34 -07:00
Jarred Sumner
18251e1b60 Create from-npm-install-to-bun-install.md 2024-09-21 23:29:16 -07:00
Xmarmalade
0bc21b3ddf docs: add ccache for Windows System Dependencies (#14080) 2024-09-21 01:35:01 -07:00
Jarred Sumner
c298b23c45 Fix process.cwd on windows (#14081) 2024-09-21 01:32:23 -07:00
snwy
722e3fa481 fix for windows debug support (#14048)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-09-21 00:20:33 -07:00
Jarred Sumner
3fc092d23f Fix ci issue 2024-09-20 22:13:16 -07:00
Jarred Sumner
6e0847ca49 Fix searching for lld in $PATH on Linux 2024-09-20 22:09:21 -07:00
Jarred Sumner
7a190de2f1 Fix upload-s3.ts script 2024-09-20 21:34:50 -07:00
Dylan Conway
57a1d7b4ee update 2024-09-20 20:38:52 -07:00
Dylan Conway
3e0e99176a udpate 2024-09-20 20:37:49 -07:00
569 changed files with 70014 additions and 17965 deletions
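Addendum (editor's sketches): several commits above introduce user-facing APIs. The short JavaScript sketches below are illustrative only, written from the commit messages rather than the PR diffs; any option names or signatures not stated in a commit message are assumptions. First, commit 27f4389b4c adds CompressionStream and DecompressionStream globals; assuming they follow the standard Web Streams interfaces, a gzip round-trip looks like this:

// Round-trip a string through the new globals (standard Web Streams API assumed).
const source = new Response("hello world").body;
const compressed = source.pipeThrough(new CompressionStream("gzip"));
const restored = compressed.pipeThrough(new DecompressionStream("gzip"));
console.log(await new Response(restored).text()); // "hello world"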
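Commit af82a446df adds HMAC support to Bun.CryptoHasher (#14210). A minimal sketch, assuming the HMAC key is passed as a second constructor argument (check the PR for the exact signature):

// HMAC-SHA256 over a message; the two-argument constructor is an assumption.
const hmac = new Bun.CryptoHasher("sha256", "my-secret-key");
hmac.update("hello world");
console.log(hmac.digest("hex"));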
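Commit 05afe42f31 fixes Buffer.fill when given a non-null empty fill value. Under Node.js semantics an empty string fill is coerced to zero, so after the fix the buffer should contain no uninitialized bytes:

// After the fix, an empty-string fill should zero the buffer.
const buf = Buffer.allocUnsafe(8).fill("");
console.log([...buf]); // expected: [0, 0, 0, 0, 0, 0, 0, 0]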
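Commit 7113206a7d corrects the MAX_STRING_LENGTH constant, which node:buffer exposes on its constants object:

import { constants } from "node:buffer";

// Maximum string length the engine allows; the commit fixes the reported value.
console.log(constants.MAX_STRING_LENGTH);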
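Commit 18822b9f45 adds AbortSignal support to Bun.spawn (#14180). A sketch assuming the option is named signal, mirroring node:child_process:

const controller = new AbortController();
const proc = Bun.spawn(["sleep", "10"], { signal: controller.signal });
setTimeout(() => controller.abort(), 100); // terminate the child after 100 ms
await proc.exited;
console.log("exit code:", proc.exitCode);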
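Commit 80db770521 reworks node:zlib to match the internal JS API and properly support async writes (#14079). A sketch of the async write path through a gzip Transform stream:

import { createGzip } from "node:zlib";

const gzip = createGzip();
gzip.on("data", chunk => console.log("compressed chunk of", chunk.length, "bytes"));
gzip.on("end", () => console.log("done"));
gzip.write("hello ");
gzip.write("world");
gzip.end();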
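Commit 17d719fa4e makes server.stop() return a Promise that fulfills once all open connections have closed (#14120):

const server = Bun.serve({ port: 0, fetch: () => new Response("ok") });
// Previously stop() returned immediately; it now resolves after the last connection closes.
await server.stop();
console.log("all connections closed");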


@@ -12,14 +12,14 @@ steps:
- key: "darwin-aarch64"
group: ":darwin: aarch64"
steps:
- key: "darwin-aarch64-build-vendor"
label: ":darwin: aarch64 - build-vendor"
- key: "darwin-aarch64-build-deps"
label: ":darwin: aarch64 - build-deps"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
command:
- "bun run build:ci --target vendor"
- "bun run build:ci --target dependencies"
- key: "darwin-aarch64-build-cpp"
label: ":darwin: aarch64 - build-cpp"
@@ -46,7 +46,7 @@ steps:
os: "darwin"
arch: "aarch64"
depends_on:
- "darwin-aarch64-build-vendor"
- "darwin-aarch64-build-deps"
- "darwin-aarch64-build-cpp"
- "darwin-aarch64-build-zig"
env:
@@ -114,14 +114,14 @@ steps:
- key: "darwin-x64"
group: ":darwin: x64"
steps:
- key: "darwin-x64-build-vendor"
label: ":darwin: x64 - build-vendor"
- key: "darwin-x64-build-deps"
label: ":darwin: x64 - build-deps"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
command:
- "bun run build:ci --target vendor"
- "bun run build:ci --target dependencies"
- key: "darwin-x64-build-cpp"
label: ":darwin: x64 - build-cpp"
@@ -148,7 +148,7 @@ steps:
os: "darwin"
arch: "x64"
depends_on:
- "darwin-x64-build-vendor"
- "darwin-x64-build-deps"
- "darwin-x64-build-cpp"
- "darwin-x64-build-zig"
env:
@@ -216,14 +216,14 @@ steps:
- key: "linux-x64"
group: ":linux: x64"
steps:
- key: "linux-x64-build-vendor"
label: ":linux: x64 - build-vendor"
- key: "linux-x64-build-deps"
label: ":linux: x64 - build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
command:
- "bun run build:ci --target vendor"
- "bun run build:ci --target dependencies"
- key: "linux-x64-build-cpp"
label: ":linux: x64 - build-cpp"
@@ -250,7 +250,7 @@ steps:
os: "linux"
arch: "x64"
depends_on:
- "linux-x64-build-vendor"
- "linux-x64-build-deps"
- "linux-x64-build-cpp"
- "linux-x64-build-zig"
env:
@@ -349,8 +349,8 @@ steps:
- key: "linux-x64-baseline"
group: ":linux: x64-baseline"
steps:
- key: "linux-x64-baseline-build-vendor"
label: ":linux: x64-baseline - build-vendor"
- key: "linux-x64-baseline-build-deps"
label: ":linux: x64-baseline - build-deps"
agents:
queue: "build-linux"
os: "linux"
@@ -358,7 +358,7 @@ steps:
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target vendor"
- "bun run build:ci --target dependencies"
- key: "linux-x64-baseline-build-cpp"
label: ":linux: x64-baseline - build-cpp"
@@ -388,7 +388,7 @@ steps:
os: "linux"
arch: "x64"
depends_on:
- "linux-x64-baseline-build-vendor"
- "linux-x64-baseline-build-deps"
- "linux-x64-baseline-build-cpp"
- "linux-x64-baseline-build-zig"
env:
@@ -488,14 +488,14 @@ steps:
- key: "linux-aarch64"
group: ":linux: aarch64"
steps:
- key: "linux-aarch64-build-vendor"
label: ":linux: aarch64 - build-vendor"
- key: "linux-aarch64-build-deps"
label: ":linux: aarch64 - build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
command:
- "bun run build:ci --target vendor"
- "bun run build:ci --target dependencies"
- key: "linux-aarch64-build-cpp"
label: ":linux: aarch64 - build-cpp"
@@ -522,7 +522,7 @@ steps:
os: "linux"
arch: "aarch64"
depends_on:
- "linux-aarch64-build-vendor"
- "linux-aarch64-build-deps"
- "linux-aarch64-build-cpp"
- "linux-aarch64-build-zig"
env:
@@ -621,14 +621,18 @@ steps:
- key: "windows-x64"
group: ":windows: x64"
steps:
- key: "windows-x64-build-vendor"
label: ":windows: x64 - build-vendor"
- key: "windows-x64-build-deps"
label: ":windows: x64 - build-deps"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
command:
- "bun run build:ci --target vendor"
- "bun run build:ci --target dependencies"
- key: "windows-x64-build-cpp"
label: ":windows: x64 - build-cpp"
@@ -636,6 +640,10 @@ steps:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
BUN_CPP_ONLY: "ON"
command:
@@ -655,9 +663,13 @@ steps:
os: "windows"
arch: "x64"
depends_on:
- "windows-x64-build-vendor"
- "windows-x64-build-deps"
- "windows-x64-build-cpp"
- "windows-x64-build-zig"
retry:
automatic:
- exit_status: 255
limit: 5
env:
BUN_LINK_ONLY: "ON"
command:
@@ -692,16 +704,20 @@ steps:
- key: "windows-x64-baseline"
group: ":windows: x64-baseline"
steps:
- key: "windows-x64-baseline-build-vendor"
label: ":windows: x64-baseline - build-vendor"
- key: "windows-x64-baseline-build-deps"
label: ":windows: x64-baseline - build-deps"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target vendor"
- "bun run build:ci --target dependencies"
- key: "windows-x64-baseline-build-cpp"
label: ":windows: x64-baseline - build-cpp"
@@ -709,6 +725,10 @@ steps:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
BUN_CPP_ONLY: "ON"
@@ -731,9 +751,13 @@ steps:
os: "windows"
arch: "x64"
depends_on:
- "windows-x64-baseline-build-vendor"
- "windows-x64-baseline-build-deps"
- "windows-x64-baseline-build-cpp"
- "windows-x64-baseline-build-zig"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
BUN_LINK_ONLY: "ON"

File diff suppressed because it is too large.


@@ -1,14 +0,0 @@
# Note: 2 blank lines are required between entries
Package: *
Pin: release a=eoan
Pin-Priority: 500
Package: *
Pin: origin "ftp.debian.org"
Pin-Priority: 300
# Pattern includes 'chromium', 'chromium-browser' and similarly
# named dependencies:
Package: chromium*
Pin: origin "ftp.debian.org"
Pin-Priority: 700


@@ -1,8 +0,0 @@
#!/bin/bash
set -euxo pipefail
name=$(openssl rand -hex 12)
id=$(docker create --name=bun-binary-$name $CONTAINER_TAG)
docker container cp bun-binary-$name:$BUN_RELEASE_DIR bun-binary
echo -e "bun-binary-$name"


@@ -1,3 +0,0 @@
deb http://deb.debian.org/debian buster main
deb http://deb.debian.org/debian buster-updates main
deb http://deb.debian.org/debian-security buster/updates main


@@ -1,34 +0,0 @@
export DOCKER_BUILDKIT=1
export BUILDKIT_ARCH=$(uname -m)
export ARCH=${BUILDKIT_ARCH}
if [ "$BUILDKIT_ARCH" == "amd64" ]; then
export BUILDKIT_ARCH="amd64"
export ARCH=x64
fi
if [ "$BUILDKIT_ARCH" == "x86_64" ]; then
export BUILDKIT_ARCH="amd64"
export ARCH=x64
fi
if [ "$BUILDKIT_ARCH" == "arm64" ]; then
export BUILDKIT_ARCH="arm64"
export ARCH=aarch64
fi
if [ "$BUILDKIT_ARCH" == "aarch64" ]; then
export BUILDKIT_ARCH="arm64"
export ARCH=aarch64
fi
if [ "$BUILDKIT_ARCH" == "armv7l" ]; then
echo "Unsupported platform: $BUILDKIT_ARCH"
exit 1
fi
export BUILD_ID=$(cat build-id)
export CONTAINER_NAME=bun-linux-$ARCH
export DEBUG_CONTAINER_NAME=debug-bun-linux-$ARCH
export TEMP=/tmp/bun-0.0.$BUILD_ID


@@ -1,11 +0,0 @@
#!/bin/bash
set -euxo pipefail
docker pull bunbunbunbun/bun-test-base:latest --platform=linux/amd64
docker pull bunbunbunbun/bun-base:latest --platform=linux/amd64
docker pull bunbunbunbun/bun-base-with-zig-and-webkit:latest --platform=linux/amd64
docker tag bunbunbunbun/bun-test-base:latest bun-base:latest
docker tag bunbunbunbun/bun-base:latest bun-base:latest
docker tag bunbunbunbun/bun-base-with-zig-and-webkit:latest bun-base-with-zig-and-webkit:latest


@@ -1,47 +0,0 @@
#!/bin/bash
source "dockerfile-common.sh"
export $CONTAINER_NAME=$CONTAINER_NAME-local
rm -rf $TEMP
mkdir -p $TEMP
docker build . --target release --progress=plain -t $CONTAINER_NAME:latest --build-arg BUILDKIT_INLINE_CACHE=1 --platform=linux/$BUILDKIT_ARCH --cache-from $CONTAINER_NAME:latest
if (($?)); then
echo "Failed to build container"
exit 1
fi
id=$(docker create $CONTAINER_NAME:latest)
docker cp $id:/home/ubuntu/bun-release $TEMP/$CONTAINER_NAME
if (($?)); then
echo "Failed to cp container"
exit 1
fi
cd $TEMP
mkdir -p $TEMP/$CONTAINER_NAME $TEMP/$DEBUG_CONTAINER_NAME
mv $CONTAINER_NAME/bun-profile $DEBUG_CONTAINER_NAME/bun
zip -r $CONTAINER_NAME.zip $CONTAINER_NAME
zip -r $DEBUG_CONTAINER_NAME.zip $DEBUG_CONTAINER_NAME
docker rm -v $id
abs=$(realpath $TEMP/$CONTAINER_NAME.zip)
debug_abs=$(realpath $TEMP/$DEBUG_CONTAINER_NAME.zip)
case $(uname -s) in
"Linux") target="linux" ;;
*) target="other" ;;
esac
if [ "$target" = "linux" ]; then
if command -v bun --version >/dev/null; then
cp $TEMP/$CONTAINER_NAME/bun $(which bun)
cp $TEMP/$DEBUG_CONTAINER_NAME/bun $(which bun-profile)
fi
fi
echo "Saved to:"
echo $debug_abs
echo $abs


@@ -1,9 +0,0 @@
#!/bin/bash
set -euxo pipefail
bun install
bun install --cwd ./test/snippets
bun install --cwd ./test/scripts
make $BUN_TEST_NAME


@@ -1,5 +0,0 @@
#!/bin/bash
set -euxo pipefail
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --env BUN_TEST_NAME=$BUN_TEST_NAME --ulimit memlock=-1:-1 --init --rm bun-test:latest


@@ -1,5 +0,0 @@
#!/bin/bash
set -euxo pipefail
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --ulimit memlock=-1:-1 --init --rm bun-unit-tests:latest


@@ -47,4 +47,5 @@ runs:
mkdir -p ${{ runner.temp }}/.bun/bin
mv ${target}/bun* ${{ runner.temp }}/.bun/bin/
chmod +x ${{ runner.temp }}/.bun/bin/*
ln -fs ${{ runner.temp }}/.bun/bin/bun ${{ runner.temp }}/.bun/bin/bunx
echo "${{ runner.temp }}/.bun/bin" >> ${GITHUB_PATH}

.github/workflows/clang-format.yml (new file, 60 lines)

@@ -0,0 +1,60 @@
name: clang-format
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
paths:
- ".github/workflows/clang-format.yml"
- ".clang-format"
- "package.json"
- "scripts/**"
- "cmake/**"
- "src/**/*.c"
- "src/**/*.cpp"
- "src/**/*.h"
- "packages/**/*.c"
- "packages/**/*.cpp"
- "packages/**/*.h"
env:
BUN_VERSION: "1.1.27"
LLVM_VERSION: "18.1.8"
LLVM_VERSION_MAJOR: "18"
jobs:
clang-format:
name: clang-format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
.github
.clang-format
package.json
scripts
cmake
src
packages
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Install LLVM
run: |
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
- name: Clang Format
env:
ENABLE_CCACHE: OFF
LLVM_VERSION: ${{ env.LLVM_VERSION }}
run: |
bun run clang-format:diff
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun run clang-format`"

.github/workflows/clang-tidy.yml (new file, 60 lines)

@@ -0,0 +1,60 @@
name: clang-tidy
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
paths:
- ".github/workflows/clang-tidy.yml"
- ".clang-tidy"
- "package.json"
- "scripts/**"
- "cmake/**"
- "src/**/*.c"
- "src/**/*.cpp"
- "src/**/*.h"
- "packages/**/*.c"
- "packages/**/*.cpp"
- "packages/**/*.h"
env:
BUN_VERSION: "1.1.27"
LLVM_VERSION: "18.1.8"
LLVM_VERSION_MAJOR: "18"
jobs:
clang-tidy:
name: clang-tidy
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
.github
.clang-tidy
package.json
scripts
cmake
src
packages
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Install LLVM
run: |
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
- name: Clang Tidy
env:
ENABLE_CCACHE: OFF
LLVM_VERSION: ${{ env.LLVM_VERSION }}
run: |
bun run clang-tidy:diff
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun run clang-tidy`"


@@ -1,61 +0,0 @@
name: Format
permissions:
contents: write
concurrency:
group: format-${{ github.workflow }}-${{ github.event_name == 'workflow_dispatch' && inputs.run-id || github.ref }}
cancel-in-progress: true
on:
workflow_dispatch:
inputs:
run-id:
type: string
description: The workflow ID to download artifacts (skips the build step)
pull_request:
jobs:
format:
name: Format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
.prettierrc-ci
.github
.vscode
src
scripts
packages
test
bench
package.json
bun.lockb
.clang-format
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.1.25"
- name: Setup Zig
uses: mlugg/setup-zig@v1
with:
version: 0.13.0
- name: Install Dependencies
run: |
bun install
- name: Format
run: |
bun fmt
- name: Format Zig
run: |
bun fmt:zig
- name: Format Cpp
run: |
bun fmt:cpp
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: Apply formatting changes

.github/workflows/prettier-format.yml (new file, 49 lines)

@@ -0,0 +1,49 @@
name: prettier-format
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
paths:
- ".github/workflows/prettier-format.yml"
- "package.json"
- "scripts/**"
- "**.yml"
- "**.json"
- "**.js"
- "**.jsx"
- "**.ts"
- "**.tsx"
- "**.mjs"
- "**.cjs"
env:
BUN_VERSION: "1.1.27"
jobs:
prettier-format:
name: prettier-format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Setup Dependencies
run: |
bun install
- name: Prettier Format
env:
ENABLE_CCACHE: OFF
SKIP_LLVM: ON
run: |
bun run prettier:diff
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun run prettier:extra`"


@@ -1,56 +0,0 @@
name: Format
permissions:
contents: write
on:
workflow_call:
inputs:
zig-version:
type: string
required: true
jobs:
format:
name: Format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
.prettierrc-ci
.github
.vscode
src
scripts
packages
test
bench
package.json
bun.lockb
.clang-format
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.1.25"
- name: Setup Zig
uses: mlugg/setup-zig@v1
with:
version: ${{ inputs.zig-version }}
- name: Install Dependencies
run: |
bun install
- name: Format
run: |
bun fmt
- name: Format Zig
run: |
bun fmt:zig
- name: Format Cpp
run: |
bun fmt:cpp
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: Apply formatting changes

.github/workflows/zig-format.yml (new file, 48 lines)

@@ -0,0 +1,48 @@
name: zig-format
permissions:
contents: write
on:
workflow_call:
workflow_dispatch:
pull_request:
paths:
- ".github/workflows/zig-format.yml"
- "package.json"
- "scripts/**"
- "cmake/**"
- "src/**/*.zig"
env:
BUN_VERSION: "1.1.27"
jobs:
zig-format:
name: zig-format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
.github
package.json
scripts
cmake
src
packages
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Zig Format
env:
ENABLE_CCACHE: OFF
SKIP_LLVM: ON
run: |
bun run zig-format:diff
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun run zig-format`"

.gitignore (3 lines changed)

@@ -142,9 +142,6 @@ test/node.js/upstream
scripts/env.local
*.generated.ts
# Temporary files
/tmp
# Dependencies
/vendor


@@ -1,31 +0,0 @@
{
"arrowParens": "avoid",
"printWidth": 120,
"trailingComma": "all",
"useTabs": false,
"quoteProps": "preserve",
"plugins": [
"prettier-plugin-organize-imports"
],
"overrides": [
{
"files": [
".vscode/*.json"
],
"options": {
"parser": "jsonc",
"quoteProps": "preserve",
"singleQuote": false,
"trailingComma": "all"
}
},
{
"files": [
"*.md"
],
"options": {
"printWidth": 80
}
}
]
}

.vscode/launch.json (2 lines changed)

@@ -1192,4 +1192,4 @@
"description": "Usage: bun test [...]",
},
],
}
}


@@ -52,12 +52,15 @@
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "enabled",
"[cpp]": {
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"[c]": {
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"[h]": {
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"clangd.arguments": ["-header-insertion=never"],

.vscode/tasks.json (new file, 52 lines)

@@ -0,0 +1,52 @@
{
"version": "2.0.0",
"tasks": [
{
"type": "process",
"label": "Install Dependencies",
"command": "scripts/all-dependencies.sh",
"windows": {
"command": "scripts/all-dependencies.ps1",
},
"icon": {
"id": "arrow-down",
},
"options": {
"cwd": "${workspaceFolder}",
},
},
{
"type": "process",
"label": "Setup Environment",
"dependsOn": ["Install Dependencies"],
"command": "scripts/setup.sh",
"windows": {
"command": "scripts/setup.ps1",
},
"icon": {
"id": "check",
},
"options": {
"cwd": "${workspaceFolder}",
},
},
{
"type": "process",
"label": "Build Bun",
"dependsOn": ["Setup Environment"],
"command": "bun",
"args": ["run", "build"],
"icon": {
"id": "gear",
},
"options": {
"cwd": "${workspaceFolder}",
},
"isBuildCommand": true,
"runOptions": {
"instanceLimit": 1,
"reevaluateOnRerun": true,
},
},
],
}


@@ -30,6 +30,7 @@ include(CompilerFlags)
# --- Tools ---
include(SetupGit)
include(SetupBuildkite)
include(SetupBun)
include(SetupEsbuild)
@@ -38,21 +39,6 @@ include(SetupRust)
# --- Targets ---
include(BuildBoringSSL)
include(BuildBrotli)
include(BuildCares)
include(BuildLibDeflate)
include(BuildLibuv)
include(BuildLolHtml)
include(BuildLshpack)
include(BuildMimalloc)
include(BuildPicoHTTPParser)
include(BuildTinyCC)
include(BuildSQLite)
include(BuildWebKit)
include(BuildZlib)
include(BuildLibArchive) # must be loaded after zlib
include(BuildZstd)
include(BuildBun)
# --- Analysis ---
@@ -60,3 +46,4 @@ include(BuildBun)
include(RunClangFormat)
include(RunClangTidy)
include(RunZigFormat)
include(RunPrettier)

LATEST (2 lines changed)

@@ -1 +1 @@
1.1.27
1.1.29

Binary file not shown.


@@ -0,0 +1,24 @@
import { bench, run } from "mitata";
const crypto = require("node:crypto");
const keyPair = crypto.generateKeyPairSync("rsa", {
modulusLength: 2048,
publicKeyEncoding: {
type: "spki",
format: "pem",
},
privateKeyEncoding: {
type: "pkcs8",
format: "pem",
},
});
// Max message size for 2048-bit RSA keys
const plaintext = crypto.getRandomValues(Buffer.alloc(245));
bench("RSA sign RSA_PKCS1_PADDING round-trip", () => {
const sig = crypto.privateEncrypt(keyPair.privateKey, plaintext);
crypto.publicDecrypt(keyPair.publicKey, sig);
});
await run();


@@ -7,6 +7,7 @@
"@swc/core": "^1.2.133",
"benchmark": "^2.1.4",
"braces": "^3.0.2",
"color": "^4.2.3",
"esbuild": "^0.14.12",
"eventemitter3": "^5.0.0",
"execa": "^8.0.1",
@@ -14,6 +15,7 @@
"fdir": "^6.1.0",
"mitata": "^0.1.6",
"string-width": "7.1.0",
"tinycolor2": "^1.6.0",
"zx": "^7.2.3"
},
"scripts": {

bench/snippets/color.mjs (new file, 25 lines)

@@ -0,0 +1,25 @@
import Color from "color";
import tinycolor from "tinycolor2";
import { bench, run, group } from "./runner.mjs";
const inputs = ["#f00", "rgb(255, 0, 0)", "rgba(255, 0, 0, 1)", "hsl(0, 100%, 50%)"];
for (const input of inputs) {
group(`${input}`, () => {
if (typeof Bun !== "undefined") {
bench("Bun.color()", () => {
Bun.color(input, "css");
});
}
bench("color", () => {
Color(input).hex();
});
bench("'tinycolor2'", () => {
tinycolor(input).toHexString();
});
});
}
await run();


@@ -157,7 +157,7 @@ pub fn build(b: *Build) !void {
// TODO: Upgrade path for 0.14.0
// b.graph.zig_lib_directory = brk: {
// const sub_path = "src/deps/zig/lib";
// const sub_path = "vendor/zig/lib";
// const dir = try b.build_root.handle.openDir(sub_path, .{});
// break :brk .{ .handle = dir, .path = try b.build_root.join(b.graph.arena, &.{sub_path}) };
// };
@@ -295,7 +295,7 @@ pub fn build(b: *Build) !void {
bun_check_obj.generated_bin = null;
step.dependOn(&bun_check_obj.step);
// The default install step will run zig build check This is so ZLS
// The default install step will run zig build check. This is so ZLS
// identifies the codebase, as well as performs checking if build on
// save is enabled.
@@ -368,6 +368,7 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
.root_source_file = switch (opts.os) {
.wasm => b.path("root_wasm.zig"),
else => b.path("root.zig"),
// else => b.path("root_css.zig"),
},
.target = opts.target,
.optimize = opts.optimize,

bun.lockb (binary file not shown)


@@ -1,6 +1,8 @@
# clang: https://clang.llvm.org/docs/CommandGuide/clang.html
# clang-cl: https://clang.llvm.org/docs/UsersManual.html#id11
# --- Macros ---
macro(setb variable)
if(${variable})
set(${variable} ON)
@@ -9,20 +11,20 @@ macro(setb variable)
endif()
endmacro()
set(bvariables WIN32 APPLE UNIX LINUX)
foreach(bvariable ${bvariables})
setb(${bvariable})
set(targets WIN32 APPLE UNIX LINUX)
foreach(target ${targets})
setb(${target})
endforeach()
# --- CPU target ---
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
if(APPLE)
register_compiler_flags(-mcpu=apple-m1)
else()
register_compiler_flags(-march=armv8-a+crc -mtune=ampere1)
endif()
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|AMD64|x86_64|X86_64|x64|X64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64")
if(ENABLE_BASELINE)
register_compiler_flags(-march=nehalem)
else()
@@ -33,7 +35,6 @@ else()
endif()
# --- MSVC runtime ---
if(WIN32)
register_compiler_flags(
DESCRIPTION "Use static MSVC runtime"
@@ -44,7 +45,6 @@ if(WIN32)
endif()
# --- Optimization level ---
if(DEBUG)
register_compiler_flags(
DESCRIPTION "Disable optimization"
@@ -66,7 +66,6 @@ else()
endif()
# --- Debug level ---
if(WIN32)
register_compiler_flags(
DESCRIPTION "Enable debug symbols (.pdb)"
@@ -98,7 +97,6 @@ endif()
# -fno-eliminate-unused-debug-types # Don't eliminate unused debug symbols
# --- C/C++ flags ---
register_compiler_flags(
DESCRIPTION "Disable C/C++ exceptions"
-fno-exceptions ${UNIX}
@@ -106,8 +104,8 @@ register_compiler_flags(
)
register_compiler_flags(
LANGUAGE CXX
DESCRIPTION "Disable C++ static destructors"
LANGUAGES CXX
-Xclang ${WIN32}
-fno-c++-static-destructors
)
@@ -151,9 +149,9 @@ register_compiler_flags(
/Gw ${WIN32}
)
# This causes libarchive to fail on macOS, with the error:
# having this enabled in debug mode on macOS >=14 causes libarchive to fail to configure with the error:
# > pid_t doesn't exist on this platform?
if((DEBUG AND LINUX) OR ((NOT DEBUG) AND UNIX))
if((DEBUG AND LINUX) OR((NOT DEBUG) AND UNIX))
register_compiler_flags(
DESCRIPTION "Emit an address-significance table"
-faddrsig
@@ -173,7 +171,6 @@ if(WIN32)
endif()
# --- Linker flags ---
if(LINUX)
register_linker_flags(
DESCRIPTION "Disable relocation read-only (RELRO)"
@@ -185,7 +182,6 @@ endif()
# Note: This is a helpful guide about assertions:
# https://best.openssf.org/Compiler-Hardening-Guides/Compiler-Options-Hardening-Guide-for-C-and-C++
if(ENABLE_ASSERTIONS)
register_compiler_flags(
DESCRIPTION "Do not eliminate null-pointer checks"
@@ -231,7 +227,6 @@ else()
endif()
# --- Diagnostics ---
if(UNIX)
register_compiler_flags(
DESCRIPTION "Enable color diagnostics"
@@ -245,7 +240,6 @@ register_compiler_flags(
)
# --- LTO ---
if(ENABLE_LTO)
register_compiler_flags(
DESCRIPTION "Enable link-time optimization (LTO)"
@@ -255,8 +249,8 @@ if(ENABLE_LTO)
if(UNIX)
register_compiler_flags(
LANGUAGE CXX
DESCRIPTION "Enable virtual tables"
LANGUAGES CXX
-fforce-emit-vtables
-fwhole-program-vtables
)
@@ -271,7 +265,6 @@ if(ENABLE_LTO)
endif()
# --- Remapping ---
if(UNIX)
register_compiler_flags(
DESCRIPTION "Remap source files"
@@ -289,12 +282,6 @@ if(ENABLE_VALGRIND AND ARCH STREQUAL "x64")
register_compiler_definitions(__SSE4_2__=0)
endif()
if(APPLE)
# The $NOCANCEL variants of various system calls are activated by compiling
# with __DARWIN_NON_CANCELABLE, which prevents them from being pthread cancellation points.
register_compiler_definitions(__DARWIN_NON_CANCELABLE=1)
endif()
# --- Other ---
# Workaround for CMake and clang-cl bug.


@@ -124,12 +124,19 @@ optionx(CACHE_STRATEGY "read-write|read-only|write-only|none" "The strategy to u
optionx(CI BOOL "If CI is enabled" DEFAULT OFF)
if(CI)
set(DEFAULT_VENDOR_PATH ${CACHE_PATH}/vendor)
set(WARNING FATAL_ERROR)
else()
set(DEFAULT_VENDOR_PATH ${CWD}/vendor)
set(WARNING WARNING)
endif()
optionx(VENDOR_PATH FILEPATH "The path to the vendor directory" DEFAULT ${DEFAULT_VENDOR_PATH})
# TODO: This causes flaky zig builds in CI, so temporarily disable it.
# if(CI)
# set(DEFAULT_VENDOR_PATH ${CACHE_PATH}/vendor)
# else()
# set(DEFAULT_VENDOR_PATH ${CWD}/vendor)
# endif()
optionx(VENDOR_PATH FILEPATH "The path to the vendor directory" DEFAULT ${CWD}/vendor)
optionx(TMP_PATH FILEPATH "The path to the temporary directory" DEFAULT ${BUILD_PATH}/tmp)
optionx(FRESH BOOL "Set when --fresh is used" DEFAULT OFF)
@@ -298,18 +305,19 @@ endfunction()
# SOURCES string[] - The files that this command depends on
# OUTPUTS string[] - The files that this command produces
# ARTIFACTS string[] - The files that this command produces, and uploads as an artifact in CI
# ALWAYS_RUN bool - If true, the command will always run
# TARGET string - The target to register the command with
# TARGET_PHASE string - The target phase to register the command with (e.g. PRE_BUILD, PRE_LINK, POST_BUILD)
# GROUP string - The group to register the command with (e.g. similar to JOB_POOL)
function(register_command)
set(options ALWAYS_RUN)
set(args COMMENT CWD TARGET TARGET_PHASE GROUP)
set(multiArgs COMMAND ENVIRONMENT TARGETS SOURCES OUTPUTS ARTIFACTS)
cmake_parse_arguments(CMD "" "${args}" "${multiArgs}" ${ARGN})
cmake_parse_arguments(CMD "${options}" "${args}" "${multiArgs}" ${ARGN})
if(NOT CMD_COMMAND)
message(FATAL_ERROR "register_command: COMMAND is required")
endif()
parse_list(CMD_COMMAND CMD_COMMAND)
if(NOT CMD_CWD)
set(CMD_CWD ${CWD})
@@ -348,6 +356,10 @@ function(register_command)
list(APPEND CMD_EFFECTIVE_DEPENDS ${source})
endforeach()
if(NOT CMD_EFFECTIVE_DEPENDS AND NOT CMD_ALWAYS_RUN)
message(FATAL_ERROR "register_command: TARGETS or SOURCES is required")
endif()
set(CMD_EFFECTIVE_OUTPUTS)
foreach(output ${CMD_OUTPUTS})
@@ -371,13 +383,12 @@ function(register_command)
foreach(output ${CMD_EFFECTIVE_OUTPUTS})
get_source_file_property(generated ${output} GENERATED)
if(generated)
add_custom_target(${CMD_TARGET})
message(WARNING "Skipping ${CMD_TARGET}, since ${output} is generated by another target")
return()
list(REMOVE_ITEM CMD_EFFECTIVE_OUTPUTS ${output})
list(APPEND CMD_EFFECTIVE_OUTPUTS ${output}.always_run_${CMD_TARGET})
endif()
endforeach()
if(NOT CMD_EFFECTIVE_OUTPUTS)
if(CMD_ALWAYS_RUN)
list(APPEND CMD_EFFECTIVE_OUTPUTS ${CMD_CWD}/.always_run_${CMD_TARGET})
endif()
@@ -399,6 +410,10 @@ function(register_command)
return()
endif()
if(NOT CMD_EFFECTIVE_OUTPUTS)
message(FATAL_ERROR "register_command: OUTPUTS or ARTIFACTS is required, or set ALWAYS_RUN")
endif()
if(CMD_TARGET)
if(TARGET ${CMD_TARGET})
message(FATAL_ERROR "register_command: TARGET is already registered: ${CMD_TARGET}")
@@ -411,8 +426,6 @@ function(register_command)
if(TARGET clone-${CMD_TARGET})
add_dependencies(${CMD_TARGET} clone-${CMD_TARGET})
endif()
set_property(TARGET ${CMD_TARGET} PROPERTY OUTPUT ${CMD_EFFECTIVE_OUTPUTS} APPEND)
set_property(TARGET ${CMD_TARGET} PROPERTY DEPENDS ${CMD_EFFECTIVE_DEPENDS} APPEND)
endif()
add_custom_command(
@@ -583,664 +596,298 @@ function(register_repository)
${CMAKE_COMMAND}
-DGIT_PATH=${GIT_PATH}
-DGIT_REPOSITORY=${GIT_REPOSITORY}
-DGIT_BRANCH=${GIT_BRANCH}
-DGIT_TAG=${GIT_TAG}
-DGIT_COMMIT=${GIT_COMMIT}
-DGIT_NAME=${GIT_NAME}
-DGIT_COMMIT=${GIT_COMMIT}
-DGIT_TAG=${GIT_TAG}
-DGIT_BRANCH=${GIT_BRANCH}
-P ${CWD}/cmake/scripts/GitClone.cmake
OUTPUTS
${GIT_PATH}
${GIT_EFFECTIVE_OUTPUTS}
)
register_outputs(TARGET clone-${GIT_NAME} ${GIT_PATH})
endfunction()
function(parse_language variable)
if(NOT ${variable})
set(${variable} C CXX PARENT_SCOPE)
# register_cmake_command()
# Description:
# Registers a command that builds an external CMake project.
# Arguments:
# TARGET string - The target to register the command with
# ARGS string[] - The arguments to pass to CMake (e.g. -DKEY=VALUE)
# CWD string - The directory where the CMake files are located
# BUILD_PATH string - The path to build the project to
# LIB_PATH string - The path to the libraries
# TARGETS string[] - The targets to build from CMake
# LIBRARIES string[] - The libraries that are built
# INCLUDES string[] - The include paths
function(register_cmake_command)
set(args TARGET CWD BUILD_PATH LIB_PATH)
set(multiArgs ARGS TARGETS LIBRARIES INCLUDES)
# Use "MAKE" instead of "CMAKE" to prevent conflicts with CMake's own CMAKE_* variables
cmake_parse_arguments(MAKE "" "${args}" "${multiArgs}" ${ARGN})
if(NOT MAKE_TARGET)
message(FATAL_ERROR "register_cmake_command: TARGET is required")
endif()
foreach(value ${${variable}})
if(NOT value MATCHES "^(C|CXX)$")
message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: Invalid language: \"${value}\"")
endif()
endforeach()
endfunction()
function(parse_target variable)
foreach(value ${${variable}})
if(NOT TARGET ${value})
message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: Invalid target: \"${value}\"")
endif()
endforeach()
endfunction()
if(TARGET ${MAKE_TARGET})
message(FATAL_ERROR "register_cmake_command: TARGET is already a target: ${MAKE_TARGET}")
endif()
function(parse_path variable)
foreach(value ${${variable}})
if(NOT IS_ABSOLUTE ${value})
message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: ${variable} is not an absolute path: \"${value}\"")
endif()
if(NOT ${value} MATCHES "^(${CWD}|${BUILD_PATH}|${CACHE_PATH}|${VENDOR_PATH})")
message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: ${variable} is not in the source, build, cache, or vendor path: \"${value}\"")
endif()
endforeach()
endfunction()
if(NOT MAKE_CWD)
set(MAKE_CWD ${VENDOR_PATH}/${MAKE_TARGET})
endif()
function(parse_list list variable)
set(result)
macro(check_expression)
if(DEFINED expression)
if(NOT (${expression}))
list(POP_BACK result)
if(NOT MAKE_BUILD_PATH)
set(MAKE_BUILD_PATH ${BUILD_PATH}/${MAKE_TARGET})
endif()
if(MAKE_LIB_PATH)
set(MAKE_LIB_PATH ${MAKE_BUILD_PATH}/${MAKE_LIB_PATH})
else()
set(MAKE_LIB_PATH ${MAKE_BUILD_PATH})
endif()
set(MAKE_EFFECTIVE_ARGS -B${MAKE_BUILD_PATH} ${CMAKE_ARGS})
set(setFlags GENERATOR BUILD_TYPE)
set(appendFlags C_FLAGS CXX_FLAGS LINKER_FLAGS)
set(specialFlags POSITION_INDEPENDENT_CODE)
set(flags ${setFlags} ${appendFlags} ${specialFlags})
foreach(arg ${MAKE_ARGS})
foreach(flag ${flags})
if(arg MATCHES "-DCMAKE_${flag}=(.*)")
if(DEFINED MAKE_${flag})
message(FATAL_ERROR "register_cmake_command: CMAKE_${flag} was already set: \"${MAKE_${flag}}\"")
endif()
set(MAKE_${flag} ${CMAKE_MATCH_1})
set(${arg}_USED ON)
endif()
unset(expression)
endforeach()
if(NOT ${arg}_USED)
list(APPEND MAKE_EFFECTIVE_ARGS ${arg})
endif()
endmacro()
endforeach()
foreach(item ${${list}})
if(item MATCHES "^(ON|OFF|AND|OR|NOT)$")
set(expression ${expression} ${item})
foreach(flag ${setFlags})
if(NOT DEFINED MAKE_${flag} AND DEFINED CMAKE_${flag})
set(MAKE_${flag} ${CMAKE_${flag}})
endif()
endforeach()
foreach(flag ${appendFlags})
if(MAKE_${flag})
set(MAKE_${flag} "${CMAKE_${flag}} ${MAKE_${flag}}")
else()
check_expression()
list(APPEND result ${item})
set(MAKE_${flag} ${CMAKE_${flag}})
endif()
endforeach()
check_expression()
set(${variable} ${result} PARENT_SCOPE)
endfunction()
# register_target()
# Description:
# Registers a target that does nothing.
# Arguments:
# target string - The name of the target
function(register_target target)
add_custom_target(${target})
set(${target} ${target} PARENT_SCOPE)
set(${target}_CWD ${CWD} PARENT_SCOPE)
set(${target}_BUILD_PATH ${BUILD_PATH} PARENT_SCOPE)
endfunction()
# register_vendor_target()
# Description:
# Registers a target that does nothing.
# Arguments:
# target string - The name of the target
function(register_vendor_target target)
add_custom_target(${target})
set(${target} ${target} PARENT_SCOPE)
set(${target}_CWD ${VENDOR_PATH}/${target} PARENT_SCOPE)
set(${target}_BUILD_PATH ${BUILD_PATH}/vendor/${target} PARENT_SCOPE)
if(NOT TARGET vendor)
add_custom_target(vendor)
if(MAKE_POSITION_INDEPENDENT_CODE AND NOT WIN32)
set(MAKE_C_FLAGS "${MAKE_C_FLAGS} -fPIC")
set(MAKE_CXX_FLAGS "${MAKE_CXX_FLAGS} -fPIC")
elseif(APPLE)
set(MAKE_C_FLAGS "${MAKE_C_FLAGS} -fno-pic -fno-pie")
set(MAKE_CXX_FLAGS "${MAKE_CXX_FLAGS} -fno-pic -fno-pie")
endif()
add_dependencies(vendor ${target})
endfunction()
# register_outputs()
# Description:
# Registers outputs that are built from a target.
# Arguments:
# TARGET string - The target that builds the outputs
# outputs string[] - The list of outputs
function(register_outputs)
set(args TARGET PATH)
cmake_parse_arguments(OUTPUT "" "${args}" "" ${ARGN})
parse_target(OUTPUT_TARGET)
parse_list(OUTPUT_UNPARSED_ARGUMENTS OUTPUT_PATHS)
parse_path(OUTPUT_PATHS)
foreach(path ${OUTPUT_PATHS})
set_property(GLOBAL PROPERTY ${path} ${OUTPUT_TARGET} APPEND)
set_property(TARGET ${OUTPUT_TARGET} PROPERTY OUTPUT ${path} APPEND)
set(effectiveFlags ${setFlags} ${appendFlags})
foreach(flag ${effectiveFlags})
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_${flag}=${MAKE_${flag}}")
endforeach()
endfunction()
# register_inputs()
# Description:
# Registers inputs that are required to build a target.
# Arguments:
# TARGET string - The target that builds the inputs
# inputs string[] - The list of inputs
function(register_inputs)
set(args TARGET)
cmake_parse_arguments(INPUT "" "${args}" "" ${ARGN})
if(DEFINED FRESH)
list(APPEND MAKE_EFFECTIVE_ARGS --fresh)
endif()
parse_target(INPUT_TARGET)
parse_list(INPUT_UNPARSED_ARGUMENTS INPUT_PATHS)
register_command(
COMMENT "Configuring ${MAKE_TARGET}"
TARGET configure-${MAKE_TARGET}
COMMAND ${CMAKE_COMMAND} ${MAKE_EFFECTIVE_ARGS}
CWD ${MAKE_CWD}
OUTPUTS ${MAKE_BUILD_PATH}/CMakeCache.txt
)
foreach(path ${INPUT_PATHS})
set(search ${path})
set(found OFF)
while(search)
if(EXISTS ${search})
set(found ON)
break()
if(TARGET clone-${MAKE_TARGET})
add_dependencies(configure-${MAKE_TARGET} clone-${MAKE_TARGET})
endif()
set(MAKE_BUILD_ARGS --build ${MAKE_BUILD_PATH} --config ${MAKE_BUILD_TYPE})
set(MAKE_EFFECTIVE_LIBRARIES)
set(MAKE_ARTIFACTS)
foreach(lib ${MAKE_LIBRARIES})
if(lib MATCHES "^(WIN32|UNIX|APPLE)$")
if(${lib})
continue()
else()
list(POP_BACK MAKE_ARTIFACTS)
endif()
get_property(target GLOBAL PROPERTY ${search})
if(TARGET ${target})
set(found ON)
set_property(TARGET ${target} PROPERTY OUTPUT ${path} APPEND)
break()
else()
list(APPEND MAKE_EFFECTIVE_LIBRARIES ${lib})
if(lib MATCHES "\\.")
list(APPEND MAKE_ARTIFACTS ${MAKE_LIB_PATH}/${lib})
else()
list(APPEND MAKE_ARTIFACTS ${MAKE_LIB_PATH}/${CMAKE_STATIC_LIBRARY_PREFIX}${lib}${CMAKE_STATIC_LIBRARY_SUFFIX})
endif()
get_filename_component(next_search ${search} DIRECTORY)
if(next_search STREQUAL search OR next_search STREQUAL ${CWD} OR next_search STREQUAL ${VENDOR_PATH})
break()
endif()
set(search ${next_search})
endwhile()
if(NOT found)
message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: ${path} does not have a target")
endif()
endforeach()
endfunction()
# upload_artifacts()
# Description:
# Uploads artifacts after a target has been built.
# Arguments:
# TARGET string - The target to upload artifacts for
# artifacts string[] - The artifacts to upload
function(upload_artifacts)
set(args TARGET)
cmake_parse_arguments(ARTIFACT "" "${args}" "" ${ARGN})
parse_target(ARTIFACT_TARGET)
get_target_property(imported ${ARTIFACT_TARGET} IMPORTED)
if(imported)
return()
if(NOT MAKE_TARGETS)
set(MAKE_TARGETS ${MAKE_EFFECTIVE_LIBRARIES})
endif()
parse_list(ARTIFACT_UNPARSED_ARGUMENTS ARTIFACT_PATHS)
foreach(artifact ${ARTIFACT_PATHS})
file(RELATIVE_PATH filename ${BUILD_PATH} ${artifact})
add_custom_command(
TARGET ${ARTIFACT_TARGET} POST_BUILD
COMMENT "Uploading ${filename}"
COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename}
)
set_property(TARGET ${ARTIFACT_TARGET} PROPERTY OUTPUT ${artifact} APPEND)
foreach(target ${MAKE_TARGETS})
list(APPEND MAKE_BUILD_ARGS --target ${target})
endforeach()
set(MAKE_EFFECTIVE_INCLUDES)
foreach(include ${MAKE_INCLUDES})
if(include STREQUAL ".")
list(APPEND MAKE_EFFECTIVE_INCLUDES ${MAKE_CWD})
else()
list(APPEND MAKE_EFFECTIVE_INCLUDES ${MAKE_CWD}/${include})
endif()
endforeach()
register_command(
COMMENT "Building ${MAKE_TARGET}"
TARGET ${MAKE_TARGET}
TARGETS configure-${MAKE_TARGET}
COMMAND ${CMAKE_COMMAND} ${MAKE_BUILD_ARGS}
CWD ${MAKE_CWD}
ARTIFACTS ${MAKE_ARTIFACTS}
)
if(MAKE_EFFECTIVE_INCLUDES)
target_include_directories(${bun} PRIVATE ${MAKE_EFFECTIVE_INCLUDES})
if(TARGET clone-${MAKE_TARGET} AND NOT BUN_LINK_ONLY)
add_dependencies(${bun} clone-${MAKE_TARGET})
endif()
endif()
# HACK: Workaround for duplicate symbols when linking mimalloc.o
# >| duplicate symbol '_mi_page_queue_append(mi_heap_s*, mi_page_queue_s*, mi_page_queue_s*)' in:
# >| mimalloc/CMakeFiles/mimalloc-obj.dir/src/static.c.o
# >| ld: 287 duplicate symbols for architecture arm64
if(NOT BUN_LINK_ONLY OR NOT MAKE_ARTIFACTS MATCHES "static.c.o")
target_link_libraries(${bun} PRIVATE ${MAKE_ARTIFACTS})
endif()
if(BUN_LINK_ONLY)
target_sources(${bun} PRIVATE ${MAKE_ARTIFACTS})
endif()
endfunction()
# register_compiler_flags()
# register_compiler_flag()
# Description:
# Registers a compiler flag, similar to `add_compile_options()`, but has more validation and features.
# Arguments:
# TARGET string - The target to register the flag (default: all)
# LANGUAGE string - The language to register the flag (default: C, CXX)
# DESCRIPTION string - The description of the flag
# flags string[] - The flags to register
# DESCRIPTION string - The description of the flag
# LANGUAGES string[] - The languages to register the flag (default: C, CXX)
# TARGETS string[] - The targets to register the flag (default: all)
function(register_compiler_flags)
set(args TARGET LANGUAGE DESCRIPTION)
cmake_parse_arguments(COMPILER "" "${args}" "" ${ARGN})
set(args DESCRIPTION)
set(multiArgs LANGUAGES TARGETS)
cmake_parse_arguments(COMPILER "" "${args}" "${multiArgs}" ${ARGN})
parse_target(COMPILER_TARGET)
parse_language(COMPILER_LANGUAGE)
parse_list(COMPILER_UNPARSED_ARGUMENTS COMPILER_FLAGS)
if(NOT COMPILER_LANGUAGES)
set(COMPILER_LANGUAGES C CXX)
endif()
foreach(flag ${COMPILER_FLAGS})
if(NOT flag MATCHES "^(-|/)")
message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: Invalid flag: \"${flag}\"")
set(COMPILER_FLAGS)
foreach(flag ${COMPILER_UNPARSED_ARGUMENTS})
if(flag STREQUAL "ON")
continue()
elseif(flag STREQUAL "OFF")
list(POP_BACK COMPILER_FLAGS)
elseif(flag MATCHES "^(-|/)")
list(APPEND COMPILER_FLAGS ${flag})
else()
message(FATAL_ERROR "register_compiler_flags: Invalid flag: \"${flag}\"")
endif()
endforeach()
foreach(language ${COMPILER_LANGUAGE})
list(JOIN COMPILER_FLAGS " " COMPILER_FLAGS_STRING)
if(NOT COMPILER_TARGET)
set(CMAKE_${language}_FLAGS "${CMAKE_${language}_FLAGS} ${COMPILER_FLAGS_STRING}" PARENT_SCOPE)
foreach(target ${COMPILER_TARGETS})
if(NOT TARGET ${target})
message(FATAL_ERROR "register_compiler_flags: \"${target}\" is not a target")
endif()
foreach(target ${COMPILER_TARGET})
set(${target}_CMAKE_${language}_FLAGS "${${target}_CMAKE_${language}_FLAGS} ${COMPILER_FLAGS_STRING}" PARENT_SCOPE)
endforeach()
foreach(lang ${COMPILER_LANGUAGES})
list(JOIN COMPILER_FLAGS " " COMPILER_FLAGS_STRING)
if(NOT COMPILER_TARGETS)
set(CMAKE_${lang}_FLAGS "${CMAKE_${lang}_FLAGS} ${COMPILER_FLAGS_STRING}" PARENT_SCOPE)
endif()
foreach(target ${COMPILER_TARGETS})
set(${target}_CMAKE_${lang}_FLAGS "${${target}_CMAKE_${lang}_FLAGS} ${COMPILER_FLAGS_STRING}" PARENT_SCOPE)
endforeach()
endforeach()
foreach(language ${COMPILER_LANGUAGE})
foreach(lang ${COMPILER_LANGUAGES})
foreach(flag ${COMPILER_FLAGS})
if(NOT COMPILER_TARGET)
add_compile_options($<$<COMPILE_LANGUAGE:${language}>:${flag}>)
if(NOT COMPILER_TARGETS)
add_compile_options($<$<COMPILE_LANGUAGE:${lang}>:${flag}>)
endif()
foreach(target ${COMPILER_TARGET})
foreach(target ${COMPILER_TARGETS})
get_target_property(type ${target} TYPE)
get_target_property(imported ${target} IMPORTED)
if(type MATCHES "EXECUTABLE|LIBRARY" AND NOT imported)
target_compile_options(${target} PRIVATE $<$<COMPILE_LANGUAGE:${language}>:${flag}>)
if(type MATCHES "EXECUTABLE|LIBRARY")
target_compile_options(${target} PRIVATE $<$<COMPILE_LANGUAGE:${lang}>:${flag}>)
endif()
endforeach()
endforeach()
endforeach()
endfunction()
# register_compiler_definitions()
# Description:
# Registers a compiler definition, similar to `add_compile_definitions()`.
# Arguments:
# TARGET string - The target to register the definitions (default: all)
# LANGUAGE string - The language to register the definitions (default: C, CXX)
# DESCRIPTION string - The description of the definitions
# definitions string[] - The definitions to register
function(register_compiler_definitions)
set(args TARGET LANGUAGE DESCRIPTION)
cmake_parse_arguments(COMPILER "" "${args}" "" ${ARGN})
parse_language(COMPILER_LANGUAGE)
parse_target(COMPILER_TARGET)
parse_list(COMPILER_UNPARSED_ARGUMENTS COMPILER_DEFINITIONS)
foreach(definition ${COMPILER_DEFINITIONS})
if(NOT definition MATCHES "^([A-Z_][A-Z0-9_]*)")
message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: Invalid definition: \"${definition}\"")
endif()
endforeach()
if(WIN32)
list(TRANSFORM COMPILER_DEFINITIONS PREPEND "/D" OUTPUT_VARIABLE COMPILER_FLAGS)
else()
list(TRANSFORM COMPILER_DEFINITIONS PREPEND "-D" OUTPUT_VARIABLE COMPILER_FLAGS)
endif()
foreach(language ${COMPILER_LANGUAGE})
list(JOIN COMPILER_FLAGS " " COMPILER_FLAGS_STRING)
if(NOT COMPILER_TARGET)
set(CMAKE_${language}_FLAGS "${CMAKE_${language}_FLAGS} ${COMPILER_FLAGS_STRING}" PARENT_SCOPE)
endif()
foreach(target ${COMPILER_TARGET})
set(${target}_CMAKE_${language}_FLAGS "${${target}_CMAKE_${language}_FLAGS} ${COMPILER_FLAGS_STRING}" PARENT_SCOPE)
endforeach()
endforeach()
foreach(definition ${COMPILER_DEFINITIONS})
foreach(language ${COMPILER_LANGUAGE})
if(NOT COMPILER_TARGET)
add_compile_definitions($<$<COMPILE_LANGUAGE:${language}>:${definition}>)
endif()
foreach(target ${COMPILER_TARGET})
get_target_property(type ${target} TYPE)
get_target_property(imported ${target} IMPORTED)
if(type MATCHES "EXECUTABLE|LIBRARY" AND NOT imported)
target_compile_definitions(${target} PRIVATE $<$<COMPILE_LANGUAGE:${language}>:${definition}>)
endif()
endforeach()
endforeach()
endforeach()
endfunction()
# register_linker_flags()
# Description:
# Registers a linker flag, similar to `add_link_options()`.
# Arguments:
# TARGET string - The target to register the flag (default: all)
# DESCRIPTION string - The description of the flag
# flags string[] - The flags to register
# DESCRIPTION string - The description of the flag
function(register_linker_flags)
set(args TARGET DESCRIPTION)
set(args DESCRIPTION)
cmake_parse_arguments(LINKER "" "${args}" "" ${ARGN})
parse_target(LINKER_TARGET)
parse_list(LINKER_UNPARSED_ARGUMENTS LINKER_FLAGS)
foreach(flag ${LINKER_FLAGS})
if(NOT flag MATCHES "^(-|/)")
message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: Invalid flag: \"${flag}\"")
foreach(flag ${LINKER_UNPARSED_ARGUMENTS})
if(flag STREQUAL "ON")
continue()
elseif(flag STREQUAL "OFF")
list(POP_FRONT LINKER_FLAGS)
elseif(flag MATCHES "^(-|/)")
list(APPEND LINKER_FLAGS ${flag})
else()
message(FATAL_ERROR "register_linker_flags: Invalid flag: \"${flag}\"")
endif()
endforeach()
list(JOIN LINKER_FLAGS " " LINKER_FLAGS_STRING)
if(NOT LINKER_TARGET)
set(CMAKE_LINKER_FLAGS "${CMAKE_LINKER_FLAGS} ${LINKER_FLAGS_STRING}" PARENT_SCOPE)
endif()
foreach(target ${LINKER_TARGET})
set(${target}_CMAKE_LINKER_FLAGS "${${target}_CMAKE_LINKER_FLAGS} ${LINKER_FLAGS_STRING}" PARENT_SCOPE)
endforeach()
add_link_options(${LINKER_FLAGS})
endfunction()
if(NOT LINKER_TARGET)
add_link_options(${LINKER_FLAGS})
endif()
foreach(target ${LINKER_TARGET})
function(print_compiler_flags)
get_property(targets DIRECTORY PROPERTY BUILDSYSTEM_TARGETS)
set(languages C CXX)
foreach(target ${targets})
get_target_property(type ${target} TYPE)
if(type MATCHES "EXECUTABLE|LIBRARY")
target_link_options(${target} PUBLIC ${LINKER_FLAGS})
endif()
endforeach()
endfunction()
# register_includes()
# Description:
# Registers a include directory, similar to `target_include_directories()`.
# Arguments:
# TARGET string - The target to register the include (default: all)
# LANGUAGE string - The language to register the include (default: C, CXX)
# DESCRIPTION string - The description of the include
# paths string[] - The include paths to register
function(register_includes)
set(args TARGET LANGUAGE DESCRIPTION)
cmake_parse_arguments(INCLUDE "" "${args}" "" ${ARGN})
parse_target(INCLUDE_TARGET)
parse_language(INCLUDE_LANGUAGE)
parse_list(INCLUDE_UNPARSED_ARGUMENTS INCLUDE_PATHS)
parse_path(INCLUDE_PATHS)
register_inputs(TARGET ${INCLUDE_TARGET} ${INCLUDE_PATHS})
list(TRANSFORM INCLUDE_PATHS PREPEND "-I" OUTPUT_VARIABLE INCLUDE_FLAGS)
list(JOIN INCLUDE_FLAGS " " INCLUDE_FLAGS_STRING)
foreach(language ${INCLUDE_LANGUAGE})
if(NOT INCLUDE_TARGET)
set(CMAKE_${language}_FLAGS "${CMAKE_${language}_FLAGS} ${INCLUDE_FLAGS_STRING}" PARENT_SCOPE)
endif()
foreach(target ${INCLUDE_TARGET})
set(${target}_CMAKE_${language}_FLAGS "${${target}_CMAKE_${language}_FLAGS} ${INCLUDE_FLAGS_STRING}" PARENT_SCOPE)
endforeach()
if(NOT INCLUDE_TARGET)
add_include_directories(${INCLUDE_PATHS})
endif()
foreach(target ${INCLUDE_TARGET})
get_target_property(type ${target} TYPE)
get_target_property(imported ${target} IMPORTED)
if(type MATCHES "EXECUTABLE|LIBRARY" AND NOT imported)
target_include_directories(${target} PUBLIC ${INCLUDE_PATHS})
message(STATUS "Target: ${target}")
foreach(lang ${languages})
if(${target}_CMAKE_${lang}_FLAGS)
message(STATUS " ${lang} Flags: ${${target}_CMAKE_${lang}_FLAGS}")
endif()
endforeach()
endforeach()
endfunction()
# register_libraries()
# Description:
# Registers libraries that are built from a target.
# Arguments:
# TARGET string - The target that builds the libraries
# PATH string - The relative path to the libraries
# VARIABLE string - The variable to set to the libraries
# libraries string[] - The libraries to register
function(register_libraries)
set(args TARGET PATH VARIABLE)
cmake_parse_arguments(LIBRARY "" "${args}" "" ${ARGN})
parse_target(LIBRARY_TARGET)
parse_list(LIBRARY_UNPARSED_ARGUMENTS LIBRARY_NAMES)
if(LIBRARY_PATH)
if(NOT IS_ABSOLUTE ${LIBRARY_PATH})
set(LIBRARY_PATH ${${LIBRARY_TARGET}_BUILD_PATH}/${LIBRARY_PATH})
endif()
else()
set(LIBRARY_PATH ${${LIBRARY_TARGET}_BUILD_PATH})
endif()
parse_path(LIBRARY_PATH)
set(LIBRARY_PATHS)
foreach(name ${LIBRARY_NAMES})
if(name MATCHES "\\.")
list(APPEND LIBRARY_PATHS ${LIBRARY_PATH}/${name})
else()
list(APPEND LIBRARY_PATHS ${LIBRARY_PATH}/${CMAKE_STATIC_LIBRARY_PREFIX}${name}${CMAKE_STATIC_LIBRARY_SUFFIX})
foreach(lang ${languages})
message(STATUS "Language: ${lang}")
if(CMAKE_${lang}_FLAGS)
message(STATUS " Flags: ${CMAKE_${lang}_FLAGS}")
endif()
endforeach()
set_property(TARGET ${LIBRARY_TARGET} PROPERTY OUTPUT ${LIBRARY_PATHS} APPEND)
if(LIBRARY_VARIABLE)
set(${LIBRARY_VARIABLE} ${LIBRARY_PATHS} PARENT_SCOPE)
endif()
endfunction()
function(get_libraries target variable)
get_target_property(libraries ${target} OUTPUT)
if(libraries MATCHES "NOTFOUND")
set(libraries)
endif()
set(${variable} ${libraries} PARENT_SCOPE)
endfunction()
# register_cmake_project()
# Description:
# Registers an external CMake project.
# Arguments:
# TARGET string - The target to associate the project
# CWD string - The working directory of the project
# CMAKE_TARGET string[] - The CMake targets to build
# CMAKE_PATH string - The path to the CMake project (default: CWD)
function(register_cmake_project)
set(args TARGET CWD CMAKE_PATH LIBRARY_PATH)
set(multiArgs CMAKE_TARGET)
cmake_parse_arguments(PROJECT "" "${args}" "${multiArgs}" ${ARGN})
parse_target(PROJECT_TARGET)
if(NOT PROJECT_CWD)
set(PROJECT_CWD ${VENDOR_PATH}/${PROJECT_TARGET})
endif()
parse_path(PROJECT_CWD)
if(PROJECT_CMAKE_PATH)
set(PROJECT_CMAKE_PATH ${PROJECT_CWD}/${PROJECT_CMAKE_PATH})
else()
set(PROJECT_CMAKE_PATH ${PROJECT_CWD})
endif()
parse_path(PROJECT_CMAKE_PATH)
set(PROJECT_BUILD_PATH ${BUILD_PATH}/vendor/${PROJECT_TARGET})
set(PROJECT_TOOLCHAIN_PATH ${PROJECT_BUILD_PATH}/CMakeLists-toolchain.txt)
register_command(
TARGET
configure-${PROJECT_TARGET}
COMMENT
"Configuring ${PROJECT_TARGET}"
COMMAND
${CMAKE_COMMAND}
-G ${CMAKE_GENERATOR}
-B ${PROJECT_BUILD_PATH}
-S ${PROJECT_CMAKE_PATH}
--toolchain ${PROJECT_TOOLCHAIN_PATH}
--fresh
-DCMAKE_POLICY_DEFAULT_CMP0077=NEW
CWD
${PROJECT_CWD}
SOURCES
${PROJECT_TOOLCHAIN_PATH}
OUTPUTS
${PROJECT_BUILD_PATH}/CMakeCache.txt
)
if(TARGET clone-${PROJECT_TARGET})
add_dependencies(configure-${PROJECT_TARGET} clone-${PROJECT_TARGET})
endif()
set(PROJECT_BUILD_ARGS --build ${PROJECT_BUILD_PATH})
parse_list(PROJECT_CMAKE_TARGET PROJECT_CMAKE_TARGET)
foreach(target ${PROJECT_CMAKE_TARGET})
list(APPEND PROJECT_BUILD_ARGS --target ${target})
endforeach()
if(NOT BUN_LINK_ONLY)
get_libraries(${PROJECT_TARGET} PROJECT_OUTPUTS)
endif()
register_command(
TARGET
build-${PROJECT_TARGET}
COMMENT
"Building ${PROJECT_TARGET}"
COMMAND
${CMAKE_COMMAND}
${PROJECT_BUILD_ARGS}
CWD
${PROJECT_CWD}
TARGETS
configure-${PROJECT_TARGET}
ARTIFACTS
${PROJECT_OUTPUTS}
)
add_dependencies(${PROJECT_TARGET} build-${PROJECT_TARGET})
cmake_language(EVAL CODE "cmake_language(DEFER CALL create_toolchain_file ${PROJECT_TOOLCHAIN_PATH} ${PROJECT_TARGET})")
endfunction()
# register_cmake_definitions()
# Description:
# Registers definitions, when compiling an external CMake project.
# Arguments:
# TARGET string - The target to register the definitions (if not defined, sets for all targets)
# DESCRIPTION string - The description of the definitions
# definitions string[] - The definitions to register
function(register_cmake_definitions)
set(args TARGET DESCRIPTION)
cmake_parse_arguments(CMAKE "" "${args}" "" ${ARGN})
parse_target(CMAKE_TARGET)
parse_list(CMAKE_UNPARSED_ARGUMENTS CMAKE_EXTRA_DEFINITIONS)
foreach(definition ${CMAKE_EXTRA_DEFINITIONS})
string(REGEX MATCH "^([^=]+)=(.*)$" match ${definition})
if(NOT match)
message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: Invalid definition: \"${definition}\"")
endif()
endforeach()
if(CMAKE_TARGET)
set(${CMAKE_TARGET}_CMAKE_DEFINITIONS ${${CMAKE_TARGET}_CMAKE_DEFINITIONS} ${CMAKE_EXTRA_DEFINITIONS} PARENT_SCOPE)
else()
set(CMAKE_DEFINITIONS ${CMAKE_DEFINITIONS} ${CMAKE_EXTRA_DEFINITIONS} PARENT_SCOPE)
endif()
endfunction()
# register_link_targets()
# Description:
# Links the libraries of one target to another.
# Arguments:
# TARGET string - The main target
# targets string[] - The targets to link to the main target
function(register_link_targets)
set(args TARGET)
cmake_parse_arguments(LINK "" "${args}" "" ${ARGN})
parse_target(LINK_TARGET)
get_target_property(type ${LINK_TARGET} TYPE)
if(NOT type MATCHES "EXECUTABLE|LIBRARY")
message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: Target is not an executable or library: ${LINK_TARGET}")
endif()
parse_list(LINK_UNPARSED_ARGUMENTS LINK_TARGETS)
parse_target(LINK_TARGETS)
foreach(target ${LINK_TARGETS})
get_target_property(libraries ${target} OUTPUT)
if(NOT libraries)
message(FATAL_ERROR "${CMAKE_CURRENT_FUNCTION}: Target does not have libraries: ${target}")
endif()
register_link_libraries(TARGET ${LINK_TARGET} ${libraries})
endforeach()
endfunction()
function(register_link_libraries)
set(args TARGET)
cmake_parse_arguments(LINK "" "${args}" "" ${ARGN})
parse_target(LINK_TARGET)
parse_list(LINK_UNPARSED_ARGUMENTS LINK_TARGETS)
foreach(target ${LINK_TARGETS})
target_link_libraries(${LINK_TARGET} PUBLIC ${target})
endforeach()
endfunction()
# create_toolchain_file()
# Description:
# Creates a CMake toolchain file.
# Arguments:
# filename string - The path where the toolchain file is created
# target string - The target to create the toolchain file for
function(create_toolchain_file filename target)
parse_path(filename)
parse_target(target)
set(lines)
if(CMAKE_TOOLCHAIN_FILE)
file(STRINGS ${CMAKE_TOOLCHAIN_FILE} lines)
list(PREPEND lines "# Copied from ${CMAKE_TOOLCHAIN_FILE}")
endif()
list(APPEND lines "# Generated from ${CMAKE_CURRENT_FUNCTION} in ${CMAKE_CURRENT_LIST_FILE}")
set(variables
CMAKE_BUILD_TYPE
CMAKE_EXPORT_COMPILE_COMMANDS
CMAKE_COLOR_DIAGNOSTICS
CMAKE_C_COMPILER
CMAKE_C_COMPILER_LAUNCHER
CMAKE_CXX_COMPILER
CMAKE_CXX_COMPILER_LAUNCHER
CMAKE_LINKER
CMAKE_AR
CMAKE_RANLIB
CMAKE_STRIP
CMAKE_OSX_SYSROOT
CMAKE_OSX_DEPLOYMENT_TARGET
)
macro(append variable value)
if(value MATCHES " ")
list(APPEND lines "set(${variable} \"${value}\")")
else()
list(APPEND lines "set(${variable} ${value})")
endif()
endmacro()
foreach(variable ${variables})
if(DEFINED ${variable})
append(${variable} ${${variable}})
endif()
endforeach()
set(flags
CMAKE_C_FLAGS
CMAKE_CXX_FLAGS
CMAKE_LINKER_FLAGS
)
foreach(flag ${flags})
set(value)
if(DEFINED ${flag})
set(value "${${flag}}")
endif()
if(DEFINED ${target}_${flag})
set(value "${value} ${${target}_${flag}}")
endif()
if(value)
append(${flag} ${value})
endif()
endforeach()
set(definitions
CMAKE_DEFINITIONS
${target}_CMAKE_DEFINITIONS
)
foreach(definition ${definitions})
foreach(entry ${${definition}})
string(REGEX MATCH "^([^=]+)=(.*)$" match ${entry})
if(NOT match)
message(FATAL_ERROR "Invalid definition: ${entry}")
endif()
append(${CMAKE_MATCH_1} ${CMAKE_MATCH_2})
endforeach()
endforeach()
list(JOIN lines "\n" lines)
file(GENERATE OUTPUT ${filename} CONTENT "${lines}\n")
endfunction()

View File

@@ -2,8 +2,8 @@ if(NOT CMAKE_SYSTEM_NAME OR NOT CMAKE_SYSTEM_PROCESSOR)
message(FATAL_ERROR "CMake included this file before project() was called")
endif()
optionx(BUN_CPP_ONLY BOOL "If only the C++ library should be built" DEFAULT OFF)
optionx(BUN_LINK_ONLY BOOL "If only the executable should be linked" DEFAULT OFF)
optionx(BUN_LINK_ONLY BOOL "If only the linking step should be built" DEFAULT OFF)
optionx(BUN_CPP_ONLY BOOL "If only the C++ part of Bun should be built" DEFAULT OFF)
optionx(BUILDKITE BOOL "If Buildkite is enabled" DEFAULT OFF)
optionx(GITHUB_ACTIONS BOOL "If GitHub Actions is enabled" DEFAULT OFF)
@@ -143,4 +143,15 @@ endif()
optionx(USE_STATIC_LIBATOMIC BOOL "If libatomic should be statically linked" DEFAULT ${DEFAULT_STATIC_LIBATOMIC})
if(APPLE)
set(DEFAULT_WEBKIT_ICU OFF)
else()
set(DEFAULT_WEBKIT_ICU ON)
endif()
optionx(USE_WEBKIT_ICU BOOL "Use the ICU libraries from WebKit" DEFAULT ${DEFAULT_WEBKIT_ICU})
optionx(ERROR_LIMIT STRING "Maximum number of errors to show when compiling C++ code" DEFAULT "100")
list(APPEND CMAKE_ARGS -DCMAKE_EXPORT_COMPILE_COMMANDS=ON)

View File

@@ -1,3 +1,5 @@
# https://clang.llvm.org/docs/ClangFormat.html
find_command(
VARIABLE
CLANG_FORMAT_PROGRAM
@@ -7,6 +9,8 @@ find_command(
OFF
)
set(CLANG_FORMAT_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES})
register_command(
TARGET
clang-format-check
@@ -17,8 +21,8 @@ register_command(
-Werror
--dry-run
--verbose
${BUN_C_SOURCES}
${BUN_CXX_SOURCES}
${CLANG_FORMAT_SOURCES}
ALWAYS_RUN
)
register_command(
@@ -30,6 +34,38 @@ register_command(
${CLANG_FORMAT_PROGRAM}
-i # edits files in-place
--verbose
${BUN_C_SOURCES}
${BUN_CXX_SOURCES}
${CLANG_FORMAT_SOURCES}
ALWAYS_RUN
)
if(GIT_CHANGED_SOURCES)
set(CLANG_FORMAT_CHANGED_SOURCES)
foreach(source ${CLANG_FORMAT_SOURCES})
list(FIND GIT_CHANGED_SOURCES ${source} index)
if(NOT ${index} EQUAL -1)
list(APPEND CLANG_FORMAT_CHANGED_SOURCES ${source})
endif()
endforeach()
endif()
if(CLANG_FORMAT_CHANGED_SOURCES)
set(CLANG_FORMAT_DIFF_COMMAND ${CLANG_FORMAT_PROGRAM}
-i # edits files in-place
--verbose
${CLANG_FORMAT_CHANGED_SOURCES}
)
else()
set(CLANG_FORMAT_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for clang-format")
endif()
register_command(
TARGET
clang-format-diff
COMMENT
"Running clang-format on changed files"
COMMAND
${CLANG_FORMAT_DIFF_COMMAND}
CWD
${BUILD_PATH}
ALWAYS_RUN
)

View File

@@ -13,58 +13,13 @@ find_command(
set(CLANG_TIDY_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES})
find_command(
VARIABLE
GIT_PROGRAM
COMMAND
git
REQUIRED
OFF
set(CLANG_TIDY_COMMAND ${CLANG_TIDY_PROGRAM}
-p ${BUILD_PATH}
--config-file=${CWD}/.clang-tidy
)
if(GIT_PROGRAM)
execute_process(
COMMAND
${GIT_PROGRAM}
diff
--name-only
--diff-filter=AM
main
WORKING_DIRECTORY
${CWD}
OUTPUT_STRIP_TRAILING_WHITESPACE
OUTPUT_VARIABLE
GIT_CHANGED_FILES
ERROR_QUIET
)
string(REPLACE "\n" ";" GIT_CHANGED_FILES ${GIT_CHANGED_FILES})
list(TRANSFORM GIT_CHANGED_FILES PREPEND ${CWD}/)
set(CLANG_TIDY_CHANGED_SOURCES)
foreach(source ${CLANG_TIDY_SOURCES})
list(FIND GIT_CHANGED_FILES ${source} index)
if(NOT ${index} EQUAL -1)
list(APPEND CLANG_TIDY_CHANGED_SOURCES ${source})
endif()
endforeach()
if(CLANG_TIDY_CHANGED_SOURCES)
set(CLANG_TIDY_SOURCES ${CLANG_TIDY_CHANGED_SOURCES})
else()
set(CLANG_TIDY_COMMAND ${CMAKE_COMMAND} -E echo "No files changed for clang-tidy")
endif()
endif()
if(NOT CLANG_TIDY_COMMAND)
set(CLANG_TIDY_COMMAND ${CLANG_TIDY_PROGRAM}
-p ${BUILD_PATH}
--config-file=${CWD}/.clang-tidy
--fix
--fix-errors
--fix-notes
--use-color
${CLANG_TIDY_SOURCES}
)
if(CMAKE_COLOR_DIAGNOSTICS)
list(APPEND CLANG_TIDY_COMMAND --use-color)
endif()
register_command(
@@ -73,7 +28,58 @@ register_command(
COMMENT
"Running clang-tidy"
COMMAND
${CLANG_TIDY_COMMAND}
${CLANG_TIDY_COMMAND}
${CLANG_TIDY_SOURCES}
--fix
--fix-errors
--fix-notes
CWD
${BUILD_PATH}
ALWAYS_RUN
)
register_command(
TARGET
clang-tidy-check
COMMENT
"Checking clang-tidy"
COMMAND
${CLANG_TIDY_COMMAND}
${CLANG_TIDY_SOURCES}
CWD
${BUILD_PATH}
ALWAYS_RUN
)
if(GIT_CHANGED_SOURCES)
set(CLANG_TIDY_CHANGED_SOURCES)
foreach(source ${CLANG_TIDY_SOURCES})
list(FIND GIT_CHANGED_SOURCES ${source} index)
if(NOT ${index} EQUAL -1)
list(APPEND CLANG_TIDY_CHANGED_SOURCES ${source})
endif()
endforeach()
endif()
if(CLANG_TIDY_CHANGED_SOURCES)
set(CLANG_TIDY_DIFF_COMMAND ${CLANG_TIDY_PROGRAM}
${CLANG_TIDY_CHANGED_SOURCES}
--fix
--fix-errors
--fix-notes
)
else()
set(CLANG_TIDY_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for clang-tidy")
endif()
register_command(
TARGET
clang-tidy-diff
COMMENT
"Running clang-tidy on changed files"
COMMAND
${CLANG_TIDY_DIFF_COMMAND}
CWD
${BUILD_PATH}
ALWAYS_RUN
)

View File

@@ -0,0 +1,123 @@
if(CMAKE_HOST_WIN32)
setx(PRETTIER_EXECUTABLE ${CWD}/node_modules/.bin/prettier.exe)
else()
setx(PRETTIER_EXECUTABLE ${CWD}/node_modules/.bin/prettier)
endif()
set(PRETTIER_PATHS
${CWD}/src
${CWD}/packages/bun-error
${CWD}/packages/bun-types
${CWD}/packages/bun-inspector-protocol
${CWD}/packages/bun-inspector-frontend
${CWD}/packages/bun-debug-adapter-protocol
${CWD}/packages/bun-vscode
${CWD}/test
${CWD}/bench
${CWD}/.vscode
${CWD}/.buildkite
${CWD}/.github
)
set(PRETTIER_EXTENSIONS
*.jsonc?
*.ya?ml
*.jsx?
*.tsx?
*.mjs
*.cjs
*.mts
*.cts
)
set(PRETTIER_GLOBS)
foreach(path ${PRETTIER_PATHS})
foreach(extension ${PRETTIER_EXTENSIONS})
list(APPEND PRETTIER_GLOBS ${path}/${extension})
endforeach()
endforeach()
file(GLOB_RECURSE PRETTIER_SOURCES ${PRETTIER_GLOBS})
register_command(
COMMAND
${BUN_EXECUTABLE}
install
--frozen-lockfile
SOURCES
${CWD}/package.json
OUTPUTS
${PRETTIER_EXECUTABLE}
)
set(PRETTIER_COMMAND ${PRETTIER_EXECUTABLE}
--config=${CWD}/.prettierrc
--cache
)
register_command(
TARGET
prettier
COMMENT
"Running prettier"
COMMAND
${PRETTIER_COMMAND}
--write
${PRETTIER_SOURCES}
ALWAYS_RUN
)
register_command(
TARGET
prettier-extra
COMMENT
"Running prettier with extra plugins"
COMMAND
${PRETTIER_COMMAND}
--write
--plugin=prettier-plugin-organize-imports
${PRETTIER_SOURCES}
ALWAYS_RUN
)
register_command(
TARGET
prettier-check
COMMENT
"Checking prettier"
COMMAND
${PRETTIER_COMMAND}
--check
${PRETTIER_SOURCES}
ALWAYS_RUN
)
if(GIT_CHANGED_SOURCES)
set(PRETTIER_CHANGED_SOURCES)
foreach(source ${PRETTIER_SOURCES})
list(FIND GIT_CHANGED_SOURCES ${source} index)
if(NOT ${index} EQUAL -1)
list(APPEND PRETTIER_CHANGED_SOURCES ${source})
endif()
endforeach()
endif()
if(PRETTIER_CHANGED_SOURCES)
set(PRETTIER_DIFF_COMMAND ${PRETTIER_COMMAND}
--write
--plugin=prettier-plugin-organize-imports
${PRETTIER_CHANGED_SOURCES}
)
else()
set(PRETTIER_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for prettier")
endif()
register_command(
TARGET
prettier-diff
COMMENT
"Running prettier on changed files"
COMMAND
${PRETTIER_DIFF_COMMAND}
ALWAYS_RUN
)

View File

@@ -1,3 +1,5 @@
set(ZIG_FORMAT_SOURCES ${BUN_ZIG_SOURCES})
register_command(
TARGET
zig-format-check
@@ -7,7 +9,8 @@ register_command(
${ZIG_EXECUTABLE}
fmt
--check
${BUN_ZIG_SOURCES}
${ZIG_FORMAT_SOURCES}
ALWAYS_RUN
)
register_command(
@@ -18,5 +21,37 @@ register_command(
COMMAND
${ZIG_EXECUTABLE}
fmt
${BUN_ZIG_SOURCES}
${ZIG_FORMAT_SOURCES}
ALWAYS_RUN
)
if(GIT_CHANGED_SOURCES)
set(ZIG_FORMAT_CHANGED_SOURCES)
foreach(source ${ZIG_FORMAT_SOURCES})
list(FIND GIT_CHANGED_SOURCES ${source} index)
if(NOT ${index} EQUAL -1)
list(APPEND ZIG_FORMAT_CHANGED_SOURCES ${source})
endif()
endforeach()
endif()
if(ZIG_FORMAT_CHANGED_SOURCES)
set(ZIG_FORMAT_DIFF_COMMAND ${ZIG_EXECUTABLE}
fmt
${ZIG_FORMAT_CHANGED_SOURCES}
)
else()
set(ZIG_FORMAT_DIFF_COMMAND ${CMAKE_COMMAND} -E echo "No changed files for zig-format")
endif()
register_command(
TARGET
zig-format-diff
COMMENT
"Running zig fmt on changed files"
COMMAND
${ZIG_FORMAT_DIFF_COMMAND}
CWD
${BUILD_PATH}
ALWAYS_RUN
)

View File

@@ -125,7 +125,5 @@ else()
file(RENAME ${DOWNLOAD_TMP_FILE} ${DOWNLOAD_PATH})
endif()
get_filename_component(DOWNLOAD_FILENAME ${DOWNLOAD_PATH} NAME)
message(STATUS "Saved ${DOWNLOAD_FILENAME}")
file(REMOVE_RECURSE ${DOWNLOAD_TMP_PATH})
message(STATUS "Saved ${DOWNLOAD_PATH}")

View File

@@ -1,21 +1,8 @@
get_filename_component(SCRIPT_NAME ${CMAKE_CURRENT_LIST_FILE} NAME)
message(STATUS "Running script: ${SCRIPT_NAME}")
if(NOT ZIG_PATH)
message(FATAL_ERROR "ZIG_PATH is required")
endif()
if(ZIG_REPOSITORY)
if(NOT ZIG_COMMIT)
message(FATAL_ERROR "ZIG_COMMIT is required when ZIG_REPOSITORY is set")
endif()
elseif(NOT ZIG_COMMIT)
set(ZIG_REPOSITORY "oven-sh/zig")
set(ZIG_COMMIT "131a009ba2eb127a3447d05b9e12f710429aa5ee")
endif()
if(NOT ZIG_VERSION)
set(ZIG_VERSION "0.13.0")
if(NOT ZIG_PATH OR NOT ZIG_COMMIT OR NOT ZIG_VERSION)
message(FATAL_ERROR "ZIG_PATH, ZIG_COMMIT, and ZIG_VERSION are required")
endif()
if(CMAKE_HOST_APPLE)
@@ -51,7 +38,6 @@ else()
set(ZIG_FILENAME ${ZIG_NAME}.tar.xz)
endif()
message(STATUS "Downloading ${ZIG_EXE} ${ZIG_VERSION} on ${ZIG_OS} ${ZIG_ARCH}...")
set(ZIG_DOWNLOAD_URL https://ziglang.org/download/${ZIG_VERSION}/${ZIG_FILENAME})
execute_process(
@@ -72,7 +58,7 @@ if(NOT ZIG_DOWNLOAD_RESULT EQUAL 0)
endif()
if(NOT EXISTS ${ZIG_PATH}/${ZIG_EXE})
message(FATAL_ERROR "Download failed: executable not found: \"${ZIG_PATH}/${ZIG_EXE}\"")
message(FATAL_ERROR "Executable not found: \"${ZIG_PATH}/${ZIG_EXE}\"")
endif()
# Tools like VSCode need a stable path to the zig executable, on both Unix and Windows
@@ -81,31 +67,30 @@ if(NOT WIN32)
file(CREATE_LINK ${ZIG_PATH}/${ZIG_EXE} ${ZIG_PATH}/zig.exe SYMBOLIC)
endif()
if(ZIG_REPOSITORY AND ZIG_COMMIT)
message(STATUS "Downloading zig library from ${ZIG_REPOSITORY} at ${ZIG_COMMIT}...")
set(ZIG_REPOSITORY_PATH ${ZIG_PATH}/repository)
execute_process(
COMMAND
${CMAKE_COMMAND}
-DGIT_PATH=${ZIG_PATH}/tmp
-DGIT_REPOSITORY=${ZIG_REPOSITORY}
-DGIT_COMMIT=${ZIG_COMMIT}
-P ${CMAKE_CURRENT_LIST_DIR}/GitClone.cmake
ERROR_STRIP_TRAILING_WHITESPACE
ERROR_VARIABLE
ZIG_REPOSITORY_ERROR
RESULT_VARIABLE
ZIG_REPOSITORY_RESULT
)
execute_process(
COMMAND
${CMAKE_COMMAND}
-DGIT_PATH=${ZIG_REPOSITORY_PATH}
-DGIT_REPOSITORY=oven-sh/zig
-DGIT_COMMIT=${ZIG_COMMIT}
-P ${CMAKE_CURRENT_LIST_DIR}/GitClone.cmake
ERROR_STRIP_TRAILING_WHITESPACE
ERROR_VARIABLE
ZIG_REPOSITORY_ERROR
RESULT_VARIABLE
ZIG_REPOSITORY_RESULT
)
if(NOT ZIG_REPOSITORY_RESULT EQUAL 0)
message(FATAL_ERROR "Download failed: ${ZIG_REPOSITORY_ERROR}")
endif()
file(REMOVE_RECURSE ${ZIG_PATH}/lib)
file(RENAME ${ZIG_PATH}/tmp/lib ${ZIG_PATH}/lib)
file(REMOVE_RECURSE ${ZIG_PATH}/tmp)
message(STATUS "Saved ${ZIG_PATH}/lib")
if(NOT ZIG_REPOSITORY_RESULT EQUAL 0)
message(FATAL_ERROR "Download failed: ${ZIG_REPOSITORY_ERROR}")
endif()
message(STATUS "Saved ${ZIG_EXE}")
file(REMOVE_RECURSE ${ZIG_PATH}/lib)
# Use copy_directory instead of file(RENAME) because there were
# race conditions in CI where some files were not copied.
execute_process(COMMAND ${CMAKE_COMMAND} -E copy_directory ${ZIG_REPOSITORY_PATH}/lib ${ZIG_PATH}/lib)
file(REMOVE_RECURSE ${ZIG_REPOSITORY_PATH})

View File

@@ -1,31 +1,21 @@
register_vendor_target(boringssl)
register_repository(
NAME
${boringssl}
boringssl
REPOSITORY
oven-sh/boringssl
COMMIT
29a2cd359458c9384694b75456026e4b57e3e567
)
register_libraries(
TARGET ${boringssl}
crypto
ssl
decrepit
)
register_cmake_project(
register_cmake_command(
TARGET
${boringssl}
CMAKE_TARGET
boringssl
LIBRARIES
crypto
ssl
decrepit
)
register_cmake_definitions(
TARGET ${boringssl}
BUILD_SHARED_LIBS=OFF
ARGS
-DBUILD_SHARED_LIBS=OFF
INCLUDES
include
)

View File

@@ -1,43 +1,31 @@
register_vendor_target(brotli)
register_repository(
NAME
${brotli}
brotli
REPOSITORY
google/brotli
COMMIT
ed738e842d2fbdf2d6459e39267a633c4a9b2f5d
)
register_libraries(
TARGET ${brotli}
brotlicommon
brotlidec
brotlienc
)
register_cmake_project(
TARGET
${brotli}
CMAKE_TARGET
brotlicommon
brotlidec
brotlienc
)
register_cmake_definitions(
TARGET ${brotli}
BUILD_SHARED_LIBS=OFF
BROTLI_BUILD_TOOLS=OFF
BROTLI_EMSCRIPTEN=OFF
BROTLI_DISABLE_TESTS=ON
TAG
v1.1.0
)
# Tests fail with "BrotliDecompressionError" when LTO is enabled
# only on Linux x64 (non-baseline). It's a mystery.
if(LINUX AND CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|AMD64|x86_64|X86_64|x64|X64" AND NOT ENABLE_BASELINE)
register_compiler_flags(
TARGET ${brotli}
-fno-lto
)
if(LINUX AND CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64" AND NOT ENABLE_BASELINE)
set(BROTLI_CMAKE_ARGS "-DCMAKE_C_FLAGS=-fno-lto")
endif()
register_cmake_command(
TARGET
brotli
LIBRARIES
brotlicommon
brotlidec
brotlienc
ARGS
-DBUILD_SHARED_LIBS=OFF
-DBROTLI_BUILD_TOOLS=OFF
-DBROTLI_EMSCRIPTEN=OFF
-DBROTLI_DISABLE_TESTS=ON
${BROTLI_CMAKE_ARGS}
INCLUDES
c/include
)

View File

@@ -12,6 +12,15 @@ else()
set(bunStrip bun)
endif()
set(bunExe ${bun}${CMAKE_EXECUTABLE_SUFFIX})
if(bunStrip)
set(bunStripExe ${bunStrip}${CMAKE_EXECUTABLE_SUFFIX})
set(buns ${bun} ${bunStrip})
else()
set(buns ${bun})
endif()
# Some commands use this path, and some do not.
# In the future, change those commands so that generated files are written to this path.
optionx(CODEGEN_PATH FILEPATH "Path to the codegen directory" DEFAULT ${BUILD_PATH}/codegen)
@@ -51,6 +60,8 @@ register_command(
${BUN_ZIG_IDENTIFIER_SCRIPT}
SOURCES
${BUN_ZIG_IDENTIFIER_SOURCES}
TARGETS
clone-zig
OUTPUTS
${BUN_ZIG_IDENTIFIER_OUTPUTS}
)
@@ -463,19 +474,6 @@ WEBKIT_ADD_SOURCE_DEPENDENCIES(
${CODEGEN_PATH}/InternalModuleRegistryConstants.h
)
if(WIN32)
if(ENABLE_CANARY)
set(Bun_VERSION_WITH_TAG ${VERSION}-canary.${CANARY_REVISION})
else()
set(Bun_VERSION_WITH_TAG ${VERSION})
endif()
set(BUN_ICO_PATH ${CWD}/src/bun.ico)
configure_file(
${CWD}/src/windows-app-info.rc
${CODEGEN_PATH}/windows-app-info.rc
)
endif()
# --- Zig ---
file(GLOB_RECURSE BUN_ZIG_SOURCES ${CONFIGURE_DEPENDS}
@@ -548,15 +546,17 @@ register_command(
-Dgenerated-code=${CODEGEN_PATH}
ARTIFACTS
${BUN_ZIG_OUTPUT}
TARGETS
clone-zig
SOURCES
${BUN_ZIG_SOURCES}
${BUN_ZIG_GENERATED_SOURCES}
)
set_property(TARGET bun-zig PROPERTY JOB_POOL compile_pool)
set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS ${CWD}/build.zig)
set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")
# --- C/C++ Object ---
# --- C/C++ Sources ---
set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets)
@@ -569,6 +569,7 @@ file(GLOB BUN_CXX_SOURCES ${CONFIGURE_DEPENDS}
${CWD}/src/bun.js/bindings/webcrypto/*.cpp
${CWD}/src/bun.js/bindings/webcrypto/*/*.cpp
${CWD}/src/bun.js/bindings/v8/*.cpp
${CWD}/src/bun.js/bindings/v8/shim/*.cpp
${CWD}/src/kit/*.cpp
${CWD}/src/deps/*.cpp
${BUN_USOCKETS_SOURCE}/src/crypto/*.cpp
@@ -581,15 +582,42 @@ file(GLOB BUN_C_SOURCES ${CONFIGURE_DEPENDS}
${BUN_USOCKETS_SOURCE}/src/crypto/*.c
)
list(APPEND BUN_C_SOURCES ${VENDOR_PATH}/picohttpparser/picohttpparser.c)
if(WIN32)
list(APPEND BUN_C_SOURCES ${CWD}/src/bun.js/bindings/windows/musl-memmem.c)
endif()
register_repository(
NAME
picohttpparser
REPOSITORY
h2o/picohttpparser
COMMIT
066d2b1e9ab820703db0837a7255d92d30f0c9f5
OUTPUTS
picohttpparser.c
)
set(NODEJS_HEADERS_PATH ${VENDOR_PATH}/nodejs)
register_command(
TARGET
bun-node-headers
COMMENT
"Download node ${NODEJS_VERSION} headers"
COMMAND
${CMAKE_COMMAND}
-DDOWNLOAD_PATH=${NODEJS_HEADERS_PATH}
-DDOWNLOAD_URL=https://nodejs.org/dist/v${NODEJS_VERSION}/node-v${NODEJS_VERSION}-headers.tar.gz
-P ${CWD}/cmake/scripts/DownloadUrl.cmake
OUTPUTS
${NODEJS_HEADERS_PATH}/include/node/node_version.h
)
list(APPEND BUN_CPP_SOURCES
${BUN_C_SOURCES}
${BUN_CXX_SOURCES}
${VENDOR_PATH}/picohttpparser/picohttpparser.c
${NODEJS_HEADERS_PATH}/include/node/node_version.h
${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
${BUN_JS_SINK_OUTPUTS}
${BUN_JAVASCRIPT_OUTPUTS}
@@ -597,12 +625,22 @@ list(APPEND BUN_CPP_SOURCES
)
if(WIN32)
if(ENABLE_CANARY)
set(Bun_VERSION_WITH_TAG ${VERSION}-canary.${CANARY_REVISION})
else()
set(Bun_VERSION_WITH_TAG ${VERSION})
endif()
set(BUN_ICO_PATH ${CWD}/src/bun.ico)
configure_file(
${CWD}/src/windows-app-info.rc
${CODEGEN_PATH}/windows-app-info.rc
)
list(APPEND BUN_CPP_SOURCES ${CODEGEN_PATH}/windows-app-info.rc)
endif()
# --- Executable ---
set(BUN_CPP_OUTPUT ${BUILD_PATH}/${CMAKE_STATIC_LIBRARY_PREFIX}${bun}${CMAKE_STATIC_LIBRARY_SUFFIX})
set(BUN_EXE_OUTPUT ${BUILD_PATH}/${CMAKE_EXECUTABLE_PREFIX}${bun}${CMAKE_EXECUTABLE_SUFFIX})
set(BUN_EXE_STRIP_OUTPUT ${BUILD_PATH}/${CMAKE_EXECUTABLE_PREFIX}bun${CMAKE_EXECUTABLE_SUFFIX})
if(BUN_LINK_ONLY)
add_executable(${bun} ${BUN_CPP_OUTPUT} ${BUN_ZIG_OUTPUT})
@@ -610,21 +648,30 @@ if(BUN_LINK_ONLY)
target_link_libraries(${bun} PRIVATE ${BUN_CPP_OUTPUT})
elseif(BUN_CPP_ONLY)
add_library(${bun} STATIC ${BUN_CPP_SOURCES})
upload_artifacts(
TARGET ${bun}
${BUN_CPP_OUTPUT}
register_command(
TARGET
${bun}
TARGET_PHASE
POST_BUILD
COMMENT
"Uploading ${bun}"
COMMAND
${CMAKE_COMMAND} -E true
ARTIFACTS
${BUN_CPP_OUTPUT}
)
else()
add_executable(${bun} ${BUN_CPP_SOURCES})
target_link_libraries(${bun} PRIVATE ${BUN_ZIG_OUTPUT})
upload_artifacts(
TARGET ${bun}
${BUN_EXE_OUTPUT}
)
endif()
if(NOT bun STREQUAL "bun")
add_custom_target(bun DEPENDS ${bun})
endif()
# --- C/C++ Properties ---
set_target_properties(${bun} PROPERTIES
OUTPUT_NAME ${bun}
CXX_STANDARD 20
CXX_STANDARD_REQUIRED YES
CXX_EXTENSIONS YES
@@ -634,17 +681,6 @@ set_target_properties(${bun} PROPERTIES
VISIBILITY_INLINES_HIDDEN YES
)
if(BUN_LINK_ONLY)
set_target_properties(${bun} PROPERTIES
OUTPUT_NAME ${bun}
LINKER_LANGUAGE CXX
)
endif()
if(NOT bun STREQUAL "bun")
add_custom_target(bun DEPENDS ${bun})
endif()
# --- C/C++ Includes ---
if(WIN32)
@@ -659,6 +695,7 @@ target_include_directories(${bun} PRIVATE
${CWD}/src/bun.js/bindings/webcore
${CWD}/src/bun.js/bindings/webcrypto
${CWD}/src/bun.js/bindings/sqlite
${CWD}/src/bun.js/bindings/v8
${CWD}/src/bun.js/modules
${CWD}/src/js/builtins
${CWD}/src/napi
@@ -666,6 +703,7 @@ target_include_directories(${bun} PRIVATE
${CODEGEN_PATH}
${VENDOR_PATH}
${VENDOR_PATH}/picohttpparser
${NODEJS_HEADERS_PATH}/include
)
# --- C/C++ Definitions ---
@@ -699,6 +737,7 @@ target_compile_definitions(${bun} PRIVATE
WITH_BORINGSSL=1
STATICALLY_LINKED_WITH_JavaScriptCore=1
STATICALLY_LINKED_WITH_BMALLOC=1
BUILDING_WITH_CMAKE=1
JSC_OBJC_API_ENABLED=0
BUN_SINGLE_THREADED_PER_VM_ENTRY_SCOPE=1
NAPI_EXPERIMENTAL=ON
@@ -715,6 +754,7 @@ if(DEBUG AND NOT CI)
)
endif()
# --- Compiler options ---
if(NOT WIN32)
@@ -889,6 +929,8 @@ set_target_properties(${bun} PROPERTIES LINK_DEPENDS ${BUN_SYMBOLS_PATH})
# --- WebKit ---
include(SetupWebKit)
if(WIN32)
if(DEBUG)
target_link_libraries(${bun} PRIVATE
@@ -925,38 +967,34 @@ endif()
# --- Dependencies ---
register_link_targets(
TARGET ${bun}
${boringssl}
${brotli}
${cares}
${libarchive}
${libdeflate}
${libuv} ${WIN32}
${lolhtml}
${lshpack}
${mimalloc}
${tinycc}
${sqlite} ${USE_STATIC_SQLITE}
${webkit}
${zlib}
${zstd}
set(BUN_DEPENDENCIES
BoringSSL
Brotli
Cares
LibDeflate
LolHtml
Lshpack
Mimalloc
TinyCC
Zlib
LibArchive # must be loaded after zlib
Zstd
)
register_includes(
TARGET ${bun}
${${picohttpparser}_CWD}
${${boringssl}_CWD}/include
${${brotli}_CWD}/c/include
${${cares}_CWD}/include
${${libarchive}_CWD}/include
${${libdeflate}_CWD}
${${libuv}_CWD}/include ${WIN32}
${${lshpack}_CWD}
${${lshpack}_CWD}/compat/queue ${WIN32}
${${mimalloc}_CWD}/include
${${zlib}_CWD}
)
if(WIN32)
list(APPEND BUN_DEPENDENCIES Libuv)
endif()
if(USE_STATIC_SQLITE)
list(APPEND BUN_DEPENDENCIES SQLite)
endif()
foreach(dependency ${BUN_DEPENDENCIES})
include(Build${dependency})
endforeach()
list(TRANSFORM BUN_DEPENDENCIES TOLOWER OUTPUT_VARIABLE BUN_TARGETS)
add_custom_target(dependencies DEPENDS ${BUN_TARGETS})
if(APPLE)
target_link_libraries(${bun} PRIVATE icucore resolv)
@@ -1002,12 +1040,6 @@ endif()
# --- Packaging ---
if(bunStrip)
set(buns ${bun} ${bunStrip})
else()
set(buns ${bun})
endif()
if(NOT BUN_CPP_ONLY)
if(bunStrip)
register_command(
@@ -1019,15 +1051,15 @@ if(NOT BUN_CPP_ONLY)
"Stripping ${bun}"
COMMAND
${CMAKE_STRIP}
${BUN_EXE_OUTPUT}
${bunExe}
--strip-all
--strip-debug
--discard-all
-o ${BUN_EXE_STRIP_OUTPUT}
-o ${bunStripExe}
CWD
${BUILD_PATH}
OUTPUTS
${BUN_EXE_STRIP_OUTPUT}
${BUILD_PATH}/${bunStripExe}
)
endif()
@@ -1040,9 +1072,9 @@ if(NOT BUN_CPP_ONLY)
"Testing ${bun}"
COMMAND
${CMAKE_COMMAND}
-E env BUN_DEBUG_QUIET_LOGS=1
${BUN_EXE_OUTPUT}
--revision
-E env BUN_DEBUG_QUIET_LOGS=1
${BUILD_PATH}/${bunExe}
--revision
CWD
${BUILD_PATH}
)
@@ -1062,7 +1094,7 @@ if(NOT BUN_CPP_ONLY)
BUN_GARBAGE_COLLECTOR_LEVEL=1
BUN_DEBUG_QUIET_LOGS=1
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING=1
${BUN_EXE_OUTPUT}
${BUILD_PATH}/${bunExe}
${BUN_FEATURES_SCRIPT}
CWD
${BUILD_PATH}
@@ -1071,7 +1103,7 @@ if(NOT BUN_CPP_ONLY)
)
endif()
if(bunStrip AND APPLE)
if(CMAKE_HOST_APPLE AND bunStrip)
register_command(
TARGET
${bun}
@@ -1101,7 +1133,7 @@ if(NOT BUN_CPP_ONLY)
set(bunTriplet bun-${OS}-${ARCH})
endif()
string(REPLACE bun ${bunTriplet} bunPath ${bun})
set(bunFiles ${BUN_EXE_OUTPUT} features.json)
set(bunFiles ${bunExe} features.json)
if(WIN32)
list(APPEND bunFiles ${bun}.pdb)
elseif(APPLE)
@@ -1138,7 +1170,7 @@ if(NOT BUN_CPP_ONLY)
COMMAND
${CMAKE_COMMAND} -E rm -rf ${bunStripPath} ${bunStripPath}.zip
&& ${CMAKE_COMMAND} -E make_directory ${bunStripPath}
&& ${CMAKE_COMMAND} -E copy ${${BUN_EXE_STRIP_OUTPUT}} ${bunStripPath}
&& ${CMAKE_COMMAND} -E copy ${bunStripExe} ${bunStripPath}
&& ${CMAKE_COMMAND} -E tar cfv ${bunStripPath}.zip --format=zip ${bunStripPath}
&& ${CMAKE_COMMAND} -E rm -rf ${bunStripPath}
CWD

View File

@@ -1,32 +1,27 @@
register_vendor_target(cares)
register_repository(
NAME
${cares}
cares
REPOSITORY
c-ares/c-ares
COMMIT
d1722e6e8acaf10eb73fa995798a9cd421d9f85e
)
register_libraries(
TARGET ${cares}
PATH lib
cares
)
register_cmake_project(
register_cmake_command(
TARGET
${cares}
CMAKE_TARGET
cares
TARGETS
c-ares
)
register_cmake_definitions(
TARGET ${cares}
CARES_STATIC=ON
CARES_STATIC_PIC=ON
CARES_SHARED=OFF
CARES_BUILD_TOOLS=OFF
CMAKE_POSITION_INDEPENDENT_CODE=ON
ARGS
-DCARES_STATIC=ON
-DCARES_STATIC_PIC=ON # FORCE_PIC was set to 1, but CARES_STATIC_PIC was set to OFF??
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DCARES_SHARED=OFF
-DCARES_BUILD_TOOLS=OFF # this was set to ON?
LIB_PATH
lib
LIBRARIES
cares
INCLUDES
include
)

View File

@@ -1,61 +1,53 @@
register_vendor_target(libarchive)
register_repository(
NAME
${libarchive}
libarchive
REPOSITORY
libarchive/libarchive
COMMIT
898dc8319355b7e985f68a9819f182aaed61b53a
)
register_libraries(
TARGET ${libarchive}
PATH libarchive
archive
)
register_cmake_project(
register_cmake_command(
TARGET
${libarchive}
CMAKE_TARGET
libarchive
TARGETS
archive_static
ARGS
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DBUILD_SHARED_LIBS=OFF
-DENABLE_INSTALL=OFF
-DENABLE_TEST=OFF
-DENABLE_WERROR=OFF
-DENABLE_BZIP2=OFF
-DENABLE_CAT=OFF
-DENABLE_EXPAT=OFF
-DENABLE_ICONV=OFF
-DENABLE_LIBB2=OFF
-DENABLE_LibGCC=OFF
-DENABLE_LIBXML2=OFF
-DENABLE_LZ4=OFF
-DENABLE_LZMA=OFF
-DENABLE_LZO=OFF
-DENABLE_MBEDTLS=OFF
-DENABLE_NETTLE=OFF
-DENABLE_OPENSSL=OFF
-DENABLE_PCRE2POSIX=OFF
-DENABLE_PCREPOSIX=OFF
-DENABLE_ZSTD=OFF
# libarchive depends on zlib headers, otherwise it will
# spawn a process to compress instead of using the library.
-DENABLE_ZLIB=OFF
-DHAVE_ZLIB_H=ON
-DCMAKE_C_FLAGS="-I${VENDOR_PATH}/zlib"
LIB_PATH
libarchive
LIBRARIES
archive
INCLUDES
include
)
register_cmake_definitions(
TARGET ${libarchive}
CMAKE_POSITION_INDEPENDENT_CODE=ON
BUILD_SHARED_LIBS=OFF
ENABLE_INSTALL=OFF
ENABLE_TEST=OFF
ENABLE_WERROR=OFF
ENABLE_BZIP2=OFF
ENABLE_CAT=OFF
ENABLE_EXPAT=OFF
ENABLE_ICONV=OFF
ENABLE_LIBB2=OFF
ENABLE_LibGCC=OFF
ENABLE_LIBXML2=OFF
ENABLE_LZ4=OFF
ENABLE_LZMA=OFF
ENABLE_LZO=OFF
ENABLE_MBEDTLS=OFF
ENABLE_NETTLE=OFF
ENABLE_OPENSSL=OFF
ENABLE_PCRE2POSIX=OFF
ENABLE_PCREPOSIX=OFF
ENABLE_ZSTD=OFF
ENABLE_ZLIB=OFF
HAVE_ZLIB_H=ON
)
# libarchive depends on zlib headers, otherwise it will
# spawn a process to compress instead of using the library.
register_includes(
TARGET ${libarchive}
${VENDOR_PATH}/${zlib}
)
if(TARGET clone-${zlib})
add_dependencies(${libarchive} clone-${zlib})
# Must be loaded after zlib is defined
if(TARGET clone-zlib)
add_dependencies(libarchive clone-zlib)
endif()

View File

@@ -1,30 +1,24 @@
register_vendor_target(libdeflate)
register_repository(
NAME
${libdeflate}
libdeflate
REPOSITORY
ebiggers/libdeflate
COMMIT
dc76454a39e7e83b68c3704b6e3784654f8d5ac5
)
register_libraries(
TARGET ${libdeflate}
deflatestatic ${WIN32}
deflate ${UNIX}
)
register_cmake_project(
register_cmake_command(
TARGET
${libdeflate}
CMAKE_TARGET
libdeflate
TARGETS
libdeflate_static
)
register_cmake_definitions(
TARGET ${libdeflate}
LIBDEFLATE_BUILD_STATIC_LIB=ON
LIBDEFLATE_BUILD_SHARED_LIB=OFF
LIBDEFLATE_BUILD_GZIP=OFF
ARGS
-DLIBDEFLATE_BUILD_STATIC_LIB=ON
-DLIBDEFLATE_BUILD_SHARED_LIB=OFF
-DLIBDEFLATE_BUILD_GZIP=OFF
LIBRARIES
deflatestatic WIN32
deflate UNIX
INCLUDES
.
)

View File

@@ -1,39 +1,29 @@
register_vendor_target(libuv)
register_repository(
NAME
${libuv}
libuv
REPOSITORY
libuv/libuv
COMMIT
da527d8d2a908b824def74382761566371439003
)
register_libraries(
TARGET ${libuv}
uv_a ${WIN32}
uv ${UNIX}
)
register_cmake_project(
TARGET
${libuv}
CMAKE_TARGET
uv_a
)
register_cmake_definitions(
TARGET ${libuv}
LIBUV_BUILD_SHARED=OFF
LIBUV_BUILD_TESTS=OFF
LIBUV_BUILD_BENCH=OFF
)
if(WIN32)
register_compiler_flags(
TARGET ${libuv}
/DWIN32
/D_WINDOWS
-Wno-int-conversion
)
set(LIBUV_CMAKE_C_FLAGS "/DWIN32 /D_WINDOWS -Wno-int-conversion")
endif()
register_cmake_command(
TARGET
libuv
TARGETS
uv_a
ARGS
-DLIBUV_BUILD_SHARED=OFF
-DLIBUV_BUILD_TESTS=OFF
-DLIBUV_BUILD_BENCH=OFF
-DCMAKE_C_FLAGS=${LIBUV_CMAKE_C_FLAGS}
LIBRARIES
libuv WIN32
uv UNIX
INCLUDES
include
)

View File

@@ -1,50 +1,45 @@
register_vendor_target(lolhtml)
register_repository(
NAME
${lolhtml}
lolhtml
REPOSITORY
cloudflare/lol-html
COMMIT
8d4c273ded322193d017042d1f48df2766b0f88b
)
set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
set(LOLHTML_BUILD_PATH ${BUILD_PATH}/lolhtml)
if(DEBUG)
set(${lolhtml}_BUILD_TYPE debug)
set(LOLHTML_BUILD_TYPE debug)
else()
set(${lolhtml}_BUILD_TYPE release)
set(LOLHTML_BUILD_TYPE release)
endif()
register_libraries(
TARGET ${lolhtml}
PATH ${${lolhtml}_BUILD_TYPE}
VARIABLE ${lolhtml}_LIBRARY
lolhtml
)
set(LOLHTML_LIBRARY ${LOLHTML_BUILD_PATH}/${LOLHTML_BUILD_TYPE}/${CMAKE_STATIC_LIBRARY_PREFIX}lolhtml${CMAKE_STATIC_LIBRARY_SUFFIX})
set(${lolhtml}_BUILD_COMMAND
${CARGO_EXECUTABLE}
build
--target-dir ${${lolhtml}_BUILD_PATH}
set(LOLHTML_BUILD_ARGS
--target-dir ${BUILD_PATH}/lolhtml
)
if(RELEASE)
list(APPEND ${lolhtml}_BUILD_COMMAND --release)
list(APPEND LOLHTML_BUILD_ARGS --release)
endif()
register_command(
TARGET
build-${lolhtml}
lolhtml
CWD
${${lolhtml}_CWD}/c-api
${LOLHTML_CWD}
COMMAND
${${lolhtml}_BUILD_COMMAND}
${CARGO_EXECUTABLE}
build
${LOLHTML_BUILD_ARGS}
ARTIFACTS
${${lolhtml}_LIBRARY}
${LOLHTML_LIBRARY}
)
if(TARGET clone-${lolhtml})
add_dependencies(build-${lolhtml} clone-${lolhtml})
target_link_libraries(${bun} PRIVATE ${LOLHTML_LIBRARY})
if(BUN_LINK_ONLY)
target_sources(${bun} PRIVATE ${LOLHTML_LIBRARY})
endif()
add_dependencies(${lolhtml} build-${lolhtml})

View File

@@ -1,42 +1,33 @@
register_vendor_target(lshpack)
register_repository(
NAME
${lshpack}
lshpack
REPOSITORY
litespeedtech/ls-hpack
COMMIT
3d0f1fc1d6e66a642e7a98c55deb38aa986eb4b0
)
register_libraries(
TARGET ${lshpack}
ls-hpack
)
register_cmake_project(
TARGET
${lshpack}
CMAKE_TARGET
ls-hpack
)
register_cmake_definitions(
TARGET ${lshpack}
SHARED=OFF
LSHPACK_XXH=ON
BUILD_TESTING=OFF
)
# FIXME: There are linking errors when built with non-Release
# Undefined symbols for architecture arm64:
# "___asan_handle_no_return", referenced from:
# _lshpack_enc_get_static_nameval in libls-hpack.a(lshpack.c.o)
# _lshpack_enc_get_static_name in libls-hpack.a(lshpack.c.o)
# _update_hash in libls-hpack.a(lshpack.c.o)
if(NOT CMAKE_BUILD_TYPE STREQUAL "Release")
register_cmake_definitions(
TARGET ${lshpack}
CMAKE_BUILD_TYPE=Release
)
if(WIN32)
set(LSHPACK_INCLUDES . compat/queue)
else()
set(LSHPACK_INCLUDES .)
endif()
register_cmake_command(
TARGET
lshpack
LIBRARIES
ls-hpack
ARGS
-DSHARED=OFF
-DLSHPACK_XXH=ON
# There are linking errors when built with non-Release
# Undefined symbols for architecture arm64:
# "___asan_handle_no_return", referenced from:
# _lshpack_enc_get_static_nameval in libls-hpack.a(lshpack.c.o)
# _lshpack_enc_get_static_name in libls-hpack.a(lshpack.c.o)
# _update_hash in libls-hpack.a(lshpack.c.o)
-DCMAKE_BUILD_TYPE=Release
INCLUDES
${LSHPACK_INCLUDES}
)

View File

@@ -1,63 +1,60 @@
register_vendor_target(mimalloc)
register_repository(
NAME
${mimalloc}
mimalloc
REPOSITORY
oven-sh/mimalloc
COMMIT
4c283af60cdae205df5a872530c77e2a6a307d43
)
set(MIMALLOC_CMAKE_ARGS
-DMI_BUILD_STATIC=ON
-DMI_BUILD_OBJECT=ON
-DMI_BUILD_SHARED=OFF
-DMI_BUILD_TESTS=OFF
-DMI_USE_CXX=ON
-DMI_OVERRIDE=OFF
-DMI_OSX_ZONE=OFF
-DMI_OSX_INTERPOSE=OFF
-DMI_SKIP_COLLECT_ON_EXIT=ON
)
if(DEBUG)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_DEBUG_FULL=ON)
endif()
if(ENABLE_VALGRIND)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON)
endif()
if(WIN32)
if(DEBUG)
set(MIMALLOC_LIBRARY mimalloc-static-debug)
else()
set(MIMALLOC_LIBRARY mimalloc-static)
endif()
elseif(DEBUG)
set(MIMALLOC_LIBRARY mimalloc-debug)
else()
set(MIMALLOC_LIBRARY mimalloc)
endif()
# Workaround for linker issue on macOS and Linux x64
# https://github.com/microsoft/mimalloc/issues/512
if(APPLE OR (LINUX AND NOT DEBUG))
register_libraries(
TARGET ${mimalloc}
PATH CMakeFiles/mimalloc-obj.dir/src
static.c.o
)
else()
register_libraries(
TARGET ${mimalloc}
mimalloc-static-debug ${WIN32} AND ${DEBUG}
mimalloc-static ${WIN32} AND ${RELEASE}
mimalloc-debug ${UNIX} AND ${DEBUG}
mimalloc ${UNIX} AND ${RELEASE}
)
set(MIMALLOC_LIBRARY CMakeFiles/mimalloc-obj.dir/src/static.c.o)
endif()
register_cmake_project(
register_cmake_command(
TARGET
${mimalloc}
CMAKE_TARGETS
mimalloc
TARGETS
mimalloc-static
mimalloc-obj
ARGS
${MIMALLOC_CMAKE_ARGS}
LIBRARIES
${MIMALLOC_LIBRARY}
INCLUDES
include
)
register_cmake_definitions(
TARGET ${mimalloc}
MI_BUILD_STATIC=ON
MI_BUILD_OBJECT=ON
MI_BUILD_SHARED=OFF
MI_BUILD_TESTS=OFF
MI_USE_CXX=ON
MI_OVERRIDE=OFF
MI_OSX_ZONE=OFF
MI_OSX_INTERPOSE=OFF
MI_SKIP_COLLECT_ON_EXIT=ON
)
if(ENABLE_ASSERTIONS)
register_cmake_definitions(
TARGET ${mimalloc}
MI_DEBUG_FULL=ON
MI_SHOW_ERRORS=ON
)
if(ENABLE_VALGRIND)
register_cmake_definitions(
TARGET ${mimalloc}
MI_VALGRIND=ON
)
endif()
endif()

View File

@@ -1,12 +0,0 @@
register_vendor_target(picohttpparser)
register_repository(
NAME
${picohttpparser}
REPOSITORY
h2o/picohttpparser
COMMIT
066d2b1e9ab820703db0837a7255d92d30f0c9f5
OUTPUTS
picohttpparser.c
)

View File

@@ -1,13 +1,10 @@
register_vendor_target(sqlite)
register_libraries(
TARGET ${sqlite}
sqlite3
)
register_cmake_project(
register_cmake_command(
TARGET
${sqlite}
sqlite
CWD
${CWD}/src/bun.js/bindings/sqlite
LIBRARIES
sqlite3
INCLUDES
.
)

View File

@@ -1,20 +1,15 @@
register_vendor_target(tinycc)
register_repository(
NAME
${tinycc}
tinycc
REPOSITORY
oven-sh/tinycc
COMMIT
29985a3b59898861442fa3b43f663fc1af2591d7
)
register_libraries(
TARGET ${tinycc}
tcc
)
register_cmake_project(
register_cmake_command(
TARGET
${tinycc}
tinycc
LIBRARIES
tcc
)

View File

@@ -1,88 +0,0 @@
optionx(WEBKIT_LOCAL BOOL "If a local version of WebKit should be used instead of downloading" DEFAULT OFF)
optionx(WEBKIT_VERSION STRING "The version of WebKit to use" DEFAULT "4a2db3254a9535949a5d5380eb58cf0f77c8e15a")
if(WEBKIT_LOCAL)
set(DEFAULT_WEBKIT_PATH ${VENDOR_PATH}/WebKit/WebKitBuild/${CMAKE_BUILD_TYPE})
else()
set(DEFAULT_WEBKIT_PATH ${VENDOR_PATH}/webkit)
endif()
optionx(WEBKIT_PATH FILEPATH "The path to the WebKit directory" DEFAULT ${DEFAULT_WEBKIT_PATH})
set(WEBKIT_INCLUDE_PATH ${WEBKIT_PATH}/include)
set(WEBKIT_LIB_PATH ${WEBKIT_PATH}/lib)
register_vendor_target(webkit)
register_libraries(
TARGET ${webkit}
PATH ${WEBKIT_PATH}/lib
JavaScriptCore
WTF
bmalloc ${LINUX}
)
if(WIN32)
register_libraries(
TARGET ${webkit}
PATH ${WEBKIT_PATH}/lib
sicudt ${RELEASE}
sicudtd ${DEBUG}
sicuin ${RELEASE}
sicuind ${DEBUG}
sicuuc ${RELEASE}
sicuucd ${DEBUG}
)
endif()
if(WEBKIT_LOCAL)
# Must be built separately; in the future this can be integrated into the build process
register_target(build-webkit)
else()
if(WIN32)
set(WEBKIT_OS "windows")
elseif(APPLE)
set(WEBKIT_OS "macos")
elseif(LINUX)
set(WEBKIT_OS "linux")
else()
unsupported(CMAKE_SYSTEM_NAME)
endif()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
set(WEBKIT_ARCH "arm64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|AMD64|x86_64|X86_64|x64|X64")
set(WEBKIT_ARCH "amd64")
else()
unsupported(CMAKE_SYSTEM_PROCESSOR)
endif()
if(DEBUG)
set(WEBKIT_SUFFIX "-debug")
elseif(ENABLE_LTO AND NOT WIN32)
set(WEBKIT_SUFFIX "-lto")
else()
set(WEBKIT_SUFFIX "")
endif()
set(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX})
set(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_VERSION}/${WEBKIT_NAME}.tar.gz)
get_libraries(${webkit} WEBKIT_LIBRARIES)
register_command(
TARGET
clone-${webkit}
COMMENT
"Downloading ${WEBKIT_NAME}"
COMMAND
${CMAKE_COMMAND}
-DDOWNLOAD_PATH=${WEBKIT_PATH}
-DDOWNLOAD_URL=${WEBKIT_DOWNLOAD_URL}
-P ${CWD}/cmake/scripts/DownloadUrl.cmake
OUTPUTS
${WEBKIT_PATH}/package.json
${WEBKIT_LIBRARIES}
)
register_outputs(TARGET clone-${webkit} ${WEBKIT_PATH})
endif()

View File

@@ -1,38 +1,40 @@
register_vendor_target(zlib)
register_repository(
NAME
${zlib}
zlib
REPOSITORY
cloudflare/zlib
COMMIT
886098f3f339617b4243b286f5ed364b9989e245
)
register_libraries(
TARGET ${zlib}
z ${UNIX}
zlib ${WIN32} AND ${RELEASE}
zlibd ${WIN32} AND ${DEBUG}
)
register_cmake_project(
TARGET
${zlib}
CMAKE_TARGET
zlib
)
register_cmake_definitions(
TARGET ${zlib}
BUILD_SHARED_LIBS=OFF
BUILD_EXAMPLES=OFF
)
# https://gitlab.kitware.com/cmake/cmake/-/issues/25755
if(APPLE)
register_compiler_flags(
TARGET ${zlib}
-fno-define-target-os-macros
)
set(ZLIB_CMAKE_C_FLAGS "-fno-define-target-os-macros")
set(ZLIB_CMAKE_CXX_FLAGS "-fno-define-target-os-macros")
endif()
if(WIN32)
if(DEBUG)
set(ZLIB_LIBRARY "zlibd")
else()
set(ZLIB_LIBRARY "zlib")
endif()
else()
set(ZLIB_LIBRARY "z")
endif()
register_cmake_command(
TARGET
zlib
TARGETS
zlib
ARGS
-DBUILD_SHARED_LIBS=OFF
-DBUILD_EXAMPLES=OFF
"-DCMAKE_C_FLAGS=${ZLIB_CMAKE_C_FLAGS}"
"-DCMAKE_CXX_FLAGS=${ZLIB_CMAKE_CXX_FLAGS}"
LIBRARIES
${ZLIB_LIBRARY}
INCLUDES
.
)

View File

@@ -1,34 +1,26 @@
register_vendor_target(zstd)
register_repository(
NAME
${zstd}
zstd
REPOSITORY
facebook/zstd
COMMIT
794ea1b0afca0f020f4e57b6732332231fb23c70
)
register_libraries(
TARGET ${zstd}
PATH lib
zstd_static ${WIN32}
zstd ${UNIX}
)
register_cmake_project(
register_cmake_command(
TARGET
${zstd}
CMAKE_TARGET
zstd
TARGETS
libzstd_static
CMAKE_PATH
build/cmake
)
register_cmake_definitions(
TARGET ${zstd}
ZSTD_BUILD_STATIC=ON
ZSTD_BUILD_PROGRAMS=OFF
ZSTD_BUILD_TESTS=OFF
ZSTD_BUILD_CONTRIB=OFF
ARGS
-Sbuild/cmake
-DZSTD_BUILD_STATIC=ON
-DZSTD_BUILD_PROGRAMS=OFF
-DZSTD_BUILD_TESTS=OFF
-DZSTD_BUILD_CONTRIB=OFF
LIB_PATH
lib
LIBRARIES
zstd_static WIN32
zstd UNIX
)

View File

@@ -1,4 +1,6 @@
if(NOT BUN_LINK_ONLY)
optionx(BUILDKITE_CACHE BOOL "If the build can use Buildkite caches, even if not running in Buildkite" DEFAULT ${BUILDKITE})
if(NOT BUILDKITE_CACHE OR NOT BUN_LINK_ONLY)
return()
endif()
@@ -31,7 +33,7 @@ if(NOT BUILDKITE_BUILD_ID)
endif()
setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_ID})
setx(BUILDKITE_BUILD_PATH ${BUILDKITE_BUILDS_PATH}/${BUILDKITE_BUILD_ID})
setx(BUILDKITE_BUILD_PATH ${BUILDKITE_BUILDS_PATH}/builds/${BUILDKITE_BUILD_ID})
file(
DOWNLOAD ${BUILDKITE_BUILD_URL}
@@ -123,14 +125,15 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
set(BUILDKITE_DOWNLOAD_COMMAND curl -L -o ${BUILDKITE_ARTIFACT_PATH} ${BUILDKITE_ARTIFACTS_URL}/${BUILDKITE_ARTIFACT_ID})
endif()
message(STATUS "Downloading ${BUILD_PATH}/${BUILDKITE_ARTIFACT_PATH}")
get_filename_component(BUILDKITE_ARTIFACT_NAME ${BUILDKITE_ARTIFACT_PATH} NAME_WE)
register_command(
TARGET download-${BUILDKITE_ARTIFACT_NAME}
COMMENT "Downloading ${BUILDKITE_ARTIFACT_PATH}"
COMMAND ${BUILDKITE_DOWNLOAD_COMMAND}
CWD ${BUILD_PATH}
OUTPUTS ${BUILD_PATH}/${BUILDKITE_ARTIFACT_PATH}
add_custom_command(
COMMENT
"Downloading ${BUILDKITE_ARTIFACT_PATH}"
VERBATIM COMMAND
${BUILDKITE_DOWNLOAD_COMMAND}
WORKING_DIRECTORY
${BUILD_PATH}
OUTPUT
${BUILD_PATH}/${BUILDKITE_ARTIFACT_PATH}
)
endforeach()

View File

@@ -14,8 +14,11 @@ find_command(
ON
)
setx(CMAKE_C_COMPILER_LAUNCHER ${CCACHE_PROGRAM})
setx(CMAKE_CXX_COMPILER_LAUNCHER ${CCACHE_PROGRAM})
set(CCACHE_ARGS CMAKE_C_COMPILER_LAUNCHER CMAKE_CXX_COMPILER_LAUNCHER)
foreach(arg ${CCACHE_ARGS})
setx(${arg} ${CCACHE_PROGRAM})
list(APPEND CMAKE_ARGS -D${arg}=${${arg}})
endforeach()
setenv(CCACHE_DIR ${CACHE_PATH}/ccache)
setenv(CCACHE_BASEDIR ${CWD})

View File

@@ -0,0 +1,38 @@
find_command(
VARIABLE
GIT_PROGRAM
COMMAND
git
REQUIRED
OFF
)
if(NOT GIT_PROGRAM)
return()
endif()
set(GIT_DIFF_COMMAND ${GIT_PROGRAM} diff --no-color --name-only --diff-filter=AMCR)
execute_process(
COMMAND
${GIT_DIFF_COMMAND}
WORKING_DIRECTORY
${CWD}
OUTPUT_STRIP_TRAILING_WHITESPACE
OUTPUT_VARIABLE
GIT_DIFF
ERROR_STRIP_TRAILING_WHITESPACE
ERROR_VARIABLE
GIT_DIFF_ERROR
RESULT_VARIABLE
GIT_DIFF_RESULT
)
if(NOT GIT_DIFF_RESULT EQUAL 0)
message(${WARNING} "Command failed: ${GIT_DIFF_COMMAND} ${GIT_DIFF_ERROR}")
return()
endif()
string(REPLACE "\n" ";" GIT_CHANGED_SOURCES "${GIT_DIFF}")
list(TRANSFORM GIT_CHANGED_SOURCES PREPEND ${CWD}/)
list(LENGTH GIT_CHANGED_SOURCES GIT_CHANGED_SOURCES_COUNT)

View File

@@ -1,3 +1,9 @@
optionx(SKIP_LLVM BOOL "If LLVM setup should be skipped" DEFAULT OFF)
if(SKIP_LLVM)
return()
endif()
if(CMAKE_HOST_WIN32 OR CMAKE_HOST_APPLE)
set(DEFAULT_LLVM_VERSION "18.1.8")
else()
@@ -33,6 +39,7 @@ macro(find_llvm_command VARIABLE COMMAND)
PATHS ${LLVM_PATH}
VERSION ${LLVM_VERSION}
)
list(APPEND CMAKE_ARGS -D${VARIABLE}=${${VARIABLE}})
endmacro()
macro(find_llvm_command_no_version VARIABLE COMMAND)
@@ -42,6 +49,7 @@ macro(find_llvm_command_no_version VARIABLE COMMAND)
PATHS ${LLVM_PATH}
REQUIRED ON
)
list(APPEND CMAKE_ARGS -D${VARIABLE}=${${VARIABLE}})
endmacro()
if(WIN32)

View File

@@ -52,3 +52,8 @@ if(CMAKE_OSX_SYSROOT_ERROR)
endif()
optionx(CMAKE_OSX_SYSROOT STRING "The macOS SDK path to target" DEFAULT ${DEFAULT_CMAKE_OSX_SYSROOT})
list(APPEND CMAKE_ARGS
-DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET}
-DCMAKE_OSX_SYSROOT=${CMAKE_OSX_SYSROOT}
)

View File

@@ -0,0 +1,86 @@
option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION 4a2db3254a9535949a5d5380eb58cf0f77c8e15a)
endif()
if(WEBKIT_LOCAL)
set(DEFAULT_WEBKIT_PATH ${VENDOR_PATH}/WebKit/WebKitBuild/${CMAKE_BUILD_TYPE})
else()
set(DEFAULT_WEBKIT_PATH ${CACHE_PATH}/webkit-${WEBKIT_VERSION})
endif()
option(WEBKIT_PATH "The path to the WebKit directory")
if(NOT WEBKIT_PATH)
set(WEBKIT_PATH ${DEFAULT_WEBKIT_PATH})
endif()
set(WEBKIT_INCLUDE_PATH ${WEBKIT_PATH}/include)
set(WEBKIT_LIB_PATH ${WEBKIT_PATH}/lib)
if(WEBKIT_LOCAL)
if(EXISTS ${WEBKIT_PATH}/cmakeconfig.h)
# You may need to run:
# make jsc-compile-debug jsc-copy-headers
include_directories(
${WEBKIT_PATH}
${WEBKIT_PATH}/JavaScriptCore/Headers/JavaScriptCore
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders
${WEBKIT_PATH}/bmalloc/Headers
${WEBKIT_PATH}/WTF/Headers
)
endif()
# After this point, only prebuilt WebKit is supported
return()
endif()
if(EXISTS ${WEBKIT_PATH}/package.json)
file(READ ${WEBKIT_PATH}/package.json WEBKIT_PACKAGE_JSON)
if(WEBKIT_PACKAGE_JSON MATCHES ${WEBKIT_VERSION})
return()
endif()
endif()
if(WIN32)
set(WEBKIT_OS "windows")
elseif(APPLE)
set(WEBKIT_OS "macos")
elseif(UNIX)
set(WEBKIT_OS "linux")
else()
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_SYSTEM_NAME}")
endif()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
set(WEBKIT_ARCH "arm64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
set(WEBKIT_ARCH "amd64")
else()
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif()
if(DEBUG)
set(WEBKIT_SUFFIX "-debug")
elseif(ENABLE_LTO AND NOT WIN32)
set(WEBKIT_SUFFIX "-lto")
else()
set(WEBKIT_SUFFIX "")
endif()
set(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX})
set(WEBKIT_FILENAME ${WEBKIT_NAME}.tar.gz)
setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_VERSION}/${WEBKIT_FILENAME})
file(DOWNLOAD ${WEBKIT_DOWNLOAD_URL} ${CACHE_PATH}/${WEBKIT_FILENAME} SHOW_PROGRESS)
file(ARCHIVE_EXTRACT INPUT ${CACHE_PATH}/${WEBKIT_FILENAME} DESTINATION ${CACHE_PATH} TOUCH)
file(REMOVE ${CACHE_PATH}/${WEBKIT_FILENAME})
file(REMOVE_RECURSE ${WEBKIT_PATH})
file(RENAME ${CACHE_PATH}/bun-webkit ${WEBKIT_PATH})
if(APPLE)
file(REMOVE_RECURSE ${WEBKIT_INCLUDE_PATH}/unicode)
endif()

View File

@@ -16,6 +16,8 @@ else()
unsupported(CMAKE_SYSTEM_NAME)
endif()
optionx(ZIG_VERSION STRING "The zig version of the compiler to download" DEFAULT "0.13.0")
optionx(ZIG_COMMIT STRING "The zig commit to use in oven-sh/zig" DEFAULT "131a009ba2eb127a3447d05b9e12f710429aa5ee")
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
if(CMAKE_BUILD_TYPE STREQUAL "Release")
@@ -64,12 +66,18 @@ set(CMAKE_ZIG_FLAGS
)
register_command(
TARGET
clone-zig
COMMENT
"Downloading zig"
COMMAND
${CMAKE_COMMAND}
-DZIG_PATH=${ZIG_PATH}
-DZIG_VERSION=${ZIG_VERSION}
-DZIG_COMMIT=${ZIG_COMMIT}
-P ${CWD}/cmake/scripts/DownloadZig.cmake
SOURCES
${CWD}/cmake/scripts/DownloadZig.cmake
OUTPUTS
${ZIG_EXECUTABLE}
)

262
docs/api/color.md Normal file
View File

@@ -0,0 +1,262 @@
`Bun.color(input, outputFormat?)` leverages Bun's CSS parser to parse, normalize, and convert colors from user input to a variety of output formats, including:
| Format | Example |
| ------------ | -------------------------------- |
| `"css"` | `"red"` |
| `"ansi"` | `"\x1b[38;2;255;0;0m"` |
| `"ansi-16"` | `"\x1b[38;5;\tm"` |
| `"ansi-256"` | `"\x1b[38;5;196m"` |
| `"ansi-16m"` | `"\x1b[38;2;255;0;0m"` |
| `"number"` | `0x1a2b3c` |
| `"rgb"` | `"rgb(255, 99, 71)"` |
| `"rgba"` | `"rgba(255, 99, 71, 0.5)"` |
| `"hsl"` | `"hsl(120, 50%, 50%)"` |
| `"hex"` | `"#1a2b3c"` |
| `"HEX"` | `"#1A2B3C"` |
| `"{rgb}"` | `{ r: 255, g: 99, b: 71 }` |
| `"{rgba}"` | `{ r: 255, g: 99, b: 71, a: 1 }` |
| `"[rgb]"` | `[ 255, 99, 71 ]` |
| `"[rgba]"` | `[ 255, 99, 71, 255]` |
There are many different ways to use this API:
- Validate and normalize colors to persist in a database (`number` is the most database-friendly)
- Convert colors to different formats
- Colorful logging beyond the 16 colors many tools use today (use `ansi` if you don't want to detect what the user's terminal supports yourself; otherwise use `ansi-16`, `ansi-256`, or `ansi-16m` depending on how many colors the terminal supports)
- Format colors for use in CSS injected into HTML
- Get the `r`, `g`, `b`, and `a` color components as JavaScript objects or numbers from a CSS color string
You can think of this as an alternative to the popular npm packages [`color`](https://github.com/Qix-/color) and [`tinycolor2`](https://github.com/bgrins/TinyColor), except it fully supports parsing CSS color strings, has zero dependencies, and is built directly into Bun.
### Flexible input
You can pass in any of the following:
- Standard CSS color names like `"red"`
- Numbers like `0xff0000`
- Hex strings like `"#f00"`
- RGB strings like `"rgb(255, 0, 0)"`
- RGBA strings like `"rgba(255, 0, 0, 1)"`
- HSL strings like `"hsl(0, 100%, 50%)"`
- HSLA strings like `"hsla(0, 100%, 50%, 1)"`
- RGB objects like `{ r: 255, g: 0, b: 0 }`
- RGBA objects like `{ r: 255, g: 0, b: 0, a: 1 }`
- RGB arrays like `[255, 0, 0]`
- RGBA arrays like `[255, 0, 0, 255]`
- LAB strings like `"lab(50% 50% 50%)"`
- ... anything else that CSS can parse as a single color value
### Format colors as CSS
The `"css"` format outputs valid CSS for use in stylesheets, inline styles, CSS variables, css-in-js, etc. It returns the most compact representation of the color as a string.
```ts
Bun.color("red", "css"); // "red"
Bun.color(0xff0000, "css"); // "#f000"
Bun.color("#f00", "css"); // "red"
Bun.color("#ff0000", "css"); // "red"
Bun.color("rgb(255, 0, 0)", "css"); // "red"
Bun.color("rgba(255, 0, 0, 1)", "css"); // "red"
Bun.color("hsl(0, 100%, 50%)", "css"); // "red"
Bun.color("hsla(0, 100%, 50%, 1)", "css"); // "red"
Bun.color({ r: 255, g: 0, b: 0 }, "css"); // "red"
Bun.color({ r: 255, g: 0, b: 0, a: 1 }, "css"); // "red"
Bun.color([255, 0, 0], "css"); // "red"
Bun.color([255, 0, 0, 255], "css"); // "red"
```
If the input is unknown or fails to parse, `Bun.color` returns `null`.
### Format colors as ANSI (for terminals)
The `"ansi"` format outputs ANSI escape codes for use in terminals to make text colorful.
```ts
Bun.color("red", "ansi"); // "\u001b[38;2;255;0;0m"
Bun.color(0xff0000, "ansi"); // "\u001b[38;2;255;0;0m"
Bun.color("#f00", "ansi"); // "\u001b[38;2;255;0;0m"
Bun.color("#ff0000", "ansi"); // "\u001b[38;2;255;0;0m"
Bun.color("rgb(255, 0, 0)", "ansi"); // "\u001b[38;2;255;0;0m"
Bun.color("rgba(255, 0, 0, 1)", "ansi"); // "\u001b[38;2;255;0;0m"
Bun.color("hsl(0, 100%, 50%)", "ansi"); // "\u001b[38;2;255;0;0m"
Bun.color("hsla(0, 100%, 50%, 1)", "ansi"); // "\u001b[38;2;255;0;0m"
Bun.color({ r: 255, g: 0, b: 0 }, "ansi"); // "\u001b[38;2;255;0;0m"
Bun.color({ r: 255, g: 0, b: 0, a: 1 }, "ansi"); // "\u001b[38;2;255;0;0m"
Bun.color([255, 0, 0], "ansi"); // "\u001b[38;2;255;0;0m"
Bun.color([255, 0, 0, 255], "ansi"); // "\u001b[38;2;255;0;0m"
```
This gets the color depth of stdout and automatically chooses one of `"ansi-16m"`, `"ansi-256"`, `"ansi-16"` based on the environment variables. If stdout doesn't support any form of ANSI color, it returns an empty string. As with the rest of Bun's color API, if the input is unknown or fails to parse, it returns `null`.
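For example, to colorize a single log line you can prepend the escape returned by `Bun.color` and append a standard ANSI reset (the reset code `\x1b[0m` is plain ANSI, not part of `Bun.color`):
```ts
const purple = Bun.color("#7f00ff", "ansi") ?? ""; // null (bad input) becomes "", and "" also means no color support
const reset = "\x1b[0m";
console.log(`${purple}hello${reset}`);
```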
#### 24-bit ANSI colors (`ansi-16m`)
The `"ansi-16m"` format outputs 24-bit ANSI colors for use in terminals to make text colorful. 24-bit color means you can display 16 million colors on supported terminals, and requires a modern terminal that supports it.
This converts the input color to RGBA, and then outputs that as an ANSI color.
```ts
Bun.color("red", "ansi-16m"); // "\x1b[38;2;255;0;0m"
Bun.color(0xff0000, "ansi-16m"); // "\x1b[38;2;255;0;0m"
Bun.color("#f00", "ansi-16m"); // "\x1b[38;2;255;0;0m"
Bun.color("#ff0000", "ansi-16m"); // "\x1b[38;2;255;0;0m"
```
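Under the hood this is the standard 24-bit SGR escape sequence, so the output is equivalent to assembling the string from the color's channels yourself (shown purely as an illustration):
```ts
const { r, g, b } = Bun.color("red", "{rgba}")!;
const ansi = `\x1b[38;2;${r};${g};${b}m`; // same string as Bun.color("red", "ansi-16m")
```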
#### 256 ANSI colors (`ansi-256`)
The `"ansi-256"` format approximates the input color to the nearest of the 256 ANSI colors supported by some terminals.
```ts
Bun.color("red", "ansi-256"); // "\u001b[38;5;196m"
Bun.color(0xff0000, "ansi-256"); // "\u001b[38;5;196m"
Bun.color("#f00", "ansi-256"); // "\u001b[38;5;196m"
Bun.color("#ff0000", "ansi-256"); // "\u001b[38;5;196m"
```
To convert from RGBA to one of the 256 ANSI colors, we ported the algorithm that [`tmux` uses](https://github.com/tmux/tmux/blob/dae2868d1227b95fd076fb4a5efa6256c7245943/colour.c#L44-L55).
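As a rough TypeScript sketch of that idea (this is not Bun's internal code, and the helper name `nearestAnsi256` is made up for illustration), the approximation picks whichever is closer to the input: the nearest entry in the 6×6×6 color cube or the nearest entry in the grayscale ramp:
```ts
function nearestAnsi256(r: number, g: number, b: number): number {
  const levels = [0, 95, 135, 175, 215, 255];
  const toCube = (v: number) => (v < 48 ? 0 : v < 114 ? 1 : Math.floor((v - 35) / 40));
  const qr = toCube(r), qg = toCube(g), qb = toCube(b);
  // Candidate 1: nearest color in the 6x6x6 cube (palette indices 16..231)
  const cube = { index: 16 + 36 * qr + 6 * qg + qb, r: levels[qr], g: levels[qg], b: levels[qb] };
  // Candidate 2: nearest entry in the grayscale ramp (palette indices 232..255)
  const avg = Math.round((r + g + b) / 3);
  const grayLevel = avg > 238 ? 23 : Math.max(0, Math.floor((avg - 3) / 10));
  const grayValue = 8 + 10 * grayLevel;
  const gray = { index: 232 + grayLevel, r: grayValue, g: grayValue, b: grayValue };
  const dist = (c: { r: number; g: number; b: number }) => (c.r - r) ** 2 + (c.g - g) ** 2 + (c.b - b) ** 2;
  return dist(gray) < dist(cube) ? gray.index : cube.index;
}
nearestAnsi256(255, 0, 0); // 196, matching the "\u001b[38;5;196m" examples above
```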
#### 16 ANSI colors (`ansi-16`)
The `"ansi-16"` format approximates the input color to the nearest of the 16 ANSI colors supported by most terminals.
```ts
Bun.color("red", "ansi-16"); // "\u001b[38;5;\tm"
Bun.color(0xff0000, "ansi-16"); // "\u001b[38;5;\tm"
Bun.color("#f00", "ansi-16"); // "\u001b[38;5;\tm"
Bun.color("#ff0000", "ansi-16"); // "\u001b[38;5;\tm"
```
This works by first converting the input to a 24-bit RGB color space, then to `ansi-256`, and then we convert that to the nearest 16 ANSI color.
### Format colors as numbers
The `"number"` format outputs a 24-bit number for use in databases, configuration, or any other use case where a compact representation of the color is desired.
```ts
Bun.color("red", "number"); // 16711680
Bun.color(0xff0000, "number"); // 16711680
Bun.color({ r: 255, g: 0, b: 0 }, "number"); // 16711680
Bun.color([255, 0, 0], "number"); // 16711680
Bun.color("rgb(255, 0, 0)", "number"); // 16711680
Bun.color("rgba(255, 0, 0, 1)", "number"); // 16711680
Bun.color("hsl(0, 100%, 50%)", "number"); // 16711680
Bun.color("hsla(0, 100%, 50%, 1)", "number"); // 16711680
```
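As the examples show, the returned value is the three channels packed into a 24-bit `0xRRGGBB` integer, so you can unpack it with bit math (a quick illustration, not a separate Bun API):
```ts
const n = Bun.color("red", "number")!; // 16711680, i.e. 0xff0000
const r = (n >> 16) & 0xff; // 255
const g = (n >> 8) & 0xff; // 0
const b = n & 0xff; // 0
```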
### Get the red, green, blue, and alpha channels
You can use the `"{rgba}"`, `"{rgb}"`, `"[rgba]"` and `"[rgb]"` formats to get the red, green, blue, and alpha channels as objects or arrays.
#### `{rgba}` object
The `"{rgba}"` format outputs an object with the red, green, blue, and alpha channels.
```ts
type RGBAObject = {
// 0 - 255
r: number;
// 0 - 255
g: number;
// 0 - 255
b: number;
// 0 - 1
a: number;
};
```
Example:
```ts
Bun.color("hsl(0, 0%, 50%)", "{rgba}"); // { r: 128, g: 128, b: 128, a: 1 }
Bun.color("red", "{rgba}"); // { r: 255, g: 0, b: 0, a: 1 }
Bun.color(0xff0000, "{rgba}"); // { r: 255, g: 0, b: 0, a: 1 }
Bun.color({ r: 255, g: 0, b: 0 }, "{rgba}"); // { r: 255, g: 0, b: 0, a: 1 }
Bun.color([255, 0, 0], "{rgba}"); // { r: 255, g: 0, b: 0, a: 1 }
```
To behave similarly to CSS, the `a` channel is a decimal number between `0` and `1`.
The `"{rgb}"` format is similar, but it doesn't include the alpha channel.
```ts
Bun.color("hsl(0, 0%, 50%)", "{rgb}"); // { r: 128, g: 128, b: 128 }
Bun.color("red", "{rgb}"); // { r: 255, g: 0, b: 0 }
Bun.color(0xff0000, "{rgb}"); // { r: 255, g: 0, b: 0 }
Bun.color({ r: 255, g: 0, b: 0 }, "{rgb}"); // { r: 255, g: 0, b: 0 }
Bun.color([255, 0, 0], "{rgb}"); // { r: 255, g: 0, b: 0 }
```
#### `[rgba]` array
The `"[rgba]"` format outputs an array with the red, green, blue, and alpha channels.
```ts
// All values are 0 - 255
type RGBAArray = [number, number, number, number];
```
Example:
```ts
Bun.color("hsl(0, 0%, 50%)", "[rgba]"); // [128, 128, 128, 255]
Bun.color("red", "[rgba]"); // [255, 0, 0, 255]
Bun.color(0xff0000, "[rgba]"); // [255, 0, 0, 255]
Bun.color({ r: 255, g: 0, b: 0 }, "[rgba]"); // [255, 0, 0, 255]
Bun.color([255, 0, 0], "[rgba]"); // [255, 0, 0, 255]
```
Unlike the `"{rgba}"` format, the alpha channel is an integer between `0` and `255`. This is useful for typed arrays where each channel must be the same underlying type.
The `"[rgb]"` format is similar, but it doesn't include the alpha channel.
```ts
Bun.color("hsl(0, 0%, 50%)", "[rgb]"); // [128, 128, 128]
Bun.color("red", "[rgb]"); // [255, 0, 0]
Bun.color(0xff0000, "[rgb]"); // [255, 0, 0]
Bun.color({ r: 255, g: 0, b: 0 }, "[rgb]"); // [255, 0, 0]
Bun.color([255, 0, 0], "[rgb]"); // [255, 0, 0]
```
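Because every channel is the same integer type, the array form drops straight into a typed array, for example as one RGBA pixel of an image buffer (a small usage sketch):
```ts
const rgba = Bun.color("red", "[rgba]")!; // [255, 0, 0, 255]
const pixel = new Uint8ClampedArray(rgba); // Uint8ClampedArray(4) [ 255, 0, 0, 255 ]
```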
### Format colors as hex strings
The `"hex"` format outputs a lowercase hex string for use in CSS or other contexts.
```ts
Bun.color("hsl(0, 0%, 50%)", "hex"); // "#808080"
Bun.color("red", "hex"); // "#ff0000"
Bun.color(0xff0000, "hex"); // "#ff0000"
Bun.color({ r: 255, g: 0, b: 0 }, "hex"); // "#ff0000"
Bun.color([255, 0, 0], "hex"); // "#ff0000"
```
The `"HEX"` format is similar, but it outputs a hex string with uppercase letters instead of lowercase letters.
```ts
Bun.color("hsl(0, 0%, 50%)", "HEX"); // "#808080"
Bun.color("red", "HEX"); // "#FF0000"
Bun.color(0xff0000, "HEX"); // "#FF0000"
Bun.color({ r: 255, g: 0, b: 0 }, "HEX"); // "#FF0000"
Bun.color([255, 0, 0], "HEX"); // "#FF0000"
```
### Bundle-time client-side color formatting
Like many of Bun's APIs, you can use macros to invoke `Bun.color` at bundle-time for use in client-side JavaScript builds:
```ts#client-side.ts
import { color } from "bun" with { type: "macro" };
console.log(color("#f00", "css"));
```
Then, build the client-side code:
```sh
bun build ./client-side.ts
```
This will output the following to `client-side.js`:
```js
// client-side.ts
console.log("red");
```

View File

@@ -104,6 +104,12 @@ Bun implements the following globals.
---
- [`CompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/CompressionStream)
- Web
- Also [`DecompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream)
---
- [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console)
- Web
- &nbsp;
@@ -140,6 +146,12 @@ Bun implements the following globals.
---
- [`DecompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream)
- Web
- Also [`CompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/CompressionStream)
---
- [`Event`](https://developer.mozilla.org/en-US/docs/Web/API/Event)
- Web
- Also [`ErrorEvent`](https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent) [`CloseEvent`](https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent) [`MessageEvent`](https://developer.mozilla.org/en-US/docs/Web/API/MessageEvent).

View File

@@ -206,4 +206,42 @@ console.log(arr);
// => Uint8Array(32) [ 185, 77, 39, 185, 147, ... ]
```
<!-- Bun.sha; -->
### HMAC in `Bun.CryptoHasher`
`Bun.CryptoHasher` can be used to compute HMAC digests. To do so, pass the key to the constructor.
```ts
const hasher = new Bun.CryptoHasher("sha256", "secret-key");
hasher.update("hello world");
console.log(hasher.digest("hex"));
// => "095d5a21fe6d0646db223fdf3de6436bb8dfb2fab0b51677ecf6441fcf5f2a67"
```
When using HMAC, a more limited set of algorithms are supported:
- `"blake2b512"`
- `"md5"`
- `"sha1"`
- `"sha224"`
- `"sha256"`
- `"sha384"`
- `"sha512-224"`
- `"sha512-256"`
- `"sha512"`
Unlike the non-HMAC `Bun.CryptoHasher`, the HMAC `Bun.CryptoHasher` instance is not reset after `.digest()` is called, and attempting to use the same instance again will throw an error.
Other methods like `.copy()` and `.update()` are supported (as long as they are called before `.digest()`), but methods that finalize the hasher, like `.digest()`, can only be called once.
```ts
const hasher = new Bun.CryptoHasher("sha256", "secret-key");
hasher.update("hello world");
const copy = hasher.copy();
copy.update("!");
console.log(copy.digest("hex"));
// => "3840176c3d8923f59ac402b7550404b28ab11cb0ef1fa199130a5c37864b5497"
console.log(hasher.digest("hex"));
// => "095d5a21fe6d0646db223fdf3de6436bb8dfb2fab0b51677ecf6441fcf5f2a67"
```

View File

@@ -0,0 +1,214 @@
---
name: Migrate from npm install to bun install
---
`bun install` is a Node.js compatible npm client designed to be an incredibly fast successor to npm.
We've put a lot of work into making sure that the migration path from `npm install` to `bun install` is as easy as running `bun install` instead of `npm install`.
- **Designed for Node.js & Bun**: `bun install` installs a Node.js compatible `node_modules` folder. You can use it in place of `npm install` for Node.js projects without any code changes and without using Bun's runtime.
- **Automatically converts `package-lock.json`** to bun's `bun.lockb` lockfile format, preserving your existing resolved dependency versions without any manual work on your part. You can secretly use `bun install` in place of `npm install` at work without anyone noticing.
- **`.npmrc` compatible**: `bun install` reads npm registry configuration from npm's `.npmrc`, so you can use the same configuration for both npm and Bun.
- **Hardlinks**: On Windows and Linux, `bun install` uses hardlinks to conserve disk space and speed up installs.
```bash
# It only takes one command to migrate
$ bun i
# To add dependencies:
$ bun i @types/bun
# To add devDependencies:
$ bun i -d @types/bun
# To remove a dependency:
$ bun rm @types/bun
```
---
## Run package.json scripts faster
Run scripts from package.json, executables from `node_modules/.bin` (sort of like `npx`), and JavaScript/TypeScript files (just like `node`) - all from a single simple command.
| NPM | Bun |
| ------------------ | ---------------- |
| `npm run <script>` | `bun <script>` |
| `npm exec <bin>` | `bun <bin>` |
| `node <file>` | `bun <file>` |
| `npx <package>` | `bunx <package>` |
When you use `bun run <executable>`, it will choose the locally-installed executable.
```sh
# Run a package.json script:
$ bun my-script
$ bun run my-script
# Run an executable in node_modules/.bin:
$ bun my-executable # such as tsc, esbuild, etc.
$ bun run my-executable
# Run a JavaScript/TypeScript file:
$ bun ./index.ts
```
---
## Workspaces? Yes.
`bun install` supports workspaces similarly to npm, with more features.
In package.json, you can set `"workspaces"` to an array of relative paths.
```json#package.json
{
"name": "my-app",
"workspaces": ["packages/*", "apps/*"]
}
```
---
### Filter scripts by workspace name
In Bun, the `--filter` flag accepts a glob pattern, and will run the command concurrently for all workspace packages with a `name` that matches the pattern, respecting dependency order.
```sh
$ bun --filter 'lib-*' my-script
# instead of:
# npm run --workspace lib-foo --workspace lib-bar my-script
```
---
## Update dependencies
To update a dependency, you can use `bun update <package>`. This will update the dependency to the latest version that satisfies the semver range specified in package.json.
```sh
# Update a single dependency
$ bun update @types/bun
# Update all dependencies
$ bun update
# Ignore semver, update to the latest version
$ bun update @types/bun --latest
# Update a dependency to a specific version
$ bun update @types/bun@1.1.10
# Update all dependencies to the latest versions
$ bun update --latest
```
---
### View outdated dependencies
To view outdated dependencies, run `bun outdated`. This is like `npm outdated` but with more compact output.
```sh
$ bun outdated
┌────────────────────────────────────────┬─────────┬────────┬────────┐
│ Package │ Current │ Update │ Latest │
├────────────────────────────────────────┼─────────┼────────┼────────┤
│ @types/bun (dev) │ 1.1.6 │ 1.1.10 │ 1.1.10 │
├────────────────────────────────────────┼─────────┼────────┼────────┤
│ @types/react (dev) │ 18.3.3 │ 18.3.8 │ 18.3.8 │
├────────────────────────────────────────┼─────────┼────────┼────────┤
│ @typescript-eslint/eslint-plugin (dev) │ 7.16.1 │ 7.18.0 │ 8.6.0 │
├────────────────────────────────────────┼─────────┼────────┼────────┤
│ @typescript-eslint/parser (dev) │ 7.16.1 │ 7.18.0 │ 8.6.0 │
├────────────────────────────────────────┼─────────┼────────┼────────┤
│ @vscode/debugadapter (dev) │ 1.66.0 │ 1.67.0 │ 1.67.0 │
├────────────────────────────────────────┼─────────┼────────┼────────┤
│ esbuild (dev) │ 0.21.5 │ 0.21.5 │ 0.24.0 │
├────────────────────────────────────────┼─────────┼────────┼────────┤
│ eslint (dev) │ 9.7.0 │ 9.11.0 │ 9.11.0 │
├────────────────────────────────────────┼─────────┼────────┼────────┤
│ mitata (dev) │ 0.1.11 │ 0.1.14 │ 1.0.2 │
├────────────────────────────────────────┼─────────┼────────┼────────┤
│ prettier-plugin-organize-imports (dev) │ 4.0.0 │ 4.1.0 │ 4.1.0 │
├────────────────────────────────────────┼─────────┼────────┼────────┤
│ source-map-js (dev) │ 1.2.0 │ 1.2.1 │ 1.2.1 │
├────────────────────────────────────────┼─────────┼────────┼────────┤
│ typescript (dev) │ 5.5.3 │ 5.6.2 │ 5.6.2 │
└────────────────────────────────────────┴─────────┴────────┴────────┘
```
---
## List installed packages
To list installed packages, you can use `bun pm ls`. This will list the top-level packages installed in the `node_modules` folder, using Bun's lockfile as the source of truth. You can pass the `-a` flag to list all installed packages, including transitive dependencies.
```sh
# List top-level installed packages:
$ bun pm ls
my-pkg node_modules (781)
├── @types/node@20.16.5
├── @types/react@18.3.8
├── @types/react-dom@18.3.0
├── eslint@8.57.1
├── eslint-config-next@14.2.8
# List all installed packages:
$ bun pm ls -a
my-pkg node_modules
├── @alloc/quick-lru@5.2.0
├── @isaacs/cliui@8.0.2
│ └── strip-ansi@7.1.0
│ └── ansi-regex@6.1.0
├── @jridgewell/gen-mapping@0.3.5
├── @jridgewell/resolve-uri@3.1.2
...
```
---
## Create a package tarball
To create a package tarball, you can use `bun pm pack`. This will create a tarball of the package in the current directory.
```sh
# Create a tarball
$ bun pm pack
Total files: 46
Shasum: 2ee19b6f0c6b001358449ca0eadead703f326216
Integrity: sha512-ZV0lzWTEkGAMz[...]Gl4f8lA9sl97g==
Unpacked size: 0.41MB
Packed size: 117.50KB
```
---
## Shebang
If the package references `node` in the `#!/usr/bin/env node` shebang, `bun run` will by default respect it and use the system's `node` executable. You can force it to use Bun's `node` by passing `--bun` to `bun run`.
When you pass `--bun` to `bun run`, we create a symlink to the locally-installed Bun executable named `"node"` in a temporary directory and add that to your `PATH` for the duration of the script's execution.
```sh
# Force using Bun's runtime instead of node
$ bun --bun my-script
# This also works:
$ bun run --bun my-script
```
---
## Global installs
You can install packages globally using `bun i -g <package>`. This will install into a `.bun/install/global/node_modules` folder inside your home directory by default.
```sh
# Install a package globally
$ bun i -g eslint
# Run a globally-installed package without the `bun run` prefix
$ eslint --init
```

View File

@@ -374,6 +374,10 @@ export default {
description: `Bun's native Semver implementation is 20x faster than the popular \`node-semver\` package.`,
}), // "`Semver`"),
page("api/color", "Color", {
description: `Bun's color function leverages Bun's CSS parser for parsing, normalizing, and converting colors from user input to a variety of output formats.`,
}), // "`Color`"),
// divider("Dev Server"),
// page("bun-dev", "Vanilla"),
// page("dev/css", "CSS"),

View File

@@ -67,6 +67,7 @@ After Visual Studio, you need the following:
- Perl
- Ruby
- Node.js
- Ccache
{% callout %}
**Note** The Zig compiler is automatically downloaded, installed, and updated by the build process.
@@ -78,12 +79,12 @@ After Visual Studio, you need the following:
```ps1#WinGet
## Select "Add LLVM to the system PATH for all users" in the LLVM installer
> winget install -i LLVM.LLVM -v 18.1.8 && winget install GoLang.Go Rustlang.Rustup NASM.NASM StrawberryPerl.StrawberryPerl RubyInstallerTeam.Ruby.3.2 OpenJS.NodeJS.LTS
> winget install -i LLVM.LLVM -v 18.1.8 && winget install GoLang.Go Rustlang.Rustup NASM.NASM StrawberryPerl.StrawberryPerl RubyInstallerTeam.Ruby.3.2 OpenJS.NodeJS.LTS Ccache.Ccache
```
```ps1#Scoop
> irm https://get.scoop.sh | iex
> scoop install nodejs-lts go rust nasm ruby perl
> scoop install nodejs-lts go rust nasm ruby perl ccache
# scoop seems to be buggy if you install llvm and the rest at the same time
> scoop install llvm@18.1.8
```
@@ -104,10 +105,10 @@ If you intend on building WebKit locally (optional), you should install these pa
{% /codetabs %}
From here on out, it is **expected you use a PowerShell Terminal with `.\scripts\env.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it:
From here on out, it is **expected you use a PowerShell Terminal with `.\scripts\vs-shell.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it:
```ps1
> .\scripts\env.ps1
> .\scripts\vs-shell.ps1
```
To verify, you can check for an MSVC-only command-line tool such as `mt.exe`
@@ -117,49 +118,41 @@ To verify, you can check for an MSVC-only command line such as `mt.exe`
```
{% callout %}
It is not recommended to install `ninja` / `cmake` into your global path, because you may run into a situation where you try to build bun without .\scripts\env.ps1 sourced.
It is not recommended to install `ninja` / `cmake` into your global path, because you may run into a situation where you try to build bun without .\scripts\vs-shell.ps1 sourced.
{% /callout %}
## Building
```ps1
> bun install
> bun run build
> .\scripts\env.ps1
> .\scripts\update-submodules.ps1 # this syncs git submodule state
> .\scripts\all-dependencies.ps1 # this builds all dependencies
> .\scripts\make-old-js.ps1 # runs some old code generators
# Configure build environment
> cmake -Bbuild -GNinja -DCMAKE_BUILD_TYPE=Debug
# Build bun
> ninja -Cbuild
# after the initial `bun run build` you can use the following to build
> ninja -Cbuild/debug
```
If this was successful, you should have a `bun-debug.exe` in the `build` folder.
If this was successful, you should have a `bun-debug.exe` in the `build/debug` folder.
```ps1
> .\build\bun-debug.exe --revision
> .\build\debug\bun-debug.exe --revision
```
You should add this to `$Env:PATH`. The simplest way to do so is to open the start menu, type "Path", and then navigate the environment variables menu to add `C:\.....\bun\build` to the user environment variable `PATH`. You should then restart your editor (if it does not update still, log out and log back in).
You should add this to `$Env:PATH`. The simplest way to do so is to open the start menu, type "Path", and then navigate the environment variables menu to add `C:\.....\bun\build\debug` to the user environment variable `PATH`. You should then restart your editor (if it does not update still, log out and log back in).
## Extra paths
- WebKit is extracted to `build/bun-webkit`
- Zig is extracted to `.cache/zig/zig.exe`
- WebKit is extracted to `build/debug/cache/webkit/`
- Zig is extracted to `build/debug/cache/zig/bin/zig.exe`
## Tests
You can run the test suite either using `bun test`, or by using the wrapper script `packages\bun-internal-test`. The internal test package is a wrapper cli to run every test file in a separate instance of bun.exe, to prevent a crash in the test runner from stopping the entire suite.
You can run the test suite either using `bun test <path>`, or by using the wrapper script `packages\bun-internal-test`. The internal test package is a wrapper cli to run every test file in a separate instance of bun.exe, to prevent a crash in the test runner from stopping the entire suite.
```ps1
# Setup
> bun i --cwd packages\bun-internal-test
# Run the entire test suite with reporter
# the package.json script "test" uses "build/bun-debug.exe" by default
# the package.json script "test" uses "build/debug/bun-debug.exe" by default
> bun run test
# Run an individual test file:

View File

@@ -143,17 +143,30 @@ The following Web APIs are partially or completely supported.
---
- HTTP
- [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/fetch) [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) [`Headers`](https://developer.mozilla.org/en-US/docs/Web/API/Headers) [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal)
- [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/fetch)
[`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response)
[`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request)
[`Headers`](https://developer.mozilla.org/en-US/docs/Web/API/Headers)
[`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
[`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal)
---
- URLs
- [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL) [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams)
- [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL)
[`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams)
---
- Streams
- [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) [`WritableStream`](https://developer.mozilla.org/en-US/docs/Web/API/WritableStream) [`TransformStream`](https://developer.mozilla.org/en-US/docs/Web/API/TransformStream) [`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy) [`CountQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/CountQueuingStrategy) and associated classes
- [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
[`WritableStream`](https://developer.mozilla.org/en-US/docs/Web/API/WritableStream)
[`TransformStream`](https://developer.mozilla.org/en-US/docs/Web/API/TransformStream)
[`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy)
[`CountQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/CountQueuingStrategy)
[`CompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/CompressionStream)
[`DecompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream)
and associated classes
---
@@ -168,29 +181,36 @@ The following Web APIs are partially or completely supported.
---
- Encoding and decoding
- [`atob`](https://developer.mozilla.org/en-US/docs/Web/API/atob) [`btoa`](https://developer.mozilla.org/en-US/docs/Web/API/btoa) [`TextEncoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder) [`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder)
- [`atob`](https://developer.mozilla.org/en-US/docs/Web/API/atob)
[`btoa`](https://developer.mozilla.org/en-US/docs/Web/API/btoa)
[`TextEncoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder)
[`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder)
---
- Timeouts
- [`setTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/setTimeout) [`clearTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/clearTimeout)
- [`setTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/setTimeout)
[`clearTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/clearTimeout)
---
- Intervals
- [`setInterval`](https://developer.mozilla.org/en-US/docs/Web/API/setInterval)[`clearInterval`](https://developer.mozilla.org/en-US/docs/Web/API/clearInterval)
- [`setInterval`](https://developer.mozilla.org/en-US/docs/Web/API/setInterval)
[`clearInterval`](https://developer.mozilla.org/en-US/docs/Web/API/clearInterval)
---
- Crypto
- [`crypto`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto) [`SubtleCrypto`](https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto)
- [`crypto`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto)
[`SubtleCrypto`](https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto)
[`CryptoKey`](https://developer.mozilla.org/en-US/docs/Web/API/CryptoKey)
---
- Debugging
- [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console) [`performance`](https://developer.mozilla.org/en-US/docs/Web/API/Performance)
- [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console)
[`performance`](https://developer.mozilla.org/en-US/docs/Web/API/Performance)
---
@@ -205,7 +225,10 @@ The following Web APIs are partially or completely supported.
---
- User interaction
- [`alert`](https://developer.mozilla.org/en-US/docs/Web/API/Window/alert) [`confirm`](https://developer.mozilla.org/en-US/docs/Web/API/Window/confirm) [`prompt`](https://developer.mozilla.org/en-US/docs/Web/API/Window/prompt) (intended for interactive CLIs)
- [`alert`](https://developer.mozilla.org/en-US/docs/Web/API/Window/alert)
[`confirm`](https://developer.mozilla.org/en-US/docs/Web/API/Window/confirm)
[`prompt`](https://developer.mozilla.org/en-US/docs/Web/API/Window/prompt)
(intended for interactive CLIs)
<!-- - Blocking. Prints the alert message to terminal and awaits `[ENTER]` before proceeding. -->
<!-- - Blocking. Prints confirmation message and awaits `[y/N]` input from user. Returns `true` if user entered `y` or `Y`, `false` otherwise.
@@ -220,7 +243,10 @@ The following Web APIs are partially or completely supported.
- Events
- [`EventTarget`](https://developer.mozilla.org/en-US/docs/Web/API/EventTarget)
[`Event`](https://developer.mozilla.org/en-US/docs/Web/API/Event) [`ErrorEvent`](https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent) [`CloseEvent`](https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent) [`MessageEvent`](https://developer.mozilla.org/en-US/docs/Web/API/MessageEvent)
[`Event`](https://developer.mozilla.org/en-US/docs/Web/API/Event)
[`ErrorEvent`](https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent)
[`CloseEvent`](https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent)
[`MessageEvent`](https://developer.mozilla.org/en-US/docs/Web/API/MessageEvent)
---

View File

@@ -233,7 +233,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
### [`CompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/CompressionStream)
🔴 Not implemented.
🟢 Fully implemented.
### [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console)
@@ -261,7 +261,7 @@ The table below lists all globals implemented by Node.js and Bun's current compa
### [`DecompressionStream`](https://developer.mozilla.org/en-US/docs/Web/API/DecompressionStream)
🔴 Not implemented.
🟢 Fully implemented.
### [`Event`](https://developer.mozilla.org/en-US/docs/Web/API/Event)

View File

@@ -40,7 +40,7 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionWrite, (JSC::JSGlobalObject * globalObject,
int32_t fd = STDOUT_FILENO;
if (callframe->argumentCount() > 1) {
fd = arg1.toInt32(globalObject);
RETURN_IF_EXCEPTION(scope, encodedJSValue());
RETURN_IF_EXCEPTION(scope, {});
} else {
toWriteArg = arg1;
}
@@ -53,7 +53,7 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionWrite, (JSC::JSGlobalObject * globalObject,
}
auto string = toWriteArg.toWTFString(globalObject);
RETURN_IF_EXCEPTION(scope, encodedJSValue());
RETURN_IF_EXCEPTION(scope, {});
auto utf8 = string.utf8();
auto length = utf8.length();
auto written = write(fd, utf8.data(), length);

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.1.29",
"version": "1.1.30",
"workspaces": [
"./packages/bun-types"
],
@@ -21,7 +21,10 @@
"react": "^18.3.1",
"react-dom": "^18.3.1",
"source-map-js": "^1.2.0",
"typescript": "^5.4.5"
"typescript": "^5.4.5",
"caniuse-lite": "^1.0.30001620",
"autoprefixer": "^10.4.19",
"@mdn/browser-compat-data": "~5.5.28"
},
"resolutions": {
"bun-types": "workspace:packages/bun-types"
@@ -40,25 +43,34 @@
"build:release:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DWEBKIT_LOCAL=ON -B build/release",
"build:release:with_logs": "cmake . -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=true -GNinja -Bbuild-release && ninja -Cbuild-release",
"build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
"bump": "bun ./scripts/bump.ts",
"typecheck": "tsc --noEmit && cd test && bun run typecheck",
"fmt": "prettier --config=.prettierrc-ci --write --cache './{.vscode,src,test,bench,packages/{bun-types,bun-inspector-*,bun-vscode,bun-debug-adapter-protocol}}/**/*.{mjs,ts,tsx,js,jsx}'",
"fmt:cpp": "bun run build --target clang-format",
"fmt:zig": "bun run build --target zig-format",
"fmt": "bun run prettier",
"fmt:cpp": "bun run clang-format",
"fmt:zig": "bun run zig-format",
"lint": "eslint './**/*.d.ts' --cache",
"lint:fix": "eslint './**/*.d.ts' --cache --fix",
"test": "node scripts/runner.node.mjs ./build/bun-debug",
"test:release": "node scripts/runner.node.mjs ./build-release/bun",
"test": "node scripts/runner.node.mjs --exec-path ./build/debug/bun-debug",
"test:release": "node scripts/runner.node.mjs --exec-path ./build/release/bun",
"banned": "bun packages/bun-internal-test/src/linter.ts",
"zig": "vendor/zig/zig.exe",
"zig:fmt": "bun run fmt:zig",
"zig:fmt": "bun run zig-format",
"zig:check": "bun run zig build check --summary new",
"zig:check-all": "bun run zig build check-all --summary new",
"zig:check-windows": "bun run zig build check-windows --summary new",
"clang-format": "bun run build --target clang-format --fresh",
"clang-format:check": "bun run build --target clang-format-check --fresh",
"clang-tidy": "bun run build --target clang-tidy --fresh",
"clang-tidy:check": "bun run build --target clang-tidy --fresh",
"zig-format": "bun run build --target zig-format --fresh",
"zig-format:check": "bun run build --target zig-format-check --fresh"
"cmake": "bun ./scripts/build.mjs -DCMAKE_BUILD_TYPE=Debug -B build/debug",
"clang-format": "bun run cmake --target clang-format",
"clang-format:check": "bun run cmake --target clang-format-check",
"clang-format:diff": "bun run cmake --target clang-format-diff",
"clang-tidy": "bun run cmake --target clang-tidy",
"clang-tidy:check": "bun run cmake --target clang-tidy-check",
"clang-tidy:diff": "bun run cmake --target clang-tidy-diff",
"zig-format": "bun run cmake --target zig-format",
"zig-format:check": "bun run cmake --target zig-format-check",
"zig-format:diff": "bun run cmake --target zig-format-diff",
"prettier": "bun run cmake --target prettier",
"prettier:check": "bun run cmake --target prettier-check",
"prettier:extra": "bun run cmake --target prettier-extra",
"prettier:diff": "bun run cmake --target prettier-diff"
}
}

View File

@@ -1,4 +0,0 @@
bin/bun-profile
bin/*.o
*.o
*.a

View File

@@ -2,13 +2,27 @@ import type { InspectorEventMap } from "../../../bun-inspector-protocol/src/insp
import type { JSC } from "../../../bun-inspector-protocol/src/protocol";
import type { DAP } from "../protocol";
// @ts-ignore
import type { ChildProcess } from "node:child_process";
import { spawn } from "node:child_process";
import { ChildProcess, spawn } from "node:child_process";
import { EventEmitter } from "node:events";
import { WebSocketInspector, remoteObjectToString } from "../../../bun-inspector-protocol/index";
import { UnixSignal, randomUnixPath } from "./signal";
import { AddressInfo, createServer } from "node:net";
import * as path from "node:path";
import { remoteObjectToString, WebSocketInspector } from "../../../bun-inspector-protocol/index";
import { randomUnixPath, TCPSocketSignal, UnixSignal } from "./signal";
import { Location, SourceMap } from "./sourcemap";
export async function getAvailablePort(): Promise<number> {
const server = createServer();
server.listen(0);
return new Promise((resolve, reject) => {
server.on("listening", () => {
const { port } = server.address() as AddressInfo;
server.close(() => {
resolve(port);
});
});
});
}
const capabilities: DAP.Capabilities = {
supportsConfigurationDoneRequest: true,
supportsFunctionBreakpoints: true,
@@ -489,36 +503,73 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
...env,
};
const url = `ws+unix://${randomUnixPath()}`;
const signal = new UnixSignal();
if (process.platform !== "win32") {
// we're on unix
const url = `ws+unix://${randomUnixPath()}`;
const signal = new UnixSignal();
signal.on("Signal.received", () => {
this.#attach({ url });
});
signal.on("Signal.received", () => {
this.#attach({ url });
});
this.once("Adapter.terminated", () => {
signal.close();
});
this.once("Adapter.terminated", () => {
signal.close();
});
const query = stopOnEntry ? "break=1" : "wait=1";
processEnv["BUN_INSPECT"] = `${url}?${query}`;
processEnv["BUN_INSPECT_NOTIFY"] = signal.url;
const query = stopOnEntry ? "break=1" : "wait=1";
processEnv["BUN_INSPECT"] = `${url}?${query}`;
processEnv["BUN_INSPECT_NOTIFY"] = signal.url;
// This is probably not correct, but it's the best we can do for now.
processEnv["FORCE_COLOR"] = "1";
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
// This is probably not correct, but it's the best we can do for now.
processEnv["FORCE_COLOR"] = "1";
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
const started = await this.#spawn({
command: runtime,
args: processArgs,
env: processEnv,
cwd,
isDebugee: true,
});
const started = await this.#spawn({
command: runtime,
args: processArgs,
env: processEnv,
cwd,
isDebugee: true,
});
if (!started) {
throw new Error("Program could not be started.");
if (!started) {
throw new Error("Program could not be started.");
}
} else {
// we're on windows
// Create TCPSocketSignal
const url = `ws://127.0.0.1:${await getAvailablePort()}/${getRandomId()}`; // 127.0.0.1 so it resolves correctly on windows
const signal = new TCPSocketSignal(await getAvailablePort());
signal.on("Signal.received", async () => {
this.#attach({ url });
});
this.once("Adapter.terminated", () => {
signal.close();
});
const query = stopOnEntry ? "break=1" : "wait=1";
processEnv["BUN_INSPECT"] = `${url}?${query}`;
processEnv["BUN_INSPECT_NOTIFY"] = signal.url; // 127.0.0.1 so it resolves correctly on windows
// This is probably not correct, but it's the best we can do for now.
processEnv["FORCE_COLOR"] = "1";
processEnv["BUN_QUIET_DEBUG_LOGS"] = "1";
processEnv["BUN_DEBUG_QUIET_LOGS"] = "1";
const started = await this.#spawn({
command: runtime,
args: processArgs,
env: processEnv,
cwd,
isDebugee: true,
});
if (!started) {
throw new Error("Program could not be started.");
}
}
}
@@ -684,6 +735,9 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
async breakpointLocations(request: DAP.BreakpointLocationsRequest): Promise<DAP.BreakpointLocationsResponse> {
const { line, endLine, column, endColumn, source: source0 } = request;
if (process.platform === "win32") {
source0.path = source0.path ? normalizeWindowsPath(source0.path) : source0.path;
}
const source = await this.#getSource(sourceToId(source0));
const { locations } = await this.send("Debugger.getBreakpointLocations", {
@@ -788,6 +842,9 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
}
async #setBreakpointsByUrl(url: string, requests: DAP.SourceBreakpoint[], unsetOld?: boolean): Promise<Breakpoint[]> {
if (process.platform === "win32") {
url = url ? normalizeWindowsPath(url) : url;
}
const source = this.#getSourceIfPresent(url);
// If the source is not loaded, set a placeholder breakpoint at the start of the file.
@@ -1161,6 +1218,9 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
async gotoTargets(request: DAP.GotoTargetsRequest): Promise<DAP.GotoTargetsResponse> {
const { source: source0 } = request;
if (process.platform === "win32") {
source0.path = source0.path ? normalizeWindowsPath(source0.path) : source0.path;
}
const source = await this.#getSource(sourceToId(source0));
const { breakpoints } = await this.breakpointLocations(request);
@@ -1327,7 +1387,7 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
// 1. If it has a `path`, the client retrieves the source from the file system.
// 2. If it has a `sourceReference`, the client sends a `source` request.
// Moreover, the code is usually shown in a read-only editor.
const isUserCode = url.startsWith("/");
const isUserCode = path.isAbsolute(url);
const sourceMap = SourceMap(sourceMapURL);
const name = sourceName(url);
const presentationHint = sourcePresentationHint(url);
@@ -1646,12 +1706,11 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
// If the source does not have a path or is a builtin module,
// it cannot be retrieved from the file system.
if (typeof sourceId === "number" || !sourceId.startsWith("/")) {
if (typeof sourceId === "number" || !path.isAbsolute(sourceId)) {
throw new Error(`Source not found: ${sourceId}`);
}
// If the source is not present, it may not have been loaded yet.
// In that case, wait for it to be loaded.
let resolves = this.#pendingSources.get(sourceId);
if (!resolves) {
this.#pendingSources.set(sourceId, (resolves = []));
@@ -2107,7 +2166,6 @@ export class DebugAdapter extends EventEmitter<DebugAdapterEventMap> implements
close(): void {
this.#process?.kill();
// this.#signal?.close();
this.#inspector.close();
this.#reset();
}
@@ -2149,10 +2207,10 @@ function titleize(name: string): string {
}
function sourcePresentationHint(url?: string): DAP.Source["presentationHint"] {
if (!url || !url.startsWith("/")) {
if (!url || !path.isAbsolute(url)) {
return "deemphasize";
}
if (url.includes("/node_modules/")) {
if (url.includes("/node_modules/") || url.includes("\\node_modules\\")) {
return "normal";
}
return "emphasize";
@@ -2163,6 +2221,9 @@ function sourceName(url?: string): string {
return "unknown.js";
}
if (isJavaScript(url)) {
if (process.platform === "win32") {
url = url.replaceAll("\\", "/");
}
return url.split("/").pop() || url;
}
return `${url}.js`;
@@ -2567,3 +2628,15 @@ let sequence = 1;
function nextId(): number {
return sequence++;
}
export function getRandomId() {
return Math.random().toString(36).slice(2);
}
export function normalizeWindowsPath(winPath: string): string {
winPath = path.normalize(winPath);
if (winPath[1] === ":" && (winPath[2] === "\\" || winPath[2] === "/")) {
return (winPath.charAt(0).toUpperCase() + winPath.slice(1)).replaceAll("\\\\", "\\");
}
return winPath;
}

View File

@@ -1,5 +1,5 @@
import { EventEmitter } from "node:events";
import type { Server } from "node:net";
import type { Server, Socket } from "node:net";
import { createServer } from "node:net";
import { tmpdir } from "node:os";
import { join } from "node:path";
@@ -85,3 +85,75 @@ function parseUnixPath(path: string | URL): string {
throw new Error(`Invalid UNIX path: ${path}`);
}
}
export type TCPSocketSignalEventMap = {
"Signal.listening": [];
"Signal.error": [Error];
"Signal.closed": [];
"Signal.received": [string];
};
export class TCPSocketSignal extends EventEmitter {
#port: number;
#server: ReturnType<typeof createServer>;
#ready: Promise<void>;
constructor(port: number) {
super();
this.#port = port;
this.#server = createServer((socket: Socket) => {
socket.on("data", data => {
this.emit("Signal.received", data.toString());
});
socket.on("error", error => {
this.emit("Signal.error", error);
});
socket.on("close", () => {
this.emit("Signal.closed");
});
});
this.#ready = new Promise((resolve, reject) => {
this.#server.listen(this.#port, () => {
this.emit("Signal.listening");
resolve();
});
this.#server.on("error", reject);
});
}
emit<E extends keyof TCPSocketSignalEventMap>(event: E, ...args: TCPSocketSignalEventMap[E]): boolean {
if (isDebug) {
console.log(event, ...args);
}
return super.emit(event, ...args);
}
/**
* The TCP port.
*/
get port(): number {
return this.#port;
}
get url(): string {
return `tcp://127.0.0.1:${this.#port}`;
}
/**
* Resolves when the server is listening or rejects if an error occurs.
*/
get ready(): Promise<void> {
return this.#ready;
}
/**
* Closes the server.
*/
close(): void {
this.#server.close();
}
}

View File

@@ -1,4 +0,0 @@
bin/bun-profile
bin/*.o
*.o
*.a

View File

@@ -1,3 +0,0 @@
# `bun-plugin-css`
Not implemented.

View File

@@ -1 +0,0 @@
throw new Error("Not implemented.");

View File

@@ -1,10 +0,0 @@
{
"name": "bun-plugin-css",
"version": "0.0.1-alpha.0",
"module": "index.ts",
"type": "module",
"files": [
"index.ts",
"package.json"
]
}

View File

@@ -1,3 +0,0 @@
# `bun-plugin-lightningcss`
Not implemented.

View File

@@ -1 +0,0 @@
throw new Error("Not implemented.");

View File

@@ -1,10 +0,0 @@
{
"name": "bun-plugin-lightningcss",
"version": "0.0.1-alpha.0",
"module": "index.ts",
"type": "module",
"files": [
"index.ts",
"package.json"
]
}

View File

@@ -1,3 +0,0 @@
# `bun-plugin-mdx`
Not implemented.

View File

@@ -1 +0,0 @@
throw new Error("Not implemented.");

View File

@@ -1,10 +0,0 @@
{
"name": "bun-plugin-mdx",
"version": "0.0.1-alpha.0",
"module": "index.ts",
"type": "module",
"files": [
"index.ts",
"package.json"
]
}

View File

@@ -1,169 +0,0 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
.cache
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

View File

@@ -1,149 +0,0 @@
# `bun-plugin-server-components`
The official Bun plugin for **server components**.
## Installation
```sh
bun add bun-plugin-server-components -d
```
## Context
Server components are a new abstraction for building web applications. They look similar to standard React/JSX components, but render exclusively on the server. They differ from classic "client components" in a few ways:
1. They can be `async`.
2. Their implementation can run privileged code like database queries. Normally this would be unsafe, because the source code of client components is typically bundled and sent to the client, where it can be inspected and reverse-engineered. Server components are never sent to the client, so they can run privileged code safely.
3. They _cannot_ contain stateful hooks like `useState` or `useEffect`.
Server components require a deep integration with the bundler to work. To understand why, we need a bit of background on how server components work.
### How server components work
Imagine you have a server component that looks like this:
```tsx
// index.tsx
import { Component } from "./Component";
export default async function HomePage() {
return (
<div>
<Component />
</div>
);
}
```
This file imports a client component called `Component`.
```ts
// ./Component.tsx
"use client";
export function Component() {
return <div>Hello world</div>;
}
```
To run this component we need to generate two builds.
> Here the term "build" refers to a typical bundling step—the act of converting a set of entrypoints into a set of bundles.
1. The first is our "server component build". It contains all the code we need to render `HomePage` to a component tree. When an incoming `Request` comes in, we can use React's built-in tools to convert this tree into a "virtual DOM stream" that we can return as a `Response`.
2. The second is our "client build". It contains the bundled versions of all client components that were referenced by our server components.
The browser hits the server and gets back the "virtual DOM stream". The virtual DOM stream will contain references to client components, which will be loaded from the client bundle. React provides a built-in utility (`createFromFetch`) that accepts the VDOM stream, dynamically loads the necessary client components, and returns a renderable component.
```ts
import { createRoot } from "react-dom/client";
import { createFromFetch } from "react-server-dom-webpack/client.browser";
const stream = fetch("/", { headers: { Accept: "text/x-component" } });
const data = createFromFetch(stream);
const root = createRoot(document);
root.render(await data);
```
### Server-side rendering
One potentially confusing aspect of server components is that they "return" virtual DOM. From the perspective of a server component, client components are black boxes.
If we want to do server-side rendering, we need to render our server component to VDOM, _then_ render the VDOM to plain HTML. These are two distinct steps. The second step requires a _third build_, which we'll call the "SSR build". Like the "client build", this build will bundle all the client components. Unlike the "client build", those bundles are intended for consumption on the server; in bundler terms, the build's `"target"` will be `"bun"` (or perhaps `"node"`).
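As a rough sketch of that second step, under the assumption that the SSR build uses `react-server-dom-webpack`'s edge entry points (the exact module paths and signatures vary between versions of that package), the code running against the SSR bundles might look something like this:
```ts
// A sketch of the SSR step only: VDOM stream in, HTML out.
// This module would belong to the "SSR build", next to the SSR bundles of
// the client components. The import paths below are assumptions, not
// something this plugin provides.
import { createFromReadableStream } from "react-server-dom-webpack/client.edge";
import { renderToReadableStream } from "react-dom/server";

export async function renderToHTML(rscStream: ReadableStream): Promise<Response> {
  // Step one already happened elsewhere: a server component was rendered
  // to a virtual DOM stream. Step two turns that stream back into a React
  // tree, resolving client components from the SSR bundles...
  const tree = await createFromReadableStream(rscStream);
  // ...and renders that tree to plain HTML, exactly like classic SSR.
  const html = await renderToReadableStream(tree);
  return new Response(html, { headers: { "Content-Type": "text/html" } });
}
```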
### Bundling server components
That's a high-level overview of how server components work. The important takeaway is that we need to generate totally separate bundles for server and client components.
But it's not just a simple matter of running two separate bundling scripts. The true "entrypoints" of our application are the server components. Over the course of bundling our server components, we will discover some files containing the `"use client"` directive; these files then become the entrypoints for our "client build", which will require a totally separate build configuration from the server build.
The goal of this plugin is to hide the complexity of this multi-stage build from the user.
## Usage
To use this plugin:
```ts
import ServerComponentsPlugin from "bun-plugin-server-components";
await Bun.build({
entrypoints: ["./index.tsx"], // server component files
plugins: [
ServerComponentsPlugin({
// plugin configuration
}),
],
// other configuration
});
```
The `"entrypoints"` you pass into `Bun.build()` should be your _server components_. Bun's bundler will automatically detect any files containing the `"use client"` directive, and will use those files as entrypoints for the "client build" and "SSR build". The bundler configuration for these builds can be provided `client` and `ssr` keys respectively.
```ts
import ServerComponentsPlugin from "bun-plugin-server-components";
await Bun.build({
entrypoints: ["./index.tsx"], // server component files
outdir: "./build",
manifest: true,
plugins: [ServerComponentsPlugin({
client: {
entrypoints: [], // optional - additional client entrypoints
outdir: "./build/client", // default: inherits from the main build
target: "browser",
plugins: [/* */],
}
ssr: {
entrypoints: [], // optional - additional SSR entrypoints
outdir: "./build/client", // default: inherits from the main build
target: "bun", // this is default
plugins: [/* */],
}
})],
});
```
The result of `Bun.build()` will contain additional manifests for the SSR and client builds.
```ts
const result = await Bun.build({
// config
plugins: [
ServerComponentsPlugin({
/* config */
}),
],
});
// standard manifest
// for the top-level (server components) build
result.manifest;
// manifest for client build
result.clientManifest;
// manifest for SSR build
result.ssrManifest;
```
Once the build is complete, use the manifests to implement your RSC server.
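For example, a minimal sketch of such a server, assuming the same `react-server-dom-webpack` package as above and a hypothetical `./build/react-client-manifest.json` emitted by the client build, could look like this:
```tsx
// server.tsx: a sketch, not an API provided by this plugin.
import { renderToReadableStream } from "react-server-dom-webpack/server.edge";
// Hypothetical path: the client manifest produced by the client build.
import clientManifest from "./build/react-client-manifest.json";
import HomePage from "./index";

Bun.serve({
  port: 3000,
  fetch() {
    // Render the page's server components to a virtual DOM stream and send
    // it to the browser, which reassembles it with createFromFetch.
    const stream = renderToReadableStream(<HomePage />, clientManifest);
    return new Response(stream, {
      headers: { "Content-Type": "text/x-component" },
    });
  },
});
```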

View File

@@ -1,10 +0,0 @@
import { BunPlugin, BuildConfig } from "bun";
function Plugin(config: { client?: BuildConfig; ssr?: BuildConfig }): BunPlugin {
return {
name: "bun-plugin-server-components",
SECRET_SERVER_COMPONENTS_INTERNALS: config,
} as any;
}
export default Plugin;

View File

@@ -1,28 +0,0 @@
{
"name": "bun-plugin-server-components",
"version": "0.0.1-alpha.0",
"module": "index.ts",
"type": "module",
"types": "index.ts",
"exports": {
".": {
"import": "./index.ts",
"require": "./index.ts",
"default": "./index.js"
},
"./package.json": "./package.json"
},
"files": [
"index.ts",
"tsconfig.json",
"package.json",
"modules.d.ts"
],
"devDependencies": {
"@types/js-yaml": "^4.0.5"
},
"dependencies": {
"bun-types": "canary",
"js-yaml": "^4.1.0"
}
}

View File

@@ -1,23 +0,0 @@
{
"compilerOptions": {
"lib": ["ESNext"],
"module": "esnext",
"target": "esnext",
"moduleResolution": "bundler",
"moduleDetection": "force",
"allowImportingTsExtensions": true,
"noEmit": true,
"composite": true,
"strict": true,
"downlevelIteration": true,
"skipLibCheck": true,
"jsx": "react-jsx",
"allowSyntheticDefaultImports": true,
"forceConsistentCasingInFileNames": true,
"allowJs": true,
"types": [
"bun-types" // add Bun global
]
},
"include": ["**/*.ts", "modules.d.ts"]
}

View File

@@ -2,6 +2,27 @@ import { AwsClient } from "aws4fetch";
import { getBuild, getRelease, getSemver, getSha } from "../src/github";
import { join, tmp } from "../src/fs";
// The source of truth for the git sha is what's in the local build, extracted from features.json
// NOT the git tag revision.
// Ideally, these are always the same, but mistakes can happen.
const local =
"bun-" +
(
{
darwin: "darwin",
win32: "windows",
linux: "linux",
} as any
)[process.platform] +
"-" +
(
{
arm64: "aarch64",
x64: "x64",
} as any
)[process.arch] +
".zip";
const dryRun = process.argv.includes("--dry-run");
const [tag] = process.argv.slice(2);
@@ -28,40 +49,68 @@ const full_commit_hash = await getSha(tag, "long");
console.log("Found release:", release.tag_name, "with commit hash:", full_commit_hash);
console.log("Found build:", full_commit_hash);
const isCanary = release.tag_name === "canary";
let paths: string[];
if (latest.tag_name === release.tag_name) {
paths = ["releases/latest", `releases/${release.tag_name}`, `releases/${full_commit_hash}`];
} else if (release.tag_name === "canary") {
try {
const build = await getSemver("canary", await getBuild());
paths = ["releases/canary", `releases/${build}`, `releases/${full_commit_hash}-canary`];
} catch (error) {
console.warn(error);
paths = ["releases/canary"];
let paths: string[] = [];
async function setPaths(revision: string, isCanary: boolean) {
const releaseSha = `releases/${revision + (isCanary ? "-canary" : "")}`;
if (latest.tag_name === release.tag_name) {
paths = ["releases/latest", `releases/${release.tag_name}`, releaseSha];
} else if (isCanary) {
try {
const build = await getSemver("canary", await getBuild());
paths = ["releases/canary", `releases/${build}`, releaseSha];
} catch (error) {
console.warn(error);
paths = ["releases/canary", releaseSha];
}
} else {
paths = [`releases/${release.tag_name}`, releaseSha];
}
} else {
paths = [`releases/${release.tag_name}`, `releases/${full_commit_hash}`];
}
console.log("Found paths:", paths);
const local =
"bun-" +
(
{
darwin: "darwin",
win32: "windows",
linux: "linux",
} as any
)[process.platform] +
"-" +
(
{
arm64: "aarch64",
x64: "x64",
} as any
)[process.arch] +
".zip";
console.log("Found paths:", paths);
}
async function getFeaturesJSON(body: ArrayBuffer) {
// extract feature data using the local build
const temp = tmp();
await Bun.write(join(temp, "bun.zip"), body);
let unzip = Bun.spawnSync({
cmd: ["unzip", join(temp, "bun.zip")],
cwd: temp,
});
if (!unzip.success) throw new Error("Failed to unzip");
let data = Bun.spawnSync({
cmd: [
join(temp, local.replace(".zip", ""), "bun"),
"--print",
'JSON.stringify(require("bun:internal-for-testing").crash_handler.getFeatureData())',
],
cwd: temp,
env: {
...process.env,
BUN_DEBUG_QUIET_LOGS: "1",
BUN_GARBAGE_COLLECTOR_LEVEL: "0",
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "1",
},
stdio: ["ignore", "pipe", "inherit"],
});
return data.stdout.toString("utf8").trim();
}
// Make the first asset the local build
for (let i = 0; i < release.assets.length; i++) {
const asset = release.assets[i];
if (asset.name === local) {
release.assets.splice(i, 1);
release.assets.unshift(asset);
break;
}
}
if (release?.assets?.[0]?.name !== local) {
throw new Error("Expected local build to be the first asset");
}
for (const asset of release.assets) {
const url = asset.browser_download_url;
@@ -83,39 +132,25 @@ for (const asset of release.assets) {
contentType = response.headers.get("Content-Type") || "";
}
console.log("Downloading asset:", name);
const body = await response.arrayBuffer();
if (name == local) {
// extract feature data using the local build
const temp = tmp();
await Bun.write(join(temp, "bun.zip"), body);
let unzip = Bun.spawnSync({
cmd: ["unzip", join(temp, "bun.zip")],
cwd: temp,
});
if (!unzip.success) throw new Error("Failed to unzip");
let data = Bun.spawnSync({
cmd: [
join(temp, local.replace(".zip", ""), "bun"),
"--print",
'JSON.stringify(require("bun:internal-for-testing").crash_handler.getFeatureData())',
],
cwd: temp,
env: {
...process.env,
BUN_DEBUG_QUIET_LOGS: "1",
BUN_GARBAGE_COLLECTOR_LEVEL: "0",
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "1",
},
stdio: ["ignore", "pipe", "inherit"],
});
const json = data.stdout.toString("utf8");
const text = await getFeaturesJSON(body);
const features = JSON.parse(text);
const sha = features.revision;
if (features.is_canary && !isCanary) {
console.warn("Local build is a canary but release is not tagged as canary.");
}
await setPaths(sha, features.is_canary);
console.log("features.json:", JSON.stringify(features, null, 2));
for (const path of paths) {
const key = `${path}/features.json`;
console.log("Uploading:", key);
await uploadToS3({
key,
body: new TextEncoder().encode(json).buffer,
body: new TextEncoder().encode(text).buffer,
headers: {
"Content-Type": contentType,
"Content-Disposition": `attachment; filename="${name}"`,

View File

@@ -2282,7 +2282,7 @@ declare module "bun" {
*/
development?: boolean;
error?: (this: Server, request: ErrorLike) => Response | Promise<Response> | undefined | Promise<undefined>;
error?: (this: Server, error: ErrorLike) => Response | Promise<Response> | undefined | Promise<undefined>;
/**
* Uniquely identify a server instance with an ID
@@ -2660,7 +2660,7 @@ declare module "bun" {
* @param closeActiveConnections Immediately terminate in-flight requests, websockets, and stop accepting new connections.
* @default false
*/
stop(closeActiveConnections?: boolean): void;
stop(closeActiveConnections?: boolean): Promise<void>;
/**
* Update the `fetch` and `error` handlers without restarting the server.
@@ -3029,6 +3029,87 @@ declare module "bun" {
type StringLike = string | { toString(): string };
type ColorInput =
| { r: number; g: number; b: number; a?: number }
| [number, number, number]
| [number, number, number, number]
| Uint8Array
| Uint8ClampedArray
| Float32Array
| Float64Array
| string
| number
| { toString(): string };
function color(
input: ColorInput,
outputFormat?: /**
* True color ANSI color string, for use in terminals
* @example \x1b[38;2;100;200;200m
*/
| "ansi"
/**
* 256 color ANSI color string, for use in terminals which don't support true color
*
* Tries to match closest 24-bit color to 256 color palette
*/
| "ansi256"
/**
* Lowercase hex color string without alpha
* @example #aabb11
*/
| "hex"
/**
* RGB color string without alpha
* rgb(100, 200, 200)
*/
| "rgb"
/**
* RGB color string with alpha
* rgba(100, 200, 200, 0.5)
*/
| "rgba"
| "hsl"
| "lab"
| "css"
| "lab"
| "HEX",
): string | null;
function color(
input: ColorInput,
/**
* An array of numbers representing the RGB color
* @example [100, 200, 200]
*/
outputFormat: "[rgb]",
): [number, number, number] | null;
function color(
input: ColorInput,
/**
* An array of numbers representing the RGBA color
* @example [100, 200, 200, 255]
*/
outputFormat: "[rgba]",
): [number, number, number, number] | null;
function color(
input: ColorInput,
/**
* An object representing the RGB color
* @example { r: 100, g: 200, b: 200 }
*/
outputFormat: "{rgb}",
): { r: number; g: number; b: number } | null;
function color(
input: ColorInput,
/**
* An object representing the RGBA color
* @example { r: 100, g: 200, b: 200, a: 0.5 }
*/
outputFormat: "{rgba}",
): { r: number; g: number; b: number; a: number } | null;
function color(input: ColorInput, outputFormat: "number"): number | null;
interface Semver {
/**
* Test if the version satisfies the range. Stringifies both arguments. Returns `true` or `false`.
@@ -3264,8 +3345,9 @@ declare module "bun" {
* Create a new hasher
*
* @param algorithm The algorithm to use. See {@link algorithms} for a list of supported algorithms
* @param hmacKey Optional key for HMAC. Must be a string or `TypedArray`. If not provided, the hasher will be a non-HMAC hasher.
*/
constructor(algorithm: SupportedCryptoAlgorithms);
constructor(algorithm: SupportedCryptoAlgorithms, hmacKey?: string | NodeJS.TypedArray);
/**
* Update the hash with data
@@ -4615,6 +4697,32 @@ declare module "bun" {
* @default cmds[0]
*/
argv0?: string;
/**
* An {@link AbortSignal} that can be used to abort the subprocess.
*
* This is useful for aborting a subprocess when some other part of the
* program is aborted, such as a `fetch` response.
*
* Internally, this works by calling `subprocess.kill(1)`.
*
* @example
* ```ts
* const controller = new AbortController();
* const { signal } = controller;
* const start = performance.now();
* const subprocess = Bun.spawn({
* cmd: ["sleep", "100"],
* signal,
* });
* await Bun.sleep(1);
* controller.abort();
* await subprocess.exited;
* const end = performance.now();
* console.log(end - start); // 1ms instead of 101ms
* ```
*/
signal?: AbortSignal;
}
type OptionsToSubprocess<Opts extends OptionsObject> =

View File

@@ -688,6 +688,18 @@ declare global {
new <W = any>(underlyingSink?: Bun.UnderlyingSink<W>, strategy?: QueuingStrategy<W>): WritableStream<W>;
};
type CompressionFormat = "deflate" | "deflate-raw" | "gzip";
type CompressionStream<R = any, W = any> = import("stream/web").CompressionStream<R, W>;
const CompressionStream: {
prototype: CompressionStream;
new <R = any, W = any>(format: CompressionFormat): CompressionStream<R, W>;
};
type DecompressionStream<R = any, W = any> = import("stream/web").DecompressionStream<R, W>;
const DecompressionStream: {
prototype: DecompressionStream;
new <R = any, W = any>(format: CompressionFormat): DecompressionStream<R, W>;
};
interface Worker extends _Worker {}
var Worker: typeof globalThis extends {
onerror: any;

View File

@@ -116,6 +116,8 @@ ReadableStreamDefaultReader;
ReadableStreamDefaultController;
// ReadableByteStreamController;
WritableStreamDefaultWriter;
CompressionStream;
DecompressionStream;
function stuff(arg: Blob): any;
function stuff(arg: WebSocket): any;
@@ -135,6 +137,8 @@ function stuff(arg: TextDecoder): any;
function stuff(arg: ReadableStreamDefaultReader): any;
function stuff(arg: ReadableStreamDefaultController): any;
function stuff(arg: WritableStreamDefaultWriter): any;
function stuff(arg: CompressionStream): any;
function stuff(arg: DecompressionStream): any;
function stuff(arg: any) {
return "asfd";
}
@@ -301,6 +305,16 @@ const writableStream = new WritableStream();
const ws = new WebSocket("ws://www.host.com/path");
ws.send("asdf");
}
{
const a = new CompressionStream("gzip");
a.readable;
a.writable;
}
{
const a = new DecompressionStream("gzip");
a.readable;
a.writable;
}
atob("asf");
btoa("asdf");

View File

@@ -0,0 +1 @@
DisableFormat: true

File diff suppressed because it is too large

View File

@@ -15,21 +15,20 @@
* limitations under the License.
*/
/* This Server Name Indication hostname tree is written in C++ but could be
* ported to C. Overall it looks like crap, but has no memory allocations in
* fast path and is O(log n). */
/* This Server Name Indication hostname tree is written in C++ but could be ported to C.
* Overall it looks like crap, but has no memory allocations in fast path and is O(log n). */
#ifndef SNI_TREE_H
#define SNI_TREE_H
#ifndef LIBUS_NO_SSL
#include <algorithm>
#include <cstdlib>
#include <cstring>
#include <map>
#include <memory>
#include <string_view>
#include <cstring>
#include <cstdlib>
#include <algorithm>
/* We only handle a maximum of 10 labels per hostname */
#define MAX_LABELS 10
@@ -38,186 +37,180 @@
thread_local void (*sni_free_cb)(void *);
struct sni_node {
    /* Empty nodes must always hold null */
    void *user = nullptr;
    std::map<std::string_view, std::unique_ptr<sni_node>> children;

    ~sni_node() {
        for (auto &p : children) {
            /* The data of our string_views are managed by malloc */
            free((void *) p.first.data());

            /* Call destructor passed to sni_free only if we hold data.
             * This is important since sni_remove does not have sni_free_cb set */
            if (p.second.get()->user) {
                sni_free_cb(p.second.get()->user);
            }
        }
    }
};

// this can only delete ONE single node, but may cull "empty nodes with null as data"
void *removeUser(struct sni_node *root, unsigned int label, std::string_view *labels, unsigned int numLabels) {
    /* If we are in the bottom (past bottom by one), there is nothing to remove */
    if (label == numLabels) {
        void *user = root->user;
        /* Mark us for culling on the way up */
        root->user = nullptr;
        return user;
    }

    /* Is this label a child of root? */
    auto it = root->children.find(labels[label]);
    if (it == root->children.end()) {
        /* We cannot continue */
        return nullptr;
    }

    void *removedUser = removeUser(it->second.get(), label + 1, labels, numLabels);

    /* On the way back up, we may cull empty nodes with no children.
     * This ends up being where we remove all nodes */
    if (it->second.get()->children.empty() && it->second.get()->user == nullptr) {
        /* The data of our string_views are managed by malloc */
        free((void *) it->first.data());

        /* This can only happen with user set to null, otherwise we use sni_free_cb which is unset by sni_remove */
        root->children.erase(it);
    }

    return removedUser;
}

void *getUser(struct sni_node *root, unsigned int label, std::string_view *labels, unsigned int numLabels) {
    /* Do we have labels to match? Otherwise, return where we stand */
    if (label == numLabels) {
        return root->user;
    }

    /* Try and match by our label */
    auto it = root->children.find(labels[label]);
    if (it != root->children.end()) {
        void *user = getUser(it->second.get(), label + 1, labels, numLabels);
        if (user) {
            return user;
        }
    }

    /* Try and match by wildcard */
    it = root->children.find("*");
    if (it == root->children.end()) {
        /* Matching has failed for both label and wildcard */
        return nullptr;
    }

    /* We matched by wildcard */
    return getUser(it->second.get(), label + 1, labels, numLabels);
}

extern "C" {

    void *sni_new() {
        return new sni_node;
    }

    void sni_free(void *sni, void (*cb)(void *)) {
        /* We want to run this callback for every remaining name */
        sni_free_cb = cb;

        delete (sni_node *) sni;
    }

    /* Returns non-null if this name already exists */
    int sni_add(void *sni, const char *hostname, void *user) {
        struct sni_node *root = (struct sni_node *) sni;

        /* Traverse all labels in hostname */
        for (std::string_view view(hostname, strlen(hostname)), label;
            view.length(); view.remove_prefix(std::min(view.length(), label.length() + 1))) {
            /* Label is the token separated by dot */
            label = view.substr(0, view.find('.', 0));

            auto it = root->children.find(label);
            if (it == root->children.end()) {
                /* Duplicate this label for our kept string_view of it */
                void *labelString = malloc(label.length());
                memcpy(labelString, label.data(), label.length());

                it = root->children.emplace(std::string_view((char *) labelString, label.length()),
                    std::make_unique<sni_node>()).first; // NOLINT(clang-analyzer-unix.Malloc)
            }

            root = it->second.get();
        }

        /* We must never add multiple contexts for the same name, as that would overwrite and leak */
        if (root->user) {
            return 1;
        }

        root->user = user;
        return 0;
    }

    /* Removes the exact match. Wildcards are treated as the verbatim asterisk char, not as an actual wildcard */
    void *sni_remove(void *sni, const char *hostname) {
        struct sni_node *root = (struct sni_node *) sni;

        /* I guess 10 labels is an okay limit */
        std::string_view labels[10];
        unsigned int numLabels = 0;

        /* We traverse all labels first of all */
        for (std::string_view view(hostname, strlen(hostname)), label;
            view.length(); view.remove_prefix(std::min(view.length(), label.length() + 1))) {
            /* Label is the token separated by dot */
            label = view.substr(0, view.find('.', 0));

            /* Anything longer than 10 labels is forbidden */
            if (numLabels == 10) {
                return nullptr;
            }

            labels[numLabels++] = label;
        }

        return removeUser(root, 0, labels, numLabels);
    }

    void *sni_find(void *sni, const char *hostname) {
        struct sni_node *root = (struct sni_node *) sni;

        /* I guess 10 labels is an okay limit */
        std::string_view labels[10];
        unsigned int numLabels = 0;

        /* We traverse all labels first of all */
        for (std::string_view view(hostname, strlen(hostname)), label;
            view.length(); view.remove_prefix(std::min(view.length(), label.length() + 1))) {
            /* Label is the token separated by dot */
            label = view.substr(0, view.find('.', 0));

            /* Anything longer than 10 labels is forbidden */
            if (numLabels == 10) {
                return nullptr;
            }

            labels[numLabels++] = label;
        }

        return getUser(root, 0, labels, numLabels);
    }
}
#endif
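
For context, the sketch below is not part of the diff; it only illustrates how the sni_* entry points shown above could be exercised from a standalone C++ program. The main() harness, the integer contexts, and the printed messages are illustrative stand-ins for whatever per-hostname TLS contexts a real caller (e.g. a TLS server selecting an SSL context during the handshake) would register.

/* Standalone sketch (illustrative only): exercises the sni_* API above.
 * The integer "contexts" stand in for real per-hostname TLS contexts. */
#include <cstdio>

extern "C" {
    void *sni_new();
    void sni_free(void *sni, void (*cb)(void *));
    int sni_add(void *sni, const char *hostname, void *user);
    void *sni_remove(void *sni, const char *hostname);
    void *sni_find(void *sni, const char *hostname);
}

int main() {
    void *tree = sni_new();

    static int apexCtx = 1, wildcardCtx = 2;

    /* Register an exact name and a wildcard name; sni_add returns non-zero for duplicates */
    sni_add(tree, "example.com", &apexCtx);
    sni_add(tree, "*.example.com", &wildcardCtx);
    printf("duplicate add rejected: %d\n", sni_add(tree, "example.com", &apexCtx));     /* 1 */

    /* Lookups: exact labels win, unmatched labels fall back to the "*" child */
    printf("example.com -> ctx %d\n", *(int *) sni_find(tree, "example.com"));          /* 1 */
    printf("api.example.com -> ctx %d\n", *(int *) sni_find(tree, "api.example.com"));  /* 2 */
    printf("unknown.org found: %d\n", sni_find(tree, "unknown.org") != nullptr);        /* 0 */

    /* Removal is literal: "*.example.com" removes the asterisk entry itself */
    sni_remove(tree, "*.example.com");
    printf("api.example.com still found: %d\n", sni_find(tree, "api.example.com") != nullptr); /* 0 */

    /* Remaining contexts ("example.com") are handed to the callback on teardown */
    sni_free(tree, [](void *user) { printf("freeing ctx %d\n", *(int *) user); });
    return 0;
}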

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff