Compare commits


6 Commits

Author         SHA1        Message                                    Date
Jarred Sumner  c086e62bd8  Merge branch 'main' into jarred/shell-ref  2024-07-10 22:08:50 -07:00
Jarred Sumner  2e251e390e  Add hack                                   2024-06-22 15:13:34 -07:00
Jarred Sumner  c6da7aba56  Merge branch 'main' into jarred/shell-ref  2024-06-22 14:50:46 -07:00
Jarred Sumner  4fdb3e6825  Merge branch 'main' into jarred/shell-ref  2024-06-19 00:54:05 -07:00
Jarred Sumner  63e3b26564  Update interpreter.zig                     2024-06-18 14:42:05 -07:00
Jarred Sumner  e7340f440b  Drain microtasks in the Bun Shell          2024-06-18 13:59:27 -07:00
385 changed files with 9826 additions and 10632 deletions


@@ -10,10 +10,9 @@ steps:
blocked_state: "running"
- label: ":pipeline:"
command: "buildkite-agent pipeline upload .buildkite/ci.yml"
agents:
queue: "build-darwin"
command:
- ".buildkite/scripts/prepare-build.sh"
queue: "build-linux"
- if: "build.branch == 'main' && !build.pull_request.repository.fork"
label: ":github:"

(File diff suppressed because it is too large)


@@ -1,55 +0,0 @@
#!/bin/bash
set -eo pipefail
source "$(dirname "$0")/env.sh"
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
cwd="$(pwd)"
mkdir -p build
source "$(dirname "$0")/download-artifact.sh" "build/bun-deps/**" --step "$BUILDKITE_GROUP_KEY-build-deps"
source "$(dirname "$0")/download-artifact.sh" "build/bun-zig.o" --step "$BUILDKITE_GROUP_KEY-build-zig"
source "$(dirname "$0")/download-artifact.sh" "build/bun-cpp-objects.a" --step "$BUILDKITE_GROUP_KEY-build-cpp" --split
cd build
run_command cmake .. "${CMAKE_FLAGS[@]}" \
-GNinja \
-DBUN_LINK_ONLY="1" \
-DNO_CONFIGURE_DEPENDS="1" \
-DBUN_ZIG_OBJ_DIR="$cwd/build" \
-DBUN_CPP_ARCHIVE="$cwd/build/bun-cpp-objects.a" \
-DBUN_DEPS_OUT_DIR="$cwd/build/bun-deps" \
-DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
-DCPU_TARGET="$CPU_TARGET" \
-DUSE_LTO="$USE_LTO" \
-DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \
-DCANARY="$CANARY" \
-DGIT_SHA="$GIT_SHA"
run_command ninja -v -j "$CPUS"
run_command ls
tag="bun-$BUILDKITE_GROUP_KEY"
if [ "$USE_LTO" == "OFF" ]; then
# Remove OS check when LTO is enabled on macOS again
if [[ "$tag" == *"darwin"* ]]; then
tag="$tag-nolto"
fi
fi
for name in bun bun-profile; do
dir="$tag"
if [ "$name" == "bun-profile" ]; then
dir="$tag-profile"
fi
run_command chmod +x "$name"
run_command "./$name" --revision
run_command mkdir -p "$dir"
run_command mv "$name" "$dir/$name"
run_command zip -r "$dir.zip" "$dir"
source "$cwd/.buildkite/scripts/upload-artifact.sh" "$dir.zip"
done
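This script only links: it assumes the dependency archives, the Zig object, and the C++ archive were already produced and uploaded by earlier pipeline steps. A minimal sketch of the environment it expects before running on an agent (all values hypothetical, script path assumed):
# Hypothetical setup for the link-only step; variable names mirror those
# read by the script above, values are examples only.
export BUILDKITE_GROUP_KEY="linux-x64"   # selects which step artifacts to fetch
export CMAKE_BUILD_TYPE="Release"
export CPU_TARGET="haswell"
export USE_LTO="ON"
export USE_DEBUG_JSC="OFF"
export CANARY="1"
export GIT_SHA="$(git rev-parse HEAD)"
bash .buildkite/scripts/build-bun.sh     # hypothetical path to the script above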


@@ -1,34 +0,0 @@
#!/bin/bash
set -eo pipefail
source "$(dirname "$0")/env.sh"
source "$(realpath $(dirname "$0")/../../scripts/update-submodules.sh)"
{ set +x; } 2>/dev/null
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
mkdir -p build
cd build
mkdir -p tmp_modules tmp_functions js codegen
run_command cmake .. "${CMAKE_FLAGS[@]}" \
-GNinja \
-DBUN_CPP_ONLY="1" \
-DNO_CONFIGURE_DEPENDS="1" \
-DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
-DCPU_TARGET="$CPU_TARGET" \
-DUSE_LTO="$USE_LTO" \
-DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \
-DCANARY="$CANARY" \
-DGIT_SHA="$GIT_SHA"
chmod +x compile-cpp-only.sh
source compile-cpp-only.sh -v -j "$CPUS"
{ set +x; } 2>/dev/null
cd ..
source "$(dirname "$0")/upload-artifact.sh" "build/bun-cpp-objects.a" --split


@@ -1,22 +0,0 @@
#!/bin/bash
set -eo pipefail
source "$(dirname "$0")/env.sh"
source "$(realpath $(dirname "$0")/../../scripts/all-dependencies.sh)"
artifacts=(
libcrypto.a libssl.a libdecrepit.a
libcares.a
libarchive.a
liblolhtml.a
libmimalloc.a libmimalloc.o
libtcc.a
libz.a
libzstd.a
libdeflate.a
liblshpack.a
)
for artifact in "${artifacts[@]}"; do
source "$(dirname "$0")/upload-artifact.sh" "build/bun-deps/$artifact"
done


@@ -1,40 +0,0 @@
#!/bin/bash
set -eo pipefail
source "$(dirname "$0")/env.sh"
function assert_bun() {
if ! command -v bun &>/dev/null; then
echo "error: bun is not installed" 1>&2
exit 1
fi
}
function assert_make() {
if ! command -v make &>/dev/null; then
echo "error: make is not installed" 1>&2
exit 1
fi
}
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
function build_node_fallbacks() {
local cwd="src/node-fallbacks"
run_command bun install --cwd "$cwd" --frozen-lockfile
run_command bun run --cwd "$cwd" build
}
function build_old_js() {
run_command bun install --frozen-lockfile
run_command make runtime_js fallback_decoder bun_error
}
assert_bun
assert_make
build_node_fallbacks
build_old_js


@@ -1,80 +0,0 @@
#!/bin/bash
set -eo pipefail
source "$(dirname "$0")/env.sh"
function assert_target() {
local arch="${2-$(uname -m)}"
case "$(echo "$arch" | tr '[:upper:]' '[:lower:]')" in
x64 | x86_64 | amd64)
export ZIG_ARCH="x86_64"
if [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then
export ZIG_CPU_TARGET="nehalem"
else
export ZIG_CPU_TARGET="haswell"
fi
;;
aarch64 | arm64)
export ZIG_ARCH="aarch64"
export ZIG_CPU_TARGET="native"
;;
*)
echo "error: Unsupported architecture: $arch" 1>&2
exit 1
;;
esac
local os="${1-$(uname -s)}"
case "$(echo "$os" | tr '[:upper:]' '[:lower:]')" in
linux)
export ZIG_TARGET="$ZIG_ARCH-linux-gnu" ;;
darwin)
export ZIG_TARGET="$ZIG_ARCH-macos-none" ;;
windows)
export ZIG_TARGET="$ZIG_ARCH-windows-msvc" ;;
*)
echo "error: Unsupported operating system: $os" 1>&2
exit 1
;;
esac
}
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
assert_target "$@"
# Since the zig build depends on files from the zig submodule,
# make sure to update the submodule before building.
run_command git submodule update --init --recursive --progress --depth=1 --checkout src/deps/zig
# TODO: Move these to be part of the CMake build
source "$(dirname "$0")/build-old-js.sh"
cwd="$(pwd)"
mkdir -p build
cd build
run_command cmake .. "${CMAKE_FLAGS[@]}" \
-GNinja \
-DNO_CONFIGURE_DEPENDS="1" \
-DNO_CODEGEN="0" \
-DWEBKIT_DIR="omit" \
-DBUN_ZIG_OBJ_DIR="$cwd/build" \
-DZIG_LIB_DIR="$cwd/src/deps/zig/lib" \
-DCMAKE_BUILD_TYPE="$CMAKE_BUILD_TYPE" \
-DARCH="$ZIG_ARCH" \
-DCPU_TARGET="$ZIG_CPU_TARGET" \
-DZIG_TARGET="$ZIG_TARGET" \
-DUSE_LTO="$USE_LTO" \
-DUSE_DEBUG_JSC="$USE_DEBUG_JSC" \
-DCANARY="$CANARY" \
-DGIT_SHA="$GIT_SHA"
export ONLY_ZIG="1"
run_command ninja "$cwd/build/bun-zig.o" -v -j "$CPUS"
cd ..
source "$(dirname "$0")/upload-artifact.sh" "build/bun-zig.o"


@@ -1,47 +0,0 @@
param (
[Parameter(Mandatory=$true)]
[string[]] $Paths,
[switch] $Split
)
$ErrorActionPreference = "Stop"
function Assert-Buildkite-Agent() {
if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) {
Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install"
exit 1
}
}
function Assert-Join-File() {
if (-not (Get-Command "Join-File" -ErrorAction SilentlyContinue)) {
Write-Error "Cannot find Join-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3"
exit 1
}
}
function Download-Buildkite-Artifact() {
param (
[Parameter(Mandatory=$true)]
[string] $Path
)
if ($Split) {
& buildkite-agent artifact download "$Path.*" --debug --debug-http
Join-File -Path "$(Resolve-Path .)\$Path" -Verbose -DeletePartFiles
} else {
& buildkite-agent artifact download "$Path" --debug --debug-http
}
if (-not (Test-Path $Path)) {
Write-Error "Could not find artifact: $Path"
exit 1
}
}
Assert-Buildkite-Agent
if ($Split) {
Assert-Join-File
}
foreach ($Path in $Paths) {
Download-Buildkite-Artifact $Path
}


@@ -1,46 +0,0 @@
#!/bin/bash
set -eo pipefail
function assert_buildkite_agent() {
if ! command -v buildkite-agent &> /dev/null; then
echo "error: Cannot find buildkite-agent, please install it:"
echo "https://buildkite.com/docs/agent/v3/install"
exit 1
fi
}
function download_buildkite_artifact() {
local path="$1"; shift
local split="0"
local args=()
while true; do
if [ -z "$1" ]; then
break
fi
case "$1" in
--split) split="1"; shift ;;
*) args+=("$1"); shift ;;
esac
done
if [ "$split" == "1" ]; then
run_command buildkite-agent artifact download "$path.*" . "${args[@]}"
run_command cat $path.?? > "$path"
run_command rm -f $path.??
else
run_command buildkite-agent artifact download "$path" . "${args[@]}"
fi
if [[ "$path" != *"*"* ]] && [ ! -f "$path" ]; then
echo "error: Could not find artifact: $path"
exit 1
fi
}
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
assert_buildkite_agent
download_buildkite_artifact "$@"
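The --split branch relies on the naming convention of the matching upload script: numbered ~50MB parts whose two-digit suffixes concatenate back in lexical order. A sketch with a hypothetical artifact name:
# After: buildkite-agent artifact download "bun-cpp-objects.a.*" .
# the parts sit on disk as:
#   bun-cpp-objects.a.00  bun-cpp-objects.a.01  bun-cpp-objects.a.02
cat bun-cpp-objects.a.?? > bun-cpp-objects.a   # reassemble in order
rm -f bun-cpp-objects.a.??                     # drop the parts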


@@ -1,118 +0,0 @@
#!/bin/bash
set -eo pipefail
function assert_os() {
local os="$(uname -s)"
case "$os" in
Linux)
echo "linux" ;;
Darwin)
echo "darwin" ;;
*)
echo "error: Unsupported operating system: $os" 1>&2
exit 1
;;
esac
}
function assert_arch() {
local arch="$(uname -m)"
case "$arch" in
aarch64 | arm64)
echo "aarch64" ;;
x86_64 | amd64)
echo "x64" ;;
*)
echo "error: Unknown architecture: $arch" 1>&2
exit 1
;;
esac
}
function assert_build() {
if [ -z "$BUILDKITE_REPO" ]; then
echo "error: Cannot find repository for this build"
exit 1
fi
if [ -z "$BUILDKITE_COMMIT" ]; then
echo "error: Cannot find commit for this build"
exit 1
fi
if [ -z "$BUILDKITE_STEP_KEY" ]; then
echo "error: Cannot find step key for this build"
exit 1
fi
if [ -n "$BUILDKITE_GROUP_KEY" ] && [[ "$BUILDKITE_STEP_KEY" != "$BUILDKITE_GROUP_KEY"* ]]; then
echo "error: Build step '$BUILDKITE_STEP_KEY' does not start with group key '$BUILDKITE_GROUP_KEY'"
exit 1
fi
# Skip os and arch checks for Zig, since it's cross-compiled on macOS
if [[ "$BUILDKITE_STEP_KEY" != *"zig"* ]]; then
local os="$(assert_os)"
if [[ "$BUILDKITE_STEP_KEY" != *"$os"* ]]; then
echo "error: Build step '$BUILDKITE_STEP_KEY' does not match operating system '$os'"
exit 1
fi
local arch="$(assert_arch)"
if [[ "$BUILDKITE_STEP_KEY" != *"$arch"* ]]; then
echo "error: Build step '$BUILDKITE_STEP_KEY' does not match architecture '$arch'"
exit 1
fi
fi
}
function assert_buildkite_agent() {
if ! command -v buildkite-agent &> /dev/null; then
echo "error: Cannot find buildkite-agent, please install it:"
echo "https://buildkite.com/docs/agent/v3/install"
exit 1
fi
}
function export_environment() {
source "$(realpath $(dirname "$0")/../../scripts/env.sh)"
{ set +x; } 2>/dev/null
export GIT_SHA="$BUILDKITE_COMMIT"
export CCACHE_DIR="$HOME/.cache/ccache/$BUILDKITE_STEP_KEY"
export SCCACHE_DIR="$HOME/.cache/sccache/$BUILDKITE_STEP_KEY"
export ZIG_LOCAL_CACHE_DIR="$HOME/.cache/zig-cache/$BUILDKITE_STEP_KEY"
export BUN_DEPS_CACHE_DIR="$HOME/.cache/bun-deps/$BUILDKITE_STEP_KEY"
if [ "$(assert_arch)" == "aarch64" ]; then
export CPU_TARGET="native"
elif [[ "$BUILDKITE_STEP_KEY" == *"baseline"* ]]; then
export CPU_TARGET="nehalem"
else
export CPU_TARGET="haswell"
fi
if [[ "$BUILDKITE_STEP_KEY" == *"nolto"* ]]; then
export USE_LTO="OFF"
else
export USE_LTO="ON"
fi
if $(buildkite-agent meta-data exists release &> /dev/null); then
export CMAKE_BUILD_TYPE="$(buildkite-agent meta-data get release)"
else
export CMAKE_BUILD_TYPE="Release"
fi
if $(buildkite-agent meta-data exists canary &> /dev/null); then
export CANARY="$(buildkite-agent meta-data get canary)"
else
export CANARY="1"
fi
if $(buildkite-agent meta-data exists assertions &> /dev/null); then
export USE_DEBUG_JSC="$(buildkite-agent meta-data get assertions)"
else
export USE_DEBUG_JSC="OFF"
fi
if [ "$BUILDKITE_CLEAN_CHECKOUT" == "true" ]; then
rm -rf "$CCACHE_DIR"
rm -rf "$SCCACHE_DIR"
rm -rf "$ZIG_LOCAL_CACHE_DIR"
rm -rf "$BUN_DEPS_CACHE_DIR"
fi
}
assert_build
assert_buildkite_agent
export_environment
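As a worked example, the logic above derives the whole build configuration from the step name alone (step key hypothetical):
# BUILDKITE_STEP_KEY="linux-x64-baseline-build-cpp" yields:
#   CPU_TARGET=nehalem    # key contains "baseline", agent is not aarch64
#   USE_LTO=ON            # key does not contain "nolto"
#   CCACHE_DIR=$HOME/.cache/ccache/linux-x64-baseline-build-cpp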


@@ -1,97 +0,0 @@
#!/bin/bash
set -eo pipefail
function assert_build() {
if [ -z "$BUILDKITE_REPO" ]; then
echo "error: Cannot find repository for this build"
exit 1
fi
if [ -z "$BUILDKITE_COMMIT" ]; then
echo "error: Cannot find commit for this build"
exit 1
fi
}
function assert_buildkite_agent() {
if ! command -v buildkite-agent &> /dev/null; then
echo "error: Cannot find buildkite-agent, please install it:"
echo "https://buildkite.com/docs/agent/v3/install"
exit 1
fi
}
function assert_jq() {
assert_command "jq" "jq" "https://stedolan.github.io/jq/"
}
function assert_curl() {
assert_command "curl" "curl" "https://curl.se/download.html"
}
function assert_command() {
local command="$1"
local package="$2"
local help_url="$3"
if ! command -v "$command" &> /dev/null; then
echo "warning: $command is not installed, installing..."
if command -v brew &> /dev/null; then
HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package"
else
echo "error: Cannot install $command, please install it"
if [ -n "$help_url" ]; then
echo ""
echo "hint: See $help_url for help"
fi
exit 1
fi
fi
}
function assert_release() {
if [ "$RELEASE" == "1" ]; then
run_command buildkite-agent meta-data set canary "0"
fi
}
function assert_canary() {
local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
if [ -z "$canary" ]; then
local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github.com/([^/]+)/([^/]+).git#\1/\2#g')
local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")"
if [ "$tag" == "null" ]; then
canary="1"
else
local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by")
if [ "$revision" == "null" ]; then
canary="1"
else
canary="$revision"
fi
fi
run_command buildkite-agent meta-data set canary "$canary"
fi
}
function upload_buildkite_pipeline() {
local path="$1"
if [ ! -f "$path" ]; then
echo "error: Cannot find pipeline: $path"
exit 1
fi
run_command buildkite-agent pipeline upload "$path"
}
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
assert_build
assert_buildkite_agent
assert_jq
assert_curl
assert_release
assert_canary
upload_buildkite_pipeline ".buildkite/ci.yml"
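The canary number computed above is simply how many commits this build is ahead of the latest GitHub release. Expanded by hand (tag and output hypothetical):
repo="oven-sh/bun"
tag="bun-v1.1.17"    # hypothetical latest release tag
curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" \
  | jq -r ".ahead_by"    # e.g. prints 42, so canary=42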


@@ -1,47 +0,0 @@
param (
[Parameter(Mandatory=$true)]
[string[]] $Paths,
[switch] $Split
)
$ErrorActionPreference = "Stop"
function Assert-Buildkite-Agent() {
if (-not (Get-Command "buildkite-agent" -ErrorAction SilentlyContinue)) {
Write-Error "Cannot find buildkite-agent, please install it: https://buildkite.com/docs/agent/v3/install"
exit 1
}
}
function Assert-Split-File() {
if (-not (Get-Command "Split-File" -ErrorAction SilentlyContinue)) {
Write-Error "Cannot find Split-File, please install it: https://www.powershellgallery.com/packages/FileSplitter/1.3"
exit 1
}
}
function Upload-Buildkite-Artifact() {
param (
[Parameter(Mandatory=$true)]
[string] $Path
)
if (-not (Test-Path $Path)) {
Write-Error "Could not find artifact: $Path"
exit 1
}
if ($Split) {
Remove-Item -Path "$Path.*" -Force
Split-File -Path (Resolve-Path $Path) -PartSizeBytes "50MB" -Verbose
$Path = "$Path.*"
}
& buildkite-agent artifact upload "$Path" --debug --debug-http
}
Assert-Buildkite-Agent
if ($Split) {
Assert-Split-File
}
foreach ($Path in $Paths) {
Upload-Buildkite-Artifact $Path
}


@@ -1,54 +0,0 @@
#!/bin/bash
set -eo pipefail
function assert_buildkite_agent() {
if ! command -v buildkite-agent &> /dev/null; then
echo "error: Cannot find buildkite-agent, please install it:"
echo "https://buildkite.com/docs/agent/v3/install"
exit 1
fi
}
function assert_split() {
if ! command -v split &> /dev/null; then
echo "error: Cannot find split, please install it:"
echo "https://www.gnu.org/software/coreutils/split"
exit 1
fi
}
function upload_buildkite_artifact() {
local path="$1"; shift
local split="0"
local args=()
while true; do
if [ -z "$1" ]; then
break
fi
case "$1" in
--split) split="1"; shift ;;
*) args+=("$1"); shift ;;
esac
done
if [ ! -f "$path" ]; then
echo "error: Could not find artifact: $path"
exit 1
fi
if [ "$split" == "1" ]; then
run_command rm -f "$path."*
run_command split -b 50MB -d "$path" "$path."
run_command buildkite-agent artifact upload "$path.*" "${args[@]}"
else
run_command buildkite-agent artifact upload "$path" "${args[@]}"
fi
}
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
assert_buildkite_agent
upload_buildkite_artifact "$@"
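On the upload side, the --split branch shards the file into ~50MB parts before upload; a sketch of its effect on a hypothetical artifact:
split -b 50MB -d bun-cpp-objects.a bun-cpp-objects.a.   # writes .00, .01, .02, ...
buildkite-agent artifact upload "bun-cpp-objects.a.*"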


@@ -3,15 +3,7 @@
set -eo pipefail
function assert_main() {
if [ -z "$BUILDKITE_REPO" ]; then
echo "error: Cannot find repository for this build"
exit 1
fi
if [ -z "$BUILDKITE_COMMIT" ]; then
echo "error: Cannot find commit for this build"
exit 1
fi
if [ -n "$BUILDKITE_PULL_REQUEST_REPO" ] && [ "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]; then
if [[ "$BUILDKITE_PULL_REQUEST_REPO" && "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]]; then
echo "error: Cannot upload release from a fork"
exit 1
fi
@@ -33,158 +25,70 @@ function assert_buildkite_agent() {
fi
}
function assert_github() {
assert_command "gh" "gh" "https://github.com/cli/cli#installation"
assert_buildkite_secret "GITHUB_TOKEN"
# gh expects the token in $GH_TOKEN
export GH_TOKEN="$GITHUB_TOKEN"
}
function assert_aws() {
assert_command "aws" "awscli" "https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html"
for secret in AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY AWS_ENDPOINT AWS_BUCKET; do
assert_buildkite_secret "$secret"
done
}
function assert_sentry() {
assert_command "sentry-cli" "getsentry/tools/sentry-cli" "https://docs.sentry.io/cli/installation/"
for secret in SENTRY_AUTH_TOKEN SENTRY_ORG SENTRY_PROJECT; do
assert_buildkite_secret "$secret"
done
}
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
function assert_command() {
local command="$1"
local package="$2"
local help_url="$3"
if ! command -v "$command" &> /dev/null; then
echo "warning: $command is not installed, installing..."
function assert_gh() {
if ! command -v gh &> /dev/null; then
echo "warning: gh is not installed, installing..."
if command -v brew &> /dev/null; then
HOMEBREW_NO_AUTO_UPDATE=1 run_command brew install "$package"
brew install gh
else
echo "error: Cannot install $command, please install it"
if [ -n "$help_url" ]; then
echo ""
echo "hint: See $help_url for help"
fi
echo "error: Cannot install gh, please install it:"
echo "https://github.com/cli/cli#installation"
exit 1
fi
fi
}
function assert_buildkite_secret() {
local key="$1"
local value=$(buildkite-agent secret get "$key")
if [ -z "$value" ]; then
echo "error: Cannot find $key secret"
function assert_gh_token() {
local token=$(buildkite-agent secret get GITHUB_TOKEN)
if [ -z "$token" ]; then
echo "error: Cannot find GITHUB_TOKEN secret"
echo ""
echo "hint: Create a secret named $key with a value:"
echo "hint: Create a secret named GITHUB_TOKEN with a GitHub access token:"
echo "https://buildkite.com/docs/pipelines/buildkite-secrets"
exit 1
fi
export "$key"="$value"
export GH_TOKEN="$token"
}
function release_tag() {
local version="$1"
if [ "$version" == "canary" ]; then
echo "canary"
else
echo "bun-v$version"
function download_artifact() {
local name=$1
buildkite-agent artifact download "$name" .
if [ ! -f "$name" ]; then
echo "error: Cannot find Buildkite artifact: $name"
exit 1
fi
}
function create_sentry_release() {
local version="$1"
local release="$version"
if [ "$version" == "canary" ]; then
release="$BUILDKITE_COMMIT-canary"
fi
run_command sentry-cli releases new "$release" --finalize
run_command sentry-cli releases set-commits "$release" --auto --ignore-missing
if [ "$version" == "canary" ]; then
run_command sentry-cli deploys new --env="canary" --release="$release"
fi
function upload_assets() {
local tag=$1
local files=${@:2}
gh release upload "$tag" $files --clobber --repo "$BUILDKITE_REPO"
}
function download_buildkite_artifacts() {
local dir="$1"
local names="${@:2}"
for name in "${names[@]}"; do
run_command buildkite-agent artifact download "$name" "$dir"
if [ ! -f "$dir/$name" ]; then
echo "error: Cannot find Buildkite artifact: $name"
exit 1
fi
done
}
assert_main
assert_buildkite_agent
assert_gh
assert_gh_token
function upload_github_assets() {
local version="$1"
local tag="$(release_tag "$version")"
local files="${@:2}"
for file in "${files[@]}"; do
run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"
done
if [ "$version" == "canary" ]; then
run_command gh release edit "$tag" --repo "$BUILDKITE_REPO" \
--notes "This canary release of Bun corresponds to the commit: $BUILDKITE_COMMIT"
fi
}
declare artifacts=(
bun-darwin-aarch64.zip
bun-darwin-aarch64-profile.zip
bun-darwin-x64.zip
bun-darwin-x64-profile.zip
bun-linux-aarch64.zip
bun-linux-aarch64-profile.zip
bun-linux-x64.zip
bun-linux-x64-profile.zip
bun-linux-x64-baseline.zip
bun-linux-x64-baseline-profile.zip
bun-windows-x64.zip
bun-windows-x64-profile.zip
bun-windows-x64-baseline.zip
bun-windows-x64-baseline-profile.zip
)
function upload_s3_files() {
local folder="$1"
local files="${@:2}"
for file in "${files[@]}"; do
run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
done
}
for artifact in "${artifacts[@]}"; do
download_artifact $artifact
done
function create_release() {
assert_main
assert_buildkite_agent
assert_github
assert_sentry
local tag="$1" # 'canary' or 'x.y.z'
local artifacts=(
bun-darwin-aarch64.zip
bun-darwin-aarch64-profile.zip
bun-darwin-x64.zip
bun-darwin-x64-profile.zip
bun-linux-aarch64.zip
bun-linux-aarch64-profile.zip
bun-linux-x64.zip
bun-linux-x64-profile.zip
bun-linux-x64-baseline.zip
bun-linux-x64-baseline-profile.zip
bun-windows-x64.zip
bun-windows-x64-profile.zip
bun-windows-x64-baseline.zip
bun-windows-x64-baseline-profile.zip
)
download_buildkite_artifacts "." "${artifacts[@]}"
upload_s3_files "releases/$BUILDKITE_COMMIT" "${artifacts[@]}"
upload_s3_files "releases/$tag" "${artifacts[@]}"
upload_github_assets "$tag" "${artifacts[@]}"
create_sentry_release "$tag"
}
function assert_canary() {
local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
if [ -z "$canary" ] || [ "$canary" == "0" ]; then
echo "warn: Skipping release because this is not a canary build"
exit 0
fi
}
assert_canary
create_release "canary"
upload_assets "canary" "${artifacts[@]}"

.github/workflows/build-darwin.yml (new file, 312 lines)

@@ -0,0 +1,312 @@
name: Build Darwin
permissions:
contents: read
actions: write
on:
workflow_call:
inputs:
runs-on:
type: string
default: macos-12-large
tag:
type: string
required: true
arch:
type: string
required: true
cpu:
type: string
required: true
assertions:
type: boolean
canary:
type: boolean
no-cache:
type: boolean
env:
LLVM_VERSION: 16
BUN_VERSION: 1.1.8
LC_CTYPE: "en_US.UTF-8"
LC_ALL: "en_US.UTF-8"
jobs:
build-submodules:
name: Build Submodules
runs-on: ${{ inputs.runs-on }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
.gitmodules
src/deps
scripts
- name: Hash Submodules
id: hash
run: |
print_versions() {
git submodule | grep -v WebKit
echo "LLVM_VERSION=${{ env.LLVM_VERSION }}"
cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
}
echo "hash=$(print_versions | shasum)" >> $GITHUB_OUTPUT
- if: ${{ !inputs.no-cache }}
name: Restore Cache
id: cache
uses: actions/cache/restore@v4
with:
path: ${{ runner.temp }}/bun-deps
key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }}
# TODO: Figure out how to cache homebrew dependencies
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
name: Install Dependencies
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
run: |
brew install \
llvm@${{ env.LLVM_VERSION }} \
ccache \
rust \
pkg-config \
coreutils \
libtool \
cmake \
libiconv \
automake \
openssl@1.1 \
ninja \
golang \
gnu-sed --force --overwrite
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
name: Clone Submodules
run: |
./scripts/update-submodules.sh
- name: Build Submodules
if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
env:
CPU_TARGET: ${{ inputs.cpu }}
BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps
run: |
mkdir -p $BUN_DEPS_OUT_DIR
./scripts/all-dependencies.sh
- name: Save Cache
if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
uses: actions/cache/save@v4
with:
path: ${{ runner.temp }}/bun-deps
key: ${{ steps.cache.outputs.cache-primary-key }}
- name: Upload bun-${{ inputs.tag }}-deps
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}-deps
path: ${{ runner.temp }}/bun-deps
if-no-files-found: error
build-cpp:
name: Build C++
runs-on: ${{ inputs.runs-on }}
steps:
- name: Checkout
uses: actions/checkout@v4
with:
submodules: recursive
# TODO: Figure out how to cache homebrew dependencies
- name: Install Dependencies
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
run: |
brew install \
llvm@${{ env.LLVM_VERSION }} \
ccache \
rust \
pkg-config \
coreutils \
libtool \
cmake \
libiconv \
automake \
openssl@1.1 \
ninja \
golang \
gnu-sed --force --overwrite
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- if: ${{ !inputs.no-cache }}
name: Restore Cache
uses: actions/cache@v4
with:
path: ${{ runner.temp }}/ccache
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
restore-keys: |
bun-${{ inputs.tag }}-cpp-
- name: Compile
env:
CPU_TARGET: ${{ inputs.cpu }}
SOURCE_DIR: ${{ github.workspace }}
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps
CCACHE_DIR: ${{ runner.temp }}/ccache
run: |
mkdir -p $OBJ_DIR
cd $OBJ_DIR
cmake -S $SOURCE_DIR -B $OBJ_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
chmod +x compile-cpp-only.sh
./compile-cpp-only.sh -v
- name: Upload bun-${{ inputs.tag }}-cpp
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
if-no-files-found: error
build-zig:
name: Build Zig
uses: ./.github/workflows/build-zig.yml
with:
os: darwin
only-zig: true
tag: ${{ inputs.tag }}
arch: ${{ inputs.arch }}
cpu: ${{ inputs.cpu }}
assertions: ${{ inputs.assertions }}
canary: ${{ inputs.canary }}
no-cache: ${{ inputs.no-cache }}
link:
name: Link
runs-on: ${{ inputs.runs-on }}
needs:
- build-submodules
- build-cpp
- build-zig
steps:
- uses: actions/checkout@v4
# TODO: Figure out how to cache homebrew dependencies
- name: Install Dependencies
env:
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
HOMEBREW_NO_AUTO_UPDATE: 1
HOMEBREW_NO_INSTALL_CLEANUP: 1
run: |
brew install \
llvm@${{ env.LLVM_VERSION }} \
ccache \
rust \
pkg-config \
coreutils \
libtool \
cmake \
libiconv \
automake \
openssl@1.1 \
ninja \
golang \
gnu-sed --force --overwrite
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
brew link --overwrite llvm@$LLVM_VERSION
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Download bun-${{ inputs.tag }}-deps
uses: actions/download-artifact@v4
with:
name: bun-${{ inputs.tag }}-deps
path: ${{ runner.temp }}/bun-deps
- name: Download bun-${{ inputs.tag }}-cpp
uses: actions/download-artifact@v4
with:
name: bun-${{ inputs.tag }}-cpp
path: ${{ runner.temp }}/bun-cpp-obj
- name: Download bun-${{ inputs.tag }}-zig
uses: actions/download-artifact@v4
with:
name: bun-${{ inputs.tag }}-zig
path: ${{ runner.temp }}/release
- if: ${{ !inputs.no-cache }}
name: Restore Cache
uses: actions/cache@v4
with:
path: ${{ runner.temp }}/ccache
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
restore-keys: |
bun-${{ inputs.tag }}-cpp-
- name: Link
env:
CPU_TARGET: ${{ inputs.cpu }}
CCACHE_DIR: ${{ runner.temp }}/ccache
run: |
SRC_DIR=$PWD
mkdir ${{ runner.temp }}/link-build
cd ${{ runner.temp }}/link-build
cmake $SRC_DIR \
-G Ninja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ_DIR="${{ runner.temp }}/release" \
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
-DBUN_DEPS_OUT_DIR="${{ runner.temp }}/bun-deps" \
-DNO_CONFIGURE_DEPENDS=1
ninja -v
- name: Prepare
run: |
cd ${{ runner.temp }}/link-build
chmod +x bun-profile bun
mkdir -p bun-${{ inputs.tag }}-profile/ bun-${{ inputs.tag }}/
mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile
mv bun bun-${{ inputs.tag }}/bun
zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile
zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }}
- name: Upload bun-${{ inputs.tag }}
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}
path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}.zip
if-no-files-found: error
- name: Upload bun-${{ inputs.tag }}-profile
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}-profile
path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}-profile.zip
if-no-files-found: error
on-failure:
if: ${{ github.repository_owner == 'oven-sh' && failure() }}
name: On Failure
needs: link
runs-on: ubuntu-latest
steps:
- name: Send Message
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
nodetail: true
color: "#FF0000"
title: ""
description: |
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
@${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**

.github/workflows/build-linux.yml (new file, 64 lines)

@@ -0,0 +1,64 @@
name: Build Linux
permissions:
contents: read
actions: write
on:
workflow_call:
inputs:
runs-on:
type: string
required: true
tag:
type: string
required: true
arch:
type: string
required: true
cpu:
type: string
required: true
assertions:
type: boolean
zig-optimize:
type: string
canary:
type: boolean
no-cache:
type: boolean
jobs:
build:
name: Build Linux
uses: ./.github/workflows/build-zig.yml
with:
os: linux
only-zig: false
runs-on: ${{ inputs.runs-on }}
tag: ${{ inputs.tag }}
arch: ${{ inputs.arch }}
cpu: ${{ inputs.cpu }}
assertions: ${{ inputs.assertions }}
zig-optimize: ${{ inputs.zig-optimize }}
canary: ${{ inputs.canary }}
no-cache: ${{ inputs.no-cache }}
on-failure:
if: ${{ github.repository_owner == 'oven-sh' && failure() }}
name: On Failure
needs: build
runs-on: ubuntu-latest
steps:
- name: Send Message
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
nodetail: true
color: "#FF0000"
title: ""
description: |
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
@${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**

.github/workflows/build-windows.yml (new file, 348 lines)

@@ -0,0 +1,348 @@
name: Build Windows
permissions:
contents: read
actions: write
on:
workflow_call:
inputs:
runs-on:
type: string
default: windows
tag:
type: string
required: true
arch:
type: string
required: true
cpu:
type: string
required: true
assertions:
type: boolean
canary:
type: boolean
no-cache:
type: boolean
bun-version:
type: string
default: 1.1.7
env:
# Must specify exact version of LLVM for Windows
LLVM_VERSION: 18.1.8
BUN_VERSION: ${{ inputs.bun-version }}
BUN_GARBAGE_COLLECTOR_LEVEL: 1
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: 1
CI: true
USE_LTO: 1
jobs:
build-submodules:
name: Build Submodules
runs-on: ${{ inputs.runs-on }}
steps:
- name: Install Scoop
run: |
Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression
Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH
- name: Setup Git
run: |
git config --global core.autocrlf false
git config --global core.eol lf
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
.gitmodules
src/deps
scripts
- name: Hash Submodules
id: hash
run: |
$data = "$(& {
git submodule | Where-Object { $_ -notmatch 'WebKit' }
echo "LLVM_VERSION=${{ env.LLVM_VERSION }}"
Get-Content -Path (Get-ChildItem -Path 'scripts/build*.ps1', 'scripts/all-dependencies.ps1', 'scripts/env.ps1' | Sort-Object -Property Name).FullName | Out-String
echo 1
})"
$hash = ( -join ((New-Object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider).ComputeHash([System.Text.Encoding]::UTF8.GetBytes($data)) | ForEach-Object { $_.ToString("x2") } )).Substring(0, 10)
echo "hash=${hash}" >> $env:GITHUB_OUTPUT
- if: ${{ !inputs.no-cache }}
name: Restore Cache
id: cache
uses: actions/cache/restore@v4
with:
path: bun-deps
key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }}
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
name: Install LLVM and Ninja
run: |
scoop install ninja
scoop install llvm@${{ env.LLVM_VERSION }}
scoop install nasm@2.16.01
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
name: Clone Submodules
run: |
.\scripts\update-submodules.ps1
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
name: Build Dependencies
env:
CPU_TARGET: ${{ inputs.cpu }}
CCACHE_DIR: ccache
USE_LTO: 1
run: |
.\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
$env:BUN_DEPS_OUT_DIR = (mkdir -Force "./bun-deps")
.\scripts\all-dependencies.ps1
- name: Save Cache
if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
uses: actions/cache/save@v4
with:
path: bun-deps
key: ${{ steps.cache.outputs.cache-primary-key }}
- name: Upload bun-${{ inputs.tag }}-deps
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}-deps
path: bun-deps
if-no-files-found: error
codegen:
name: Codegen
runs-on: ubuntu-latest
steps:
- name: Setup Git
run: |
git config --global core.autocrlf false
git config --global core.eol lf
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ inputs.bun-version }}
- name: Codegen
run: |
./scripts/cross-compile-codegen.sh win32 x64
- if: ${{ inputs.canary }}
name: Calculate Revision
run: |
echo "canary_revision=$(GITHUB_TOKEN="${{ github.token }}"
bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision
- name: Upload bun-${{ inputs.tag }}-codegen
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}-codegen
path: build-codegen-win32-x64
if-no-files-found: error
build-cpp:
name: Build C++
needs: codegen
runs-on: ${{ inputs.runs-on }}
steps:
- name: Install Scoop
run: |
Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression
Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH
- name: Setup Git
run: |
git config --global core.autocrlf false
git config --global core.eol lf
- name: Checkout
uses: actions/checkout@v4
with:
submodules: recursive
- name: Install LLVM and Ninja
run: |
scoop install ninja
scoop install llvm@${{ env.LLVM_VERSION }}
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ inputs.bun-version }}
- if: ${{ !inputs.no-cache }}
name: Restore Cache
uses: actions/cache@v4
with:
path: ccache
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
restore-keys: |
bun-${{ inputs.tag }}-cpp-
- name: Download bun-${{ inputs.tag }}-codegen
uses: actions/download-artifact@v4
with:
name: bun-${{ inputs.tag }}-codegen
path: build
- name: Compile
env:
CPU_TARGET: ${{ inputs.cpu }}
CCACHE_DIR: ccache
USE_LTO: 1
run: |
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
$CANARY_REVISION = 0
.\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
.\scripts\update-submodules.ps1
.\scripts\build-libuv.ps1 -CloneOnly $True
cd build
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
-DNO_CODEGEN=1 `
-DUSE_LTO=1 `
-DNO_CONFIGURE_DEPENDS=1 `
"-DCANARY=${CANARY_REVISION}" `
-DBUN_CPP_ONLY=1 ${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }}
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
.\compile-cpp-only.ps1 -v
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }
- name: Upload bun-${{ inputs.tag }}-cpp
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}-cpp
path: build/bun-cpp-objects.a
if-no-files-found: error
build-zig:
name: Build Zig
uses: ./.github/workflows/build-zig.yml
with:
os: windows
zig-optimize: ReleaseSafe
only-zig: true
tag: ${{ inputs.tag }}
arch: ${{ inputs.arch }}
cpu: ${{ inputs.cpu }}
assertions: ${{ inputs.assertions }}
canary: ${{ inputs.canary }}
no-cache: ${{ inputs.no-cache }}
link:
name: Link
runs-on: ${{ inputs.runs-on }}
needs:
- build-submodules
- build-cpp
- build-zig
- codegen
steps:
- name: Install Scoop
run: |
Set-ExecutionPolicy -ExecutionPolicy RemoteSigned -Scope CurrentUser
Invoke-RestMethod -Uri https://get.scoop.sh | Invoke-Expression
Join-Path (Resolve-Path ~).Path "scoop\shims" >> $Env:GITHUB_PATH
- name: Setup Git
run: |
git config --global core.autocrlf false
git config --global core.eol lf
- name: Checkout
uses: actions/checkout@v4
with:
submodules: recursive
- name: Install Ninja
run: |
scoop install ninja
scoop install llvm@${{ env.LLVM_VERSION }}
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ inputs.bun-version }}
- name: Download bun-${{ inputs.tag }}-deps
uses: actions/download-artifact@v4
with:
name: bun-${{ inputs.tag }}-deps
path: bun-deps
- name: Download bun-${{ inputs.tag }}-cpp
uses: actions/download-artifact@v4
with:
name: bun-${{ inputs.tag }}-cpp
path: bun-cpp
- name: Download bun-${{ inputs.tag }}-zig
uses: actions/download-artifact@v4
with:
name: bun-${{ inputs.tag }}-zig
path: bun-zig
- name: Download bun-${{ inputs.tag }}-codegen
uses: actions/download-artifact@v4
with:
name: bun-${{ inputs.tag }}-codegen
path: build
- if: ${{ !inputs.no-cache }}
name: Restore Cache
uses: actions/cache@v4
with:
path: ccache
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
restore-keys: |
bun-${{ inputs.tag }}-cpp-
- name: Link
env:
CPU_TARGET: ${{ inputs.cpu }}
CCACHE_DIR: ccache
run: |
.\scripts\update-submodules.ps1
.\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
Set-Location build
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
$CANARY_REVISION = 0
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
-DNO_CODEGEN=1 `
-DNO_CONFIGURE_DEPENDS=1 `
"-DCANARY=${CANARY_REVISION}" `
-DBUN_LINK_ONLY=1 `
-DUSE_LTO=1 `
"-DBUN_DEPS_OUT_DIR=$(Resolve-Path ../bun-deps)" `
"-DBUN_CPP_ARCHIVE=$(Resolve-Path ../bun-cpp/bun-cpp-objects.a)" `
"-DBUN_ZIG_OBJ_DIR=$(Resolve-Path ../bun-zig)" `
${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }}
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
ninja -v
if ($LASTEXITCODE -ne 0) { throw "Link failed!" }
- name: Prepare
run: |
$Dist = mkdir -Force "bun-${{ inputs.tag }}"
cp -r build\bun.exe "$Dist\bun.exe"
Compress-Archive -Force "$Dist" "${Dist}.zip"
$Dist = "$Dist-profile"
MkDir -Force "$Dist"
cp -r build\bun.exe "$Dist\bun.exe"
cp -r build\bun.pdb "$Dist\bun.pdb"
Compress-Archive -Force "$Dist" "$Dist.zip"
.\build\bun.exe --print "JSON.stringify(require('bun:internal-for-testing').crash_handler.getFeatureData())" > .\features.json
- name: Upload bun-${{ inputs.tag }}
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}
path: bun-${{ inputs.tag }}.zip
if-no-files-found: error
- name: Upload bun-${{ inputs.tag }}-profile
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}-profile
path: bun-${{ inputs.tag }}-profile.zip
if-no-files-found: error
- name: Upload bun-feature-data
uses: actions/upload-artifact@v4
with:
name: bun-feature-data
path: features.json
if-no-files-found: error
overwrite: true
on-failure:
if: ${{ github.repository_owner == 'oven-sh' && failure() }}
name: On Failure
needs: link
runs-on: ubuntu-latest
steps:
- name: Send Message
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
nodetail: true
color: "#FF0000"
title: ""
description: |
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
@${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**

.github/workflows/build-zig.yml (new file, 122 lines)

@@ -0,0 +1,122 @@
name: Build Zig
permissions:
contents: read
actions: write
on:
workflow_call:
inputs:
runs-on:
type: string
default: ${{ github.repository_owner != 'oven-sh' && 'ubuntu-latest' || inputs.only-zig && 'namespace-profile-bun-ci-linux-x64' || inputs.arch == 'x64' && 'namespace-profile-bun-ci-linux-x64' || 'namespace-profile-bun-ci-linux-aarch64' }}
tag:
type: string
required: true
os:
type: string
required: true
arch:
type: string
required: true
cpu:
type: string
required: true
assertions:
type: boolean
default: false
zig-optimize:
type: string # 'ReleaseSafe' or 'ReleaseFast'
default: ReleaseFast
canary:
type: boolean
default: ${{ github.ref == 'refs/heads/main' }}
only-zig:
type: boolean
default: true
no-cache:
type: boolean
default: false
jobs:
build-zig:
name: ${{ inputs.only-zig && 'Build Zig' || 'Build & Link' }}
runs-on: ${{ inputs.runs-on }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Calculate Cache Key
id: cache
run: |
echo "key=${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}" >> $GITHUB_OUTPUT
- if: ${{ !inputs.no-cache }}
name: Restore Cache
uses: actions/cache@v4
with:
key: bun-${{ inputs.tag }}-docker-${{ steps.cache.outputs.key }}
restore-keys: |
bun-${{ inputs.tag }}-docker-
path: |
${{ runner.temp }}/dockercache
- name: Setup Docker
uses: docker/setup-buildx-action@v3
with:
install: true
platforms: |
linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
- name: Build
uses: docker/build-push-action@v5
with:
push: false
target: ${{ inputs.only-zig && 'build_release_obj' || 'artifact' }}
cache-from: |
type=local,src=${{ runner.temp }}/dockercache
cache-to: |
type=local,dest=${{ runner.temp }}/dockercache,mode=max
outputs: |
type=local,dest=${{ runner.temp }}/release
platforms: |
linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
build-args: |
GIT_SHA=${{ github.event.workflow_run.head_sha || github.sha }}
TRIPLET=${{ inputs.os == 'darwin' && format('{0}-macos-none', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || inputs.os == 'windows' && format('{0}-windows-msvc', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || format('{0}-linux-gnu', inputs.arch == 'x64' && 'x86_64' || 'aarch64') }}
ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }}
BUILDARCH=${{ inputs.arch == 'x64' && 'amd64' || 'arm64' }}
BUILD_MACHINE_ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }}
CPU_TARGET=${{ inputs.arch == 'x64' && inputs.cpu || 'native' }}
ASSERTIONS=${{ inputs.assertions && 'ON' || 'OFF' }}
ZIG_OPTIMIZE=${{ inputs.zig-optimize }}
CANARY=${{ inputs.canary && '1' || '0' }}
- if: ${{ inputs.only-zig }}
name: Upload bun-${{ inputs.tag }}-zig
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}-zig
path: ${{ runner.temp }}/release/bun-zig.o
if-no-files-found: error
- if: ${{ !inputs.only-zig }}
name: Prepare
run: |
cd ${{ runner.temp }}/release
chmod +x bun-profile bun
mkdir bun-${{ inputs.tag }}-profile
mkdir bun-${{ inputs.tag }}
strip bun
mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile
mv bun bun-${{ inputs.tag }}/bun
zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile
zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }}
- if: ${{ !inputs.only-zig }}
name: Upload bun-${{ inputs.tag }}
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}
path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}.zip
if-no-files-found: error
- if: ${{ !inputs.only-zig }}
name: Upload bun-${{ inputs.tag }}-profile
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}-profile
path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}-profile.zip
if-no-files-found: error
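The TRIPLET build-arg expression in this workflow flattens to a small mapping, for instance:
# (os=darwin,  arch=x64)     -> TRIPLET=x86_64-macos-none
# (os=windows, arch=x64)     -> TRIPLET=x86_64-windows-msvc
# (os=linux,   arch=aarch64) -> TRIPLET=aarch64-linux-gnu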

.github/workflows/ci.yml (new file, 245 lines)

@@ -0,0 +1,245 @@
name: CI
permissions:
contents: read
actions: write
concurrency:
group: ${{ github.workflow }}-${{ github.event_name == 'workflow_dispatch' && inputs.run-id || github.ref }}
cancel-in-progress: true
on:
workflow_dispatch:
inputs:
run-id:
type: string
description: The workflow ID to download artifacts (skips the build step)
pull_request:
paths-ignore:
- .vscode/**/*
- docs/**/*
- examples/**/*
push:
branches:
- main
paths-ignore:
- .vscode/**/*
- docs/**/*
- examples/**/*
jobs:
format:
if: ${{ !inputs.run-id }}
name: Format
uses: ./.github/workflows/run-format.yml
secrets: inherit
with:
zig-version: 0.13.0
permissions:
contents: write
lint:
if: ${{ !inputs.run-id }}
name: Lint
uses: ./.github/workflows/run-lint.yml
secrets: inherit
linux-x64:
if: ${{ !inputs.run-id }}
name: Build linux-x64
uses: ./.github/workflows/build-linux.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
tag: linux-x64
arch: x64
cpu: haswell
canary: true
no-cache: true
linux-x64-baseline:
if: ${{ !inputs.run-id }}
name: Build linux-x64-baseline
uses: ./.github/workflows/build-linux.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
tag: linux-x64-baseline
arch: x64
cpu: nehalem
canary: true
no-cache: true
linux-aarch64:
if: ${{ !inputs.run-id && github.repository_owner == 'oven-sh' }}
name: Build linux-aarch64
uses: ./.github/workflows/build-linux.yml
secrets: inherit
with:
runs-on: namespace-profile-bun-ci-linux-aarch64
tag: linux-aarch64
arch: aarch64
cpu: native
canary: true
no-cache: true
darwin-x64:
if: ${{ !inputs.run-id }}
name: Build darwin-x64
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
tag: darwin-x64
arch: x64
cpu: haswell
canary: true
darwin-x64-baseline:
if: ${{ !inputs.run-id }}
name: Build darwin-x64-baseline
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
tag: darwin-x64-baseline
arch: x64
cpu: nehalem
canary: true
darwin-aarch64:
if: ${{ !inputs.run-id }}
name: Build darwin-aarch64
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
tag: darwin-aarch64
arch: aarch64
cpu: native
canary: true
windows-x64:
if: ${{ !inputs.run-id }}
name: Build windows-x64
uses: ./.github/workflows/build-windows.yml
secrets: inherit
with:
runs-on: windows
tag: windows-x64
arch: x64
cpu: haswell
canary: true
windows-x64-baseline:
if: ${{ !inputs.run-id }}
name: Build windows-x64-baseline
uses: ./.github/workflows/build-windows.yml
secrets: inherit
with:
runs-on: windows
tag: windows-x64-baseline
arch: x64
cpu: nehalem
canary: true
linux-x64-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test linux-x64
needs: linux-x64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
tag: linux-x64
linux-x64-baseline-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test linux-x64-baseline
needs: linux-x64-baseline
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
tag: linux-x64-baseline
linux-aarch64-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'}}
name: Test linux-aarch64
needs: linux-aarch64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: namespace-profile-bun-ci-linux-aarch64
tag: linux-aarch64
darwin-x64-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test darwin-x64
needs: darwin-x64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
tag: darwin-x64
darwin-x64-baseline-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test darwin-x64-baseline
needs: darwin-x64-baseline
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
tag: darwin-x64-baseline
darwin-aarch64-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test darwin-aarch64
needs: darwin-aarch64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
tag: darwin-aarch64
windows-x64-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test windows-x64
needs: windows-x64
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: windows
tag: windows-x64
windows-x64-baseline-test:
if: ${{ inputs.run-id || github.event_name == 'pull_request' }}
name: Test windows-x64-baseline
needs: windows-x64-baseline
uses: ./.github/workflows/run-test.yml
secrets: inherit
with:
run-id: ${{ inputs.run-id }}
pr-number: ${{ github.event.number }}
runs-on: windows
tag: windows-x64-baseline
cleanup:
if: ${{ always() }}
name: Cleanup
needs:
- linux-x64
- linux-x64-baseline
- linux-aarch64
- darwin-x64
- darwin-x64-baseline
- darwin-aarch64
- windows-x64
- windows-x64-baseline
runs-on: ubuntu-latest
steps:
- name: Cleanup Artifacts
uses: geekyeggo/delete-artifact@v5
with:
name: |
bun-*-cpp
bun-*-zig
bun-*-deps
bun-*-codegen
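The run-id input at the top of this workflow lets the test jobs download artifacts from an earlier run instead of rebuilding. With the GitHub CLI, a dispatch might look like (run ID hypothetical):
gh workflow run ci.yml -f run-id=9876543210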

.github/workflows/comment.yml (new file, 55 lines)

@@ -0,0 +1,55 @@
name: Comment
permissions:
actions: read
pull-requests: write
on:
workflow_run:
workflows:
- CI
types:
- completed
jobs:
comment:
if: ${{ github.repository_owner == 'oven-sh' }}
name: Comment
runs-on: ubuntu-latest
steps:
- name: Download Tests
uses: actions/download-artifact@v4
with:
path: bun
pattern: bun-*-tests
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
- name: Setup Environment
id: env
shell: bash
run: |
echo "pr-number=$(<bun/bun-linux-x64-tests/pr-number.txt)" >> $GITHUB_OUTPUT
- name: Generate Comment
run: |
cat bun/bun-*-tests/comment.md > comment.md
if [ -s comment.md ]; then
echo -e "❌ @${{ github.actor }}, your commit has failing tests :(\n\n$(cat comment.md)" > comment.md
else
echo -e "✅ @${{ github.actor }}, all tests passed!" > comment.md
fi
echo -e "\n**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }})**" >> comment.md
echo -e "<!-- generated-comment workflow=${{ github.workflow }} -->" >> comment.md
- name: Find Comment
id: comment
uses: peter-evans/find-comment@v3
with:
issue-number: ${{ steps.env.outputs.pr-number }}
comment-author: github-actions[bot]
body-includes: <!-- generated-comment workflow=${{ github.workflow }} -->
- name: Write Comment
uses: peter-evans/create-or-update-comment@v4
with:
comment-id: ${{ steps.comment.outputs.comment-id }}
issue-number: ${{ steps.env.outputs.pr-number }}
body-path: comment.md
edit-mode: replace


@@ -0,0 +1,183 @@
name: Create Release Build
run-name: Compile Bun v${{ inputs.version }} by ${{ github.actor }}
concurrency:
group: release
cancel-in-progress: true
permissions:
contents: write
actions: write
on:
workflow_dispatch:
inputs:
version:
type: string
required: true
description: "Release version. Example: 1.1.4. Exclude the 'v' prefix."
tag:
type: string
required: true
description: "GitHub tag to use"
clobber:
type: boolean
required: false
default: false
description: "Overwrite existing release artifacts?"
release:
types:
- created
jobs:
notify-start:
if: ${{ github.repository_owner == 'oven-sh' }}
name: Notify Start
runs-on: ubuntu-latest
steps:
- name: Send Message
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK_PUBLIC }}
nodetail: true
color: "#1F6FEB"
title: "Bun v${{ inputs.version }} is compiling"
description: |
### @${{ github.actor }} started compiling Bun v${{inputs.version}}
- name: Send Message
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }}
nodetail: true
color: "#1F6FEB"
title: "Bun v${{ inputs.version }} is compiling"
description: |
### @${{ github.actor }} started compiling Bun v${{inputs.version}}
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
linux-x64:
name: Build linux-x64
uses: ./.github/workflows/build-linux.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
tag: linux-x64
arch: x64
cpu: haswell
canary: false
linux-x64-baseline:
name: Build linux-x64-baseline
uses: ./.github/workflows/build-linux.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
tag: linux-x64-baseline
arch: x64
cpu: nehalem
canary: false
linux-aarch64:
name: Build linux-aarch64
uses: ./.github/workflows/build-linux.yml
secrets: inherit
with:
runs-on: namespace-profile-bun-ci-linux-aarch64
tag: linux-aarch64
arch: aarch64
cpu: native
canary: false
darwin-x64:
name: Build darwin-x64
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
tag: darwin-x64
arch: x64
cpu: haswell
canary: false
darwin-x64-baseline:
name: Build darwin-x64-baseline
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
tag: darwin-x64-baseline
arch: x64
cpu: nehalem
canary: false
darwin-aarch64:
name: Build darwin-aarch64
uses: ./.github/workflows/build-darwin.yml
secrets: inherit
with:
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
tag: darwin-aarch64
arch: aarch64
cpu: native
canary: false
windows-x64:
name: Build windows-x64
uses: ./.github/workflows/build-windows.yml
secrets: inherit
with:
runs-on: windows
tag: windows-x64
arch: x64
cpu: haswell
canary: false
windows-x64-baseline:
name: Build windows-x64-baseline
uses: ./.github/workflows/build-windows.yml
secrets: inherit
with:
runs-on: windows
tag: windows-x64-baseline
arch: x64
cpu: nehalem
canary: false
upload-artifacts:
needs:
- linux-x64
- linux-x64-baseline
- linux-aarch64
- darwin-x64
- darwin-x64-baseline
- darwin-aarch64
- windows-x64
- windows-x64-baseline
runs-on: ubuntu-latest
steps:
- name: Download Artifacts
uses: actions/download-artifact@v4
with:
path: bun-releases
pattern: bun-*
merge-multiple: true
github-token: ${{ github.token }}
- name: Check for Artifacts
run: |
if [ ! -d "bun-releases" ] || [ -z "$(ls -A bun-releases)" ]; then
echo "Error: No artifacts were downloaded or 'bun-releases' directory does not exist."
exit 1 # Fail the job if the condition is met
else
echo "Artifacts downloaded successfully."
fi
- name: Send Message
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
nodetail: true
color: "#FF0000"
title: "Bun v${{ inputs.version }} release artifacts uploaded"
- name: "Upload Artifacts"
env:
GH_TOKEN: ${{ github.token }}
run: |
# Unzip one level deep each artifact
cd bun-releases
for f in *.zip; do
unzip -o $f
done
cd ..
gh release upload --repo=${{ github.repository }} ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.tag || github.event.release.id }} ${{ inputs.clobber && '--clobber' || '' }} bun-releases/*.zip


@@ -1,6 +1,3 @@
# TODO: Move this to bash scripts instead of GitHub Actions
# so it can be run from Buildkite, see: .buildkite/scripts/release.sh
name: Release
concurrency: release
@@ -66,7 +63,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.1.20"
bun-version: "1.0.21"
- name: Install Dependencies
run: bun install
- name: Sign Release
@@ -91,7 +88,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.1.20"
bun-version: "1.0.21"
- name: Install Dependencies
run: bun install
- name: Release
@@ -120,7 +117,7 @@ jobs:
if: ${{ env.BUN_VERSION != 'canary' }}
uses: ./.github/actions/setup-bun
with:
bun-version: "1.1.20"
bun-version: "1.0.21"
- name: Setup Bun
if: ${{ env.BUN_VERSION == 'canary' }}
uses: ./.github/actions/setup-bun
@@ -262,7 +259,7 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.1.20"
bun-version: "1.0.21"
- name: Install Dependencies
run: bun install
- name: Release
@@ -273,24 +270,6 @@ jobs:
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
AWS_BUCKET: bun
notify-sentry:
name: Notify Sentry
runs-on: ubuntu-latest
needs: s3
steps:
- name: Notify Sentry
uses: getsentry/action-release@v1.7.0
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
with:
ignore_missing: true
ignore_empty: true
version: ${{ env.BUN_VERSION }}
environment: production
bump:
name: "Bump version"
runs-on: ubuntu-latest


@@ -29,9 +29,9 @@ jobs:
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.1.20"
bun-version: "1.1.8"
- name: Setup Zig
uses: mlugg/setup-zig@v1
uses: goto-bus-stop/setup-zig@c7b6cdd3adba8f8b96984640ff172c37c93f73ee
with:
version: ${{ inputs.zig-version }}
- name: Install Dependencies


@@ -17,7 +17,7 @@ on:
jobs:
lint-cpp:
name: Lint C++
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-xlarge' || 'macos-13' }}
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-13-xlarge' || 'macos-12' }}
steps:
- name: Checkout
uses: actions/checkout@v4

.github/workflows/run-test.yml (new file)

@@ -0,0 +1,224 @@
name: Test
permissions:
contents: read
actions: read
on:
workflow_call:
inputs:
runs-on:
type: string
required: true
tag:
type: string
required: true
pr-number:
type: string
required: true
run-id:
type: string
default: ${{ github.run_id }}
jobs:
test:
name: Tests
runs-on: ${{ inputs.runs-on }}
steps:
- if: ${{ runner.os == 'Windows' }}
name: Setup Git
run: |
git config --global core.autocrlf false
git config --global core.eol lf
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
package.json
bun.lockb
test
packages/bun-internal-test
packages/bun-types
- name: Setup Environment
shell: bash
run: |
echo "${{ inputs.pr-number }}" > pr-number.txt
- name: Download Bun
uses: actions/download-artifact@v4
with:
name: bun-${{ inputs.tag }}
path: bun
github-token: ${{ github.token }}
run-id: ${{ inputs.run-id || github.run_id }}
- name: Download pnpm
uses: pnpm/action-setup@v4
with:
version: 8
- if: ${{ runner.os != 'Windows' }}
name: Setup Bun
shell: bash
run: |
unzip bun/bun-*.zip
cd bun-*
pwd >> $GITHUB_PATH
- if: ${{ runner.os == 'Windows' }}
name: Setup Cygwin
uses: secondlife/setup-cygwin@v3
with:
packages: bash
- if: ${{ runner.os == 'Windows' }}
name: Setup Bun (Windows)
run: |
unzip bun/bun-*.zip
cd bun-*
pwd >> $env:GITHUB_PATH
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20
- name: Install Dependencies
timeout-minutes: 5
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
bun install
- name: Install Dependencies (test)
timeout-minutes: 5
run: |
bun install --cwd test
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Install Dependencies (runner)
timeout-minutes: 5
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
bun install --cwd packages/bun-internal-test
- name: Run Tests
id: test
timeout-minutes: 90
shell: bash
env:
IS_BUN_CI: 1
TMPDIR: ${{ runner.temp }}
BUN_TAG: ${{ inputs.tag }}
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
TEST_INFO_STRIPE: ${{ secrets.TEST_INFO_STRIPE }}
TEST_INFO_AZURE_SERVICE_BUS: ${{ secrets.TEST_INFO_AZURE_SERVICE_BUS }}
SHELLOPTS: igncr
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
node packages/bun-internal-test/src/runner.node.mjs $(which bun)
- if: ${{ always() }}
name: Upload Results
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}-tests
path: |
test-report.*
comment.md
pr-number.txt
if-no-files-found: error
overwrite: true
- if: ${{ always() && steps.test.outputs.failing_tests != '' && github.event.pull_request && github.repository_owner == 'oven-sh' }}
name: Send Message
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.DISCORD_WEBHOOK }}
nodetail: true
color: "#FF0000"
title: ""
description: |
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}.
${{ steps.test.outputs.failing_tests }}
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
- name: Fail
if: ${{ failure() || always() && steps.test.outputs.failing_tests != '' }}
run: |
echo "There are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}."
exit 1
test-node:
name: Node.js Tests
# TODO: enable when we start paying attention to the results. In the meantime, this causes CI to queue jobs wasting developer time.
if: 0
runs-on: ${{ inputs.runs-on }}
steps:
- if: ${{ runner.os == 'Windows' }}
name: Setup Git
run: |
git config --global core.autocrlf false
git config --global core.eol lf
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
test/node.js
- name: Setup Environment
shell: bash
run: |
echo "${{ inputs.pr-number }}" > pr-number.txt
- name: Download Bun
uses: actions/download-artifact@v4
with:
name: bun-${{ inputs.tag }}
path: bun
github-token: ${{ github.token }}
run-id: ${{ inputs.run-id || github.run_id }}
- if: ${{ runner.os != 'Windows' }}
name: Setup Bun
shell: bash
run: |
unzip bun/bun-*.zip
cd bun-*
pwd >> $GITHUB_PATH
- if: ${{ runner.os == 'Windows' }}
name: Setup Cygwin
uses: secondlife/setup-cygwin@v3
with:
packages: bash
- if: ${{ runner.os == 'Windows' }}
name: Setup Bun (Windows)
run: |
unzip bun/bun-*.zip
cd bun-*
pwd >> $env:GITHUB_PATH
- name: Setup Node.js
uses: actions/setup-node@v4
with:
node-version: 20
- name: Checkout Tests
shell: bash
working-directory: test/node.js
run: |
node runner.mjs --pull
- name: Install Dependencies
timeout-minutes: 5
shell: bash
working-directory: test/node.js
run: |
bun install
- name: Run Tests
timeout-minutes: 10 # Increase when more tests are added
shell: bash
working-directory: test/node.js
env:
TMPDIR: ${{ runner.temp }}
BUN_GARBAGE_COLLECTOR_LEVEL: "0"
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
run: |
node runner.mjs
- name: Upload Results
uses: actions/upload-artifact@v4
with:
name: bun-${{ inputs.tag }}-node-tests
path: |
test/node.js/summary/*.json
if-no-files-found: error
overwrite: true

.github/workflows/upload.yml (new file)

@@ -0,0 +1,82 @@
name: Upload Artifacts
run-name: Canary release ${{github.sha}} upload
permissions:
contents: write
on:
workflow_run:
workflows:
- CI
types:
- completed
branches:
- main
jobs:
upload:
if: ${{ github.repository_owner == 'oven-sh' }}
name: Upload Artifacts
runs-on: ubuntu-latest
steps:
- name: Download Artifacts
uses: actions/download-artifact@v4
with:
path: bun
pattern: bun-*
merge-multiple: true
github-token: ${{ github.token }}
run-id: ${{ github.event.workflow_run.id }}
- name: Check for Artifacts
run: |
if [ ! -d "bun" ] || [ -z "$(ls -A bun)" ]; then
echo "Error: No artifacts were downloaded or 'bun' directory does not exist."
exit 1 # Fail the job if the condition is met
else
echo "Artifacts downloaded successfully."
fi
- name: Upload to GitHub Releases
uses: ncipollo/release-action@v1
with:
tag: canary
name: Canary (${{ github.sha }})
prerelease: true
body: This canary release of Bun corresponds to the commit [${{ github.sha }}]
allowUpdates: true
replacesArtifacts: true
generateReleaseNotes: true
artifactErrorsFailBuild: true
artifacts: bun/**/bun-*.zip
token: ${{ github.token }}
- name: Upload to S3 (using SHA)
uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e
with:
endpoint: ${{ secrets.AWS_ENDPOINT }}
aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
aws_bucket: ${{ secrets.AWS_BUCKET }}
source_dir: bun
destination_dir: releases/${{ github.event.workflow_run.head_sha || github.sha }}-canary
- name: Upload to S3 (using tag)
uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e
with:
endpoint: ${{ secrets.AWS_ENDPOINT }}
aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
aws_bucket: ${{ secrets.AWS_BUCKET }}
source_dir: bun
destination_dir: releases/canary
- name: Announce on Discord
uses: sarisia/actions-status-discord@v1
with:
webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }}
nodetail: true
color: "#1F6FEB"
title: "New Bun Canary available"
url: https://github.com/oven-sh/bun/commit/${{ github.sha }}
description: |
A new canary build of Bun has been automatically uploaded. To upgrade, run:
```sh
bun upgrade --canary
# bun upgrade --stable <- to downgrade
```

.gitignore

@@ -145,4 +145,3 @@ zig-cache
zig-out
test/node.js/upstream
.zig-cache
scripts/env.local

.gitmodules

@@ -82,7 +82,3 @@ url = https://github.com/oven-sh/zig
depth = 1
shallow = true
fetchRecurseSubmodules = false
[submodule "src/deps/libdeflate"]
path = src/deps/libdeflate
url = https://github.com/ebiggers/libdeflate
ignore = "dirty"


@@ -1,2 +0,0 @@
command script import src/deps/zig/tools/lldb_pretty_printers.py
command script import src/bun.js/WebKit/Tools/lldb/lldb_webkit.py

.vscode/launch.json

@@ -17,7 +17,6 @@
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
},
"console": "internalConsole",
@@ -146,8 +145,8 @@
"request": "launch",
"name": "bun run [file]",
"program": "${workspaceFolder}/build/bun-debug",
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"args": ["run", "/Users/dave/Downloads/pd-api-testnet/dist/app.js"],
"cwd": "/Users/dave/Downloads/pd-api-testnet",
"env": {
"FORCE_COLOR": "0",
"BUN_DEBUG_QUIET_LOGS": "1",


@@ -42,11 +42,8 @@
"editor.defaultFormatter": "ziglang.vscode-zig",
},
// lldb
"lldb.launch.initCommands": ["command source ${workspaceFolder}/.lldbinit"],
"lldb.verboseLogging": false,
// C++
"lldb.verboseLogging": false,
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "enabled",
"[cpp]": {


@@ -3,8 +3,8 @@ cmake_policy(SET CMP0091 NEW)
cmake_policy(SET CMP0067 NEW)
set(CMAKE_POLICY_DEFAULT_CMP0069 NEW)
set(Bun_VERSION "1.1.22")
set(WEBKIT_TAG f9a0fda2d2b2fd001a00bfcf8e7917a56b382516)
set(Bun_VERSION "1.1.19")
set(WEBKIT_TAG f8283468dcb71ce463839b06a57e1e5a687d2b38)
set(BUN_WORKDIR "${CMAKE_CURRENT_BINARY_DIR}")
message(STATUS "Configuring Bun ${Bun_VERSION} in ${BUN_WORKDIR}")
@@ -15,6 +15,7 @@ set(CMAKE_C_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_C_STANDARD_REQUIRED ON)
# Should not start with v
# Used in process.version, process.versions.node, napi, and elsewhere
set(REPORTED_NODEJS_VERSION "22.3.0")
@@ -22,7 +23,6 @@ set(REPORTED_NODEJS_VERSION "22.3.0")
# If we do not set this, it will crash at startup on the first memory allocation.
if(NOT WIN32 AND NOT APPLE)
set(CMAKE_CXX_EXTENSIONS ON)
set(CMAKE_POSITION_INDEPENDENT_CODE FALSE)
endif()
# --- Build Type ---
@@ -39,7 +39,7 @@ else()
message(STATUS "The CMake build type is: ${CMAKE_BUILD_TYPE}")
endif()
if(WIN32 AND NOT CMAKE_CL_SHOWINCLUDES_PREFIX)
if (WIN32 AND NOT CMAKE_CL_SHOWINCLUDES_PREFIX)
# workaround until cmake fix is shipped https://github.com/ninja-build/ninja/issues/2280
# './build/.ninja_deps' may need to be deleted, the bug is "Note: including file: ..." is saved
# as part of some file paths
@@ -58,8 +58,11 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
set(DEFAULT_ZIG_OPTIMIZE "ReleaseFast")
if(WIN32)
# Debug symbols are in a separate file: bun.pdb
# lld-link will strip it for you, so we can build directly to bun.exe
set(bun "bun")
# TODO(@paperdave): Remove this
# it is enabled for the time being to catch more bugs in the experimental Windows builds
set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe")
else()
if(ZIG_OPTIMIZE STREQUAL "Debug")
@@ -72,7 +75,7 @@ endif()
# --- MacOS SDK ---
if(APPLE AND DEFINED ENV{CI})
set(CMAKE_OSX_DEPLOYMENT_TARGET "13.0")
set(CMAKE_OSX_DEPLOYMENT_TARGET "12.0")
endif()
if(APPLE AND NOT CMAKE_OSX_DEPLOYMENT_TARGET)
@@ -116,7 +119,7 @@ endif()
# we do some extra work afterwards to double-check, and we will rerun BUN_FIND_LLVM if the compiler did not match.
#
# If the user passes -DLLVM_PREFIX, most of this logic is skipped, but we still warn if invalid.
if(WIN32 OR APPLE)
if(WIN32)
set(LLVM_VERSION 18)
else()
set(LLVM_VERSION 16)
@@ -154,12 +157,11 @@ macro(BUN_FIND_LLVM)
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary"
)
find_program(
STRIP
NAMES strip
PATHS ENV PATH ${PLATFORM_LLVM_SEARCH_PATHS}
DOC "Path to strip binary"
DOC "Path to LLVM ${LLVM_VERSION}'s llvm-strip binary"
)
find_program(
DSYMUTIL
@@ -310,7 +312,6 @@ endif()
# -- Build Flags --
option(USE_STATIC_SQLITE "Statically link SQLite?" ${DEFAULT_ON_UNLESS_APPLE})
option(USE_CUSTOM_ZLIB "Use Bun's recommended version of zlib" ON)
option(USE_CUSTOM_LIBDEFLATE "Use Bun's recommended version of libdeflate" ON)
option(USE_CUSTOM_BORINGSSL "Use Bun's recommended version of BoringSSL" ON)
option(USE_CUSTOM_LIBARCHIVE "Use Bun's recommended version of libarchive" ON)
option(USE_CUSTOM_MIMALLOC "Use Bun's recommended version of Mimalloc" ON)
@@ -332,11 +333,6 @@ option(USE_STATIC_LIBATOMIC "Statically link libatomic, requires the presence of
option(USE_LTO "Enable Link-Time Optimization" ${DEFAULT_LTO})
if(APPLE AND USE_LTO)
set(USE_LTO OFF)
message(WARNING "Link-Time Optimization is not supported on macOS because it requires -fuse-ld=lld and lld causes many segfaults on macOS (likely related to stack size)")
endif()
if(WIN32 AND USE_LTO)
set(CMAKE_LINKER_TYPE LLD)
set(CMAKE_INTERPROCEDURAL_OPTIMIZATION OFF)
@@ -653,6 +649,16 @@ file(GLOB BUN_CPP ${CONFIGURE_DEPENDS}
)
list(APPEND BUN_RAW_SOURCES ${BUN_CPP})
# -- Brotli --
set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli")
file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS}
"${BROTLI_SRC}/common/*.c"
"${BROTLI_SRC}/enc/*.c"
"${BROTLI_SRC}/dec/*.c"
)
list(APPEND BUN_RAW_SOURCES ${BROTLI_FILES})
include_directories("${BUN_DEPS_DIR}/brotli/include")
# -- uSockets --
set(USOCKETS_SRC "${CMAKE_CURRENT_SOURCE_DIR}/packages/bun-usockets/src")
file(GLOB USOCKETS_FILES ${CONFIGURE_DEPENDS}
@@ -868,24 +874,13 @@ file(GLOB ZIG_FILES
"${BUN_SRC}/*/*/*/*/*.zig"
)
if(NOT BUN_ZIG_OBJ_FORMAT)
# To use LLVM bitcode from Zig, more work needs to be done. Currently, an install of
# LLVM 18.1.7 is not compatible with the bitcode Zig 0.13 outputs (which uses LLVM 18.1.7).
# Change to "bc" to experiment; "Invalid record" means the output is not valid.
set(BUN_ZIG_OBJ_FORMAT "obj")
endif()
if(NOT BUN_ZIG_OBJ_DIR)
set(BUN_ZIG_OBJ_DIR "${BUN_WORKDIR}/CMakeFiles")
endif()
get_filename_component(BUN_ZIG_OBJ_DIR "${BUN_ZIG_OBJ_DIR}" REALPATH BASE_DIR "${CMAKE_BINARY_DIR}")
if(WIN32)
set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
else()
set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
endif()
set(BUN_ZIG_OBJ "${BUN_ZIG_OBJ_DIR}/bun-zig.o")
set(USES_TERMINAL_NOT_IN_CI "")
@@ -900,7 +895,6 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
"${ZIG_COMPILER}" "build" "obj"
"--zig-lib-dir" "${ZIG_LIB_DIR}"
"--prefix" "${BUN_ZIG_OBJ_DIR}"
"--verbose"
"-Dgenerated-code=${BUN_WORKDIR}/codegen"
"-freference-trace=10"
"-Dversion=${Bun_VERSION}"
@@ -910,7 +904,6 @@ if(NOT BUN_LINK_ONLY AND NOT BUN_CPP_ONLY)
"-Dtarget=${ZIG_TARGET}"
"-Denable_logs=${ENABLE_LOGS}"
"-Dreported_nodejs_version=${REPORTED_NODEJS_VERSION}"
"-Dobj_format=${BUN_ZIG_OBJ_FORMAT}"
DEPENDS
"${CMAKE_CURRENT_SOURCE_DIR}/build.zig"
"${ZIG_FILES}"
@@ -1000,20 +993,8 @@ add_compile_definitions(
)
if(NOT ASSERT_ENABLED)
if(APPLE)
add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=0")
add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_NONE")
endif()
add_compile_definitions("NDEBUG=1")
else()
if(APPLE)
add_compile_definitions("_LIBCXX_ENABLE_ASSERTIONS=1")
add_compile_definitions("_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_DEBUG")
elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux")
add_compile_definitions("_GLIBCXX_ASSERTIONS=1")
endif()
add_compile_definitions("ASSERT_ENABLED=1")
endif()
@@ -1099,7 +1080,7 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
if(NOT WIN32)
if(USE_LTO)
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm" "-fwhole-program-vtables" "-fforce-emit-vtables")
list(APPEND LTO_FLAG "-flto=full" "-emit-llvm")
endif()
# Leave -Werror=unused off in release builds so we avoid errors from being used in ASSERT
@@ -1120,36 +1101,13 @@ elseif(CMAKE_BUILD_TYPE STREQUAL "Release")
if(USE_LTO)
target_compile_options(${bun} PUBLIC -Xclang -emit-llvm-bc)
# -emit-llvm seems to not be supported or under a different name on Windows.
list(APPEND LTO_FLAG "-flto=full")
list(APPEND LTO_LINK_FLAG "-flto=full")
list(APPEND LTO_LINK_FLAG "/LTCG")
list(APPEND LTO_LINK_FLAG "/OPT:REF")
list(APPEND LTO_LINK_FLAG "/OPT:NOICF")
endif()
target_compile_options(${bun} PUBLIC
/O2
${LTO_FLAG}
/Gy
/Gw
/GF
/GA
)
target_link_options(${bun} PUBLIC
${LTO_LINK_FLAG}
/DEBUG:FULL
/delayload:ole32.dll
/delayload:WINMM.dll
/delayload:dbghelp.dll
/delayload:VCRUNTIME140_1.dll
# libuv loads these two immediately, but for some reason it seems to still be slightly faster to delayload them
/delayload:WS2_32.dll
/delayload:WSOCK32.dll
/delayload:ADVAPI32.dll
/delayload:IPHLPAPI.dll
)
target_compile_options(${bun} PUBLIC /O2 ${LTO_FLAG})
target_link_options(${bun} PUBLIC ${LTO_LINK_FLAG} /DEBUG:FULL)
endif()
endif()
@@ -1167,11 +1125,6 @@ else()
# On arm macOS, we can set it to a minimum of the M1 cpu set. this might be the default already.
target_compile_options(${bun} PUBLIC "-mcpu=apple-m1")
endif()
if(NOT WIN32 AND NOT APPLE AND ARCH STREQUAL "aarch64")
# on arm64 linux, we set a minimum of armv8
target_compile_options(${bun} PUBLIC -march=armv8-a+crc -mtune=ampere1)
endif()
endif()
target_compile_options(${bun} PUBLIC -ferror-limit=${ERROR_LIMIT})
@@ -1185,29 +1138,24 @@ if(WIN32)
"BORINGSSL_NO_CXX=1" # lol
)
# set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>")
set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded")
# set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreaded")
set_property(TARGET ${bun} PROPERTY MSVC_RUNTIME_LIBRARY "MultiThreadedDLL")
target_compile_options(${bun} PUBLIC "/EHsc" "/GR-" -Xclang -fno-c++-static-destructors)
target_compile_options(${bun} PUBLIC "/EHsc" "/GR-")
target_link_options(${bun} PUBLIC "/STACK:0x1200000,0x100000" "/DEF:${BUN_SRC}/symbols.def" "/errorlimit:0")
else()
target_compile_options(${bun} PUBLIC
-fPIC
-mtune=${CPU_TARGET}
-fconstexpr-steps=2542484
-fconstexpr-depth=54
-fno-exceptions
-fno-asynchronous-unwind-tables
-fno-unwind-tables
-fno-c++-static-destructors
-fvisibility=hidden
-fvisibility-inlines-hidden
-fno-rtti
-fno-omit-frame-pointer
-mno-omit-leaf-frame-pointer
-fno-pic
-fno-pie
-faddrsig
)
endif()
@@ -1223,12 +1171,10 @@ endif()
if(UNIX AND NOT APPLE)
target_link_options(${bun} PUBLIC
-fuse-ld=lld-${LLVM_VERSION}
-fno-pic
-static-libstdc++
-static-libgcc
"-Wl,-no-pie"
"-Wl,-icf=safe"
"-fuse-ld=lld"
"-static-libstdc++"
"-static-libgcc"
"-Wl,-z,now"
"-Wl,--as-needed"
"-Wl,--gc-sections"
"-Wl,-z,stack-size=12800000"
@@ -1257,8 +1203,6 @@ if(UNIX AND NOT APPLE)
"-rdynamic"
"-Wl,--dynamic-list=${BUN_SRC}/symbols.dyn"
"-Wl,--version-script=${BUN_SRC}/linker.lds"
-Wl,-z,lazy
-Wl,-z,norelro
)
target_link_libraries(${bun} PRIVATE "c")
@@ -1292,16 +1236,12 @@ endif()
# --- Stripped Binary "bun"
if(CMAKE_BUILD_TYPE STREQUAL "Release" AND NOT WIN32 AND NOT ASSERT_ENABLED)
# if(CI AND APPLE)
if(APPLE)
add_custom_command(
TARGET ${bun}
POST_BUILD
COMMAND ${DSYMUTIL} -o ${BUN_WORKDIR}/${bun}.dSYM ${BUN_WORKDIR}/${bun}
COMMENT "Generating .dSYM"
)
endif()
# add_custom_command(
# TARGET ${bun}
# POST_BUILD
# COMMAND ${DSYMUTIL} -o ${BUN_WORKDIR}/bun.dSYM ${BUN_WORKDIR}/${bun}
# COMMENT "Stripping Symbols"
# )
add_custom_command(
TARGET ${bun}
POST_BUILD
@@ -1371,19 +1311,6 @@ else()
target_link_libraries(${bun} PRIVATE LibArchive::LibArchive)
endif()
if(USE_CUSTOM_LIBDEFLATE)
include_directories(${BUN_DEPS_DIR}/libdeflate)
if(WIN32)
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/deflate.lib")
else()
target_link_libraries(${bun} PRIVATE "${BUN_DEPS_OUT_DIR}/libdeflate.a")
endif()
else()
find_package(LibDeflate REQUIRED)
target_link_libraries(${bun} PRIVATE LibDeflate::LibDeflate)
endif()
if(USE_CUSTOM_MIMALLOC)
include_directories(${BUN_DEPS_DIR}/mimalloc/include)
@@ -1482,11 +1409,6 @@ if(USE_STATIC_SQLITE)
"SQLITE_ENABLE_JSON1=1"
"SQLITE_ENABLE_MATH_FUNCTIONS=1"
)
if(WIN32)
target_compile_options(sqlite3 PRIVATE /MT /U_DLL)
endif()
target_link_libraries(${bun} PRIVATE sqlite3)
message(STATUS "Using static sqlite3")
target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=0")
@@ -1495,24 +1417,6 @@ else()
target_compile_definitions(${bun} PRIVATE "LAZY_LOAD_SQLITE=1")
endif()
# -- Brotli --
set(BROTLI_SRC "${CMAKE_CURRENT_SOURCE_DIR}/src/deps/brotli")
file(GLOB BROTLI_FILES ${CONFIGURE_DEPENDS}
"${BROTLI_SRC}/common/*.c"
"${BROTLI_SRC}/enc/*.c"
"${BROTLI_SRC}/dec/*.c"
)
add_library(brotli STATIC ${BROTLI_FILES})
target_include_directories(brotli PRIVATE "${BROTLI_SRC}/include")
target_compile_definitions(brotli PRIVATE "BROTLI_STATIC")
if(WIN32)
target_compile_options(brotli PRIVATE /MT /U_DLL)
endif()
target_link_libraries(${bun} PRIVATE brotli)
include_directories("${BUN_DEPS_DIR}/brotli/include")
if(USE_CUSTOM_LSHPACK)
include_directories(${BUN_DEPS_DIR}/ls-hpack)
@@ -1532,6 +1436,7 @@ if(NOT WIN32)
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libJavaScriptCore.a")
target_link_libraries(${bun} PRIVATE "${WEBKIT_LIB_DIR}/libbmalloc.a")
else()
target_link_options(${bun} PRIVATE "-static")
target_link_libraries(${bun} PRIVATE
"${WEBKIT_LIB_DIR}/WTF.lib"
"${WEBKIT_LIB_DIR}/JavaScriptCore.lib"
@@ -1541,10 +1446,10 @@ else()
winmm
bcrypt
ntdll
ucrt
userenv
dbghelp
wsock32 # ws2_32 required by TransmitFile aka sendfile on windows
delayimp.lib
)
endif()


@@ -5,7 +5,6 @@ If you are using Windows, please refer to [this guide](/docs/project/building-wi
{% details summary="For Ubuntu users" %}
TL;DR: Ubuntu 22.04 is suggested.
Bun currently requires `glibc >=2.32` in development, which means that on Ubuntu 20.04 (glibc == 2.31) you will likely hit `error: undefined symbol: __libc_single_threaded` and need extra configuration. Also, according to this [issue](https://github.com/llvm/llvm-project/issues/97314), LLVM 16 is no longer maintained on Ubuntu 24.04 (noble); instead, you may want to install LLVM 16 via `brew`.
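For example, a minimal sketch of installing LLVM 16 via Homebrew on Ubuntu 24.04 (this assumes `brew` is already installed; the exact formula name may change):
```sh
$ brew install llvm@16
$ export PATH="$(brew --prefix llvm@16)/bin:$PATH"
```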
{% /details %}
## Install Dependencies


@@ -52,8 +52,11 @@ ENV CI 1
ENV CPU_TARGET=${CPU_TARGET}
ENV BUILDARCH=${BUILDARCH}
ENV BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR}
ENV BUN_ENABLE_LTO 1
ENV CXX=clang++-${LLVM_VERSION}
ENV CC=clang-${LLVM_VERSION}
ENV AR=/usr/bin/llvm-ar-${LLVM_VERSION}
ENV LD=lld-${LLVM_VERSION}
ENV LC_CTYPE=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8
@@ -90,8 +93,6 @@ RUN install_packages \
clangd-${LLVM_VERSION} \
libc++-${LLVM_VERSION}-dev \
libc++abi-${LLVM_VERSION}-dev \
llvm-${LLVM_VERSION}-runtime \
llvm-${LLVM_VERSION}-dev \
make \
cmake \
ninja-build \
@@ -118,15 +119,6 @@ RUN install_packages \
&& ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \
&& ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \
&& ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \
&& ln -sf /usr/bin/ld.lld /usr/bin/ld \
&& ln -sf /usr/bin/llvm-ranlib-${LLVM_VERSION} /usr/bin/ranlib \
&& ln -sf /usr/bin/clang /usr/bin/cc \
&& ln -sf /usr/bin/clang /usr/bin/c89 \
&& ln -sf /usr/bin/clang /usr/bin/c99 \
&& ln -sf /usr/bin/clang++ /usr/bin/c++ \
&& ln -sf /usr/bin/clang++ /usr/bin/g++ \
&& ln -sf /usr/bin/llvm-ar /usr/bin/ar \
&& ln -sf /usr/bin/clang /usr/bin/gcc \
&& arch="$(dpkg --print-architecture)" \
&& case "${arch##*-}" in \
amd64) variant="x64";; \
@@ -139,7 +131,6 @@ RUN install_packages \
&& ln -s /usr/bin/bun /usr/bin/bunx \
&& rm -rf bun-linux-${variant} bun-linux-${variant}.zip \
&& mkdir -p ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
# && if [ -n "${SCCACHE_BUCKET}" ]; then \
# echo "Setting up sccache" \
# && wget https://github.com/mozilla/sccache/releases/download/v0.5.4/sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl.tar.gz \
@@ -176,14 +167,13 @@ ENV CCACHE_DIR=${CCACHE_DIR}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/c-ares ${BUN_DIR}/src/deps/c-ares
COPY scripts ${BUN_DIR}/scripts
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& bash ./scripts/build-cares.sh \
&& rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile ${BUN_DIR}/scripts
&& make c-ares \
&& rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile
FROM bun-base as lolhtml
@@ -214,14 +204,13 @@ ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
COPY scripts ${BUN_DIR}/scripts
ARG CCACHE_DIR=/ccache
ENV CCACHE_DIR=${CCACHE_DIR}
RUN --mount=type=cache,target=${CCACHE_DIR} \
cd ${BUN_DIR} \
&& bash ./scripts/build-mimalloc.sh \
&& make mimalloc \
&& rm -rf src/deps/mimalloc Makefile
FROM bun-base as mimalloc-debug
@@ -251,38 +240,14 @@ ARG CCACHE_DIR=/ccache
ENV CCACHE_DIR=${CCACHE_DIR}
COPY Makefile ${BUN_DIR}/Makefile
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
COPY scripts ${BUN_DIR}/scripts
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& bash ./scripts/build-zlib.sh && rm -rf src/deps/zlib scripts
FROM bun-base as libdeflate
ARG BUN_DIR
ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
ARG CCACHE_DIR=/ccache
ENV CCACHE_DIR=${CCACHE_DIR}
COPY Makefile ${BUN_DIR}/Makefile
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
COPY scripts ${BUN_DIR}/scripts
COPY src/deps/libdeflate ${BUN_DIR}/src/deps/libdeflate
COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& bash ./scripts/build-libdeflate.sh && rm -rf src/deps/libdeflate scripts
&& make zlib \
&& rm -rf src/deps/zlib Makefile
FROM bun-base as libarchive
@@ -321,7 +286,6 @@ ARG CPU_TARGET
ENV CPU_TARGET=${CPU_TARGET}
COPY Makefile ${BUN_DIR}/Makefile
COPY scripts ${BUN_DIR}/scripts
COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl
WORKDIR $BUN_DIR
@@ -331,7 +295,7 @@ ENV CCACHE_DIR=${CCACHE_DIR}
RUN --mount=type=cache,target=${CCACHE_DIR} \
cd ${BUN_DIR} \
&& bash ./scripts/build-boringssl.sh \
&& make boringssl \
&& rm -rf src/deps/boringssl Makefile
@@ -347,14 +311,12 @@ ENV CCACHE_DIR=${CCACHE_DIR}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
COPY scripts ${BUN_DIR}/scripts
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& bash ./scripts/build-zstd.sh \
&& rm -rf src/deps/zstd scripts
&& make zstd
FROM bun-base as ls-hpack
@@ -368,14 +330,12 @@ ENV CCACHE_DIR=${CCACHE_DIR}
COPY Makefile ${BUN_DIR}/Makefile
COPY src/deps/ls-hpack ${BUN_DIR}/src/deps/ls-hpack
COPY scripts ${BUN_DIR}/scripts
WORKDIR $BUN_DIR
RUN --mount=type=cache,target=${CCACHE_DIR} \
cd $BUN_DIR \
&& bash ./scripts/build-lshpack.sh \
&& rm -rf src/deps/ls-hpack scripts
&& make lshpack
FROM bun-base-with-zig as bun-identifier-cache
@@ -433,9 +393,6 @@ COPY src ${BUN_DIR}/src
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
# for uWebSockets
COPY src/deps/libdeflate ${BUN_DIR}/src/deps/libdeflate
ARG CCACHE_DIR=/ccache
ENV CCACHE_DIR=${CCACHE_DIR}
@@ -534,13 +491,11 @@ RUN mkdir -p build bun-webkit
# lol
COPY src/bun.js/bindings/sqlite/sqlite3.c ${BUN_DIR}/src/bun.js/bindings/sqlite/sqlite3.c
COPY src/deps/brotli ${BUN_DIR}/src/deps/brotli
COPY src/symbols.dyn src/linker.lds ${BUN_DIR}/src/
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=libdeflate ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
@@ -550,8 +505,7 @@ COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=ls-hpack ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
COPY --from=bun-compile-zig-obj /tmp/bun-zig.o ${BUN_DIR}/build/bun-zig.o
COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.a ${BUN_DIR}/build/
COPY --from=bun-cpp-objects ${BUN_DIR}/build/*.o ${BUN_DIR}/build/
COPY --from=bun-cpp-objects ${BUN_DIR}/build/bun-cpp-objects.a ${BUN_DIR}/build/bun-cpp-objects.a
COPY --from=bun-cpp-objects ${BUN_DIR}/bun-webkit/lib ${BUN_DIR}/bun-webkit/lib
WORKDIR $BUN_DIR/build

LATEST

@@ -1 +1 @@
1.1.21
1.1.18


@@ -34,8 +34,6 @@ Bun statically links these libraries:
| [`c-ares`](https://github.com/c-ares/c-ares) | MIT licensed |
| [`libicu`](https://github.com/unicode-org/icu) 72 | [license here](https://github.com/unicode-org/icu/blob/main/icu4c/LICENSE) |
| [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE) | BSD 2-Clause |
| [`libuv`](https://github.com/libuv/libuv) (on Windows) | MIT |
| [`libdeflate`](https://github.com/ebiggers/libdeflate) | MIT |
| A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets) | Apache 2.0 licensed |
| Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1) | Apache 2.0 licensed |


@@ -26,11 +26,8 @@ ifeq ($(ARCH_NAME_RAW),arm64)
ARCH_NAME = aarch64
DOCKER_BUILDARCH = arm64
BREW_PREFIX_PATH = /opt/homebrew
DEFAULT_MIN_MACOS_VERSION = 13.0
DEFAULT_MIN_MACOS_VERSION = 11.0
MARCH_NATIVE = -mtune=$(CPU_TARGET)
ifeq ($(OS_NAME),linux)
MARCH_NATIVE = -march=armv8-a+crc -mtune=ampere1
endif
else
ARCH_NAME = x64
DOCKER_BUILDARCH = amd64
@@ -157,12 +154,7 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
-DCMAKE_AR=$(AR) \
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
-DCMAKE_CXX_STANDARD=20 \
-DCMAKE_C_STANDARD=17 \
-DCMAKE_CXX_STANDARD_REQUIRED=ON \
-DCMAKE_C_STANDARD_REQUIRED=ON \
-DCMAKE_CXX_EXTENSIONS=ON
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null)
@@ -189,8 +181,8 @@ endif
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE) -gdwarf-4
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
BUN_TMP_DIR := /tmp/make-bun
CFLAGS=$(CFLAGS_WITHOUT_MARCH) $(MARCH_NATIVE)


@@ -1,43 +1,20 @@
import { run, bench, group } from "mitata";
import { run, bench } from "mitata";
import { gzipSync, gunzipSync } from "bun";
const data = await Bun.file(require.resolve("@babel/standalone/babel.min.js")).arrayBuffer();
const data = new TextEncoder().encode("Hello World!".repeat(9999));
const compressed = gzipSync(data);
const libraries = ["zlib"];
if (Bun.semver.satisfies(Bun.version.replaceAll("-debug", ""), ">=1.1.21")) {
libraries.push("libdeflate");
}
const options = { library: undefined };
const benchFn = (name, fn) => {
if (libraries.length > 1) {
group(name, () => {
for (const library of libraries) {
bench(library, () => {
options.library = library;
fn();
});
}
});
} else {
options.library = libraries[0];
bench(name, () => {
fn();
});
}
};
benchFn(`roundtrip - @babel/standalone/babel.min.js`, () => {
gunzipSync(gzipSync(data, options), options);
bench(`roundtrip - "Hello World!".repeat(9999))`, () => {
gunzipSync(gzipSync(data));
});
benchFn(`gzipSync(@babel/standalone/babel.min.js`, () => {
gzipSync(data, options);
bench(`gzipSync("Hello World!".repeat(9999)))`, () => {
gzipSync(data);
});
benchFn(`gunzipSync(@babel/standalone/babel.min.js`, () => {
gunzipSync(compressed, options);
bench(`gunzipSync("Hello World!".repeat(9999)))`, () => {
gunzipSync(compressed);
});
await run();

Binary file not shown.


@@ -1,22 +1,19 @@
import { run, bench } from "mitata";
import { gzipSync, gunzipSync } from "zlib";
import { createRequire } from "module";
import { readFileSync } from "fs";
const require = createRequire(import.meta.url);
const data = readFileSync(require.resolve("@babel/standalone/babel.min.js"));
const data = new TextEncoder().encode("Hello World!".repeat(9999));
const compressed = gzipSync(data);
bench(`roundtrip - @babel/standalone/babel.min.js)`, () => {
bench(`roundtrip - "Hello World!".repeat(9999))`, () => {
gunzipSync(gzipSync(data));
});
bench(`gzipSync(@babel/standalone/babel.min.js))`, () => {
bench(`gzipSync("Hello World!".repeat(9999)))`, () => {
gzipSync(data);
});
bench(`gunzipSync(@babel/standalone/babel.min.js))`, () => {
bench(`gunzipSync("Hello World!".repeat(9999)))`, () => {
gunzipSync(compressed);
});


@@ -7,8 +7,5 @@
"bench:node": "$NODE node.mjs",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
},
"dependencies": {
"@babel/standalone": "7.24.10"
}
}


@@ -1,31 +0,0 @@
import { run, bench } from "mitata";
import { createRequire } from "module";
const require = createRequire(import.meta.url);
const db = require("better-sqlite3")("./src/northwind.sqlite");
{
const sql = db.prepare(`SELECT * FROM "Order"`);
bench('SELECT * FROM "Order"', () => {
sql.all();
});
}
{
const sql = db.prepare(`SELECT * FROM "Product"`);
bench('SELECT * FROM "Product"', () => {
sql.all();
});
}
{
const sql = db.prepare(`SELECT * FROM "OrderDetail"`);
bench('SELECT * FROM "OrderDetail"', () => {
sql.all();
});
}
await run();


@@ -1,9 +1,8 @@
// Run `node --experimental-sqlite bench/sqlite/node.mjs` to run this script.
// Requires the `--experimental-sqlite` flag and Node v22.5.0 or higher.
import { run, bench } from "mitata";
import { DatabaseSync as Database } from "node:sqlite";
import { createRequire } from "module";
const db = new Database("./src/northwind.sqlite");
const require = createRequire(import.meta.url);
const db = require("better-sqlite3")("./src/northwind.sqlite");
{
const sql = db.prepare(`SELECT * FROM "Order"`);

build.zig

@@ -33,6 +33,8 @@ comptime {
}
}
const default_reported_nodejs_version = "22.3.0";
const zero_sha = "0000000000000000000000000000000000000000";
const BunBuildOptions = struct {
@@ -46,10 +48,9 @@ const BunBuildOptions = struct {
sha: []const u8,
enable_logs: bool = false,
tracy_callstack_depth: u16,
reported_nodejs_version: Version,
reported_nodejs_version: []const u8 = default_reported_nodejs_version,
generated_code_dir: []const u8,
no_llvm: bool,
cached_options_module: ?*Module = null,
windows_shim: ?WindowsShim = null,
@@ -72,7 +73,14 @@ const BunBuildOptions = struct {
opts.addOption([:0]const u8, "sha", b.allocator.dupeZ(u8, this.sha) catch @panic("OOM"));
opts.addOption(bool, "baseline", this.isBaseline());
opts.addOption(bool, "enable_logs", this.enable_logs);
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
opts.addOption([:0]const u8, "reported_nodejs_version", b.allocator.dupeZ(u8, this.reported_nodejs_version) catch @panic("OOM"));
if (this.reported_nodejs_version.len > 0 and this.reported_nodejs_version[0] == 'v') {
@panic("Node.js version should not start with 'v'");
}
if (this.reported_nodejs_version.len == 0) {
@panic("Node.js version should not be empty");
}
const mod = opts.createModule();
this.cached_options_module = mod;
@@ -114,23 +122,6 @@ pub fn getOSGlibCVersion(os: OperatingSystem) ?Version {
};
}
pub fn getCpuModel(os: OperatingSystem, arch: Arch) ?Target.Query.CpuModel {
// https://github.com/oven-sh/bun/issues/12076
if (os == .linux and arch == .aarch64) {
return .{ .explicit = &Target.aarch64.cpu.cortex_a35 };
}
// Be explicit and ensure we do not accidentally target a newer M-series chip
if (os == .mac and arch == .aarch64) {
return .{ .explicit = &Target.aarch64.cpu.apple_m1 };
}
// note: x86_64 is dealt with in the CMake config and passed in.
// the reason for the explicit handling on aarch64 is due to troubles
// passing the exact target in via flags.
return null;
}
pub fn build(b: *Build) !void {
std.log.info("zig compiler v{s}", .{builtin.zig_version_string});
@@ -156,14 +147,6 @@ pub fn build(b: *Build) !void {
break :brk .{ os, arch };
};
// target must be refined to support older but very popular devices on
// aarch64, this means moving the minimum supported CPU to support certain
// raspberry PIs. there are also a number of cloud hosts that use virtual
// machines with surprisingly out of date versions of glibc.
if (getCpuModel(os, arch)) |cpu_model| {
target_query.cpu_model = cpu_model;
}
target_query.os_version_min = getOSVersionMin(os);
target_query.glibc_version = getOSGlibCVersion(os);
@@ -180,10 +163,6 @@ pub fn build(b: *Build) !void {
break :ref_trace if (trace == 0) null else trace;
};
const obj_format = b.option(ObjectFormat, "obj_format", "Output file for object files") orelse .obj;
const no_llvm = b.option(bool, "no_llvm", "Experiment with Zig self hosted backends. No stability guaranteed") orelse false;
var build_options = BunBuildOptions{
.target = target,
.optimize = optimize,
@@ -192,7 +171,6 @@ pub fn build(b: *Build) !void {
.arch = arch,
.generated_code_dir = generated_code_dir,
.no_llvm = no_llvm,
.version = try Version.parse(bun_version),
.canary_revision = canary: {
@@ -200,10 +178,7 @@ pub fn build(b: *Build) !void {
break :canary if (rev == 0) null else rev;
},
.reported_nodejs_version = try Version.parse(
b.option([]const u8, "reported_nodejs_version", "Reported Node.js version") orelse
"0.0.0-unset",
),
.reported_nodejs_version = b.option([]const u8, "reported_nodejs_version", "Reported Node.js version") orelse default_reported_nodejs_version,
.sha = sha: {
const sha = b.option([]const u8, "sha", "Force the git sha") orelse
@@ -249,7 +224,7 @@ pub fn build(b: *Build) !void {
var step = b.step("obj", "Build Bun's Zig code as a .o file");
var bun_obj = addBunObject(b, &build_options);
step.dependOn(&bun_obj.step);
step.dependOn(addInstallObjectFile(b, bun_obj, "bun-zig", obj_format));
step.dependOn(&b.addInstallFile(bun_obj.getEmittedBin(), "bun-zig.o").step);
}
// zig build windows-shim
@@ -277,61 +252,96 @@ pub fn build(b: *Build) !void {
// zig build check-all
{
const step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");
addMultiCheck(b, step, build_options, &.{
var step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");
inline for (.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
.{ .os = .linux, .arch = .aarch64 },
});
}) |check| {
inline for (.{ .Debug, .ReleaseFast }) |mode| {
const check_target = b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(check.os),
.cpu_arch = check.arch,
.os_version_min = getOSVersionMin(check.os),
.glibc_version = getOSGlibCVersion(check.os),
});
var options = BunBuildOptions{
.target = check_target,
.os = check.os,
.arch = check_target.result.cpu.arch,
.optimize = mode,
.canary_revision = build_options.canary_revision,
.sha = build_options.sha,
.tracy_callstack_depth = build_options.tracy_callstack_depth,
.version = build_options.version,
.reported_nodejs_version = build_options.reported_nodejs_version,
.generated_code_dir = build_options.generated_code_dir,
};
var obj = addBunObject(b, &options);
obj.generated_bin = null;
step.dependOn(&obj.step);
}
}
}
// zig build check-windows
{
const step = b.step("check-windows", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
var step = b.step("check-windows", "Check for semantic analysis errors on Windows x64");
inline for (.{
.{ .os = .windows, .arch = .x86_64 },
});
}
}
}) |check| {
inline for (.{ .Debug, .ReleaseFast }) |mode| {
const check_target = b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(check.os),
.cpu_arch = check.arch,
.os_version_min = getOSVersionMin(check.os),
.glibc_version = getOSGlibCVersion(check.os),
});
pub inline fn addMultiCheck(
b: *Build,
parent_step: *Step,
root_build_options: BunBuildOptions,
to_check: []const struct { os: OperatingSystem, arch: Arch },
) void {
inline for (to_check) |check| {
inline for (.{ .Debug, .ReleaseFast }) |mode| {
const check_target = b.resolveTargetQuery(.{
.os_tag = OperatingSystem.stdOSTag(check.os),
.cpu_arch = check.arch,
.cpu_model = getCpuModel(check.os, check.arch) orelse .determined_by_cpu_arch,
.os_version_min = getOSVersionMin(check.os),
.glibc_version = getOSGlibCVersion(check.os),
});
var options = BunBuildOptions{
.target = check_target,
.os = check.os,
.arch = check_target.result.cpu.arch,
.optimize = mode,
var options: BunBuildOptions = .{
.target = check_target,
.os = check.os,
.arch = check_target.result.cpu.arch,
.optimize = mode,
.canary_revision = root_build_options.canary_revision,
.sha = root_build_options.sha,
.tracy_callstack_depth = root_build_options.tracy_callstack_depth,
.version = root_build_options.version,
.reported_nodejs_version = root_build_options.reported_nodejs_version,
.generated_code_dir = root_build_options.generated_code_dir,
.no_llvm = root_build_options.no_llvm,
};
var obj = addBunObject(b, &options);
obj.generated_bin = null;
parent_step.dependOn(&obj.step);
.canary_revision = build_options.canary_revision,
.sha = build_options.sha,
.tracy_callstack_depth = build_options.tracy_callstack_depth,
.version = build_options.version,
.reported_nodejs_version = build_options.reported_nodejs_version,
.generated_code_dir = build_options.generated_code_dir,
};
var obj = addBunObject(b, &options);
obj.generated_bin = null;
step.dependOn(&obj.step);
}
}
}
// Running `zig build` with no arguments is almost always a mistake.
// TODO: revive this error. cannot right now since ZLS runs zig build without arguments
{
// const mistake_message = b.addSystemCommand(&.{
// "echo",
// \\
// \\To build Bun from source, please use `bun run setup` instead of `zig build`"
// \\For more info, see https://bun.sh/docs/project/contributing
// \\
// \\If you want to build the zig code in isolation, run:
// \\ 'zig build obj -Dgenerated-code=./build/codegen [...opts]'
// \\
// \\If you want to test a compile without emitting an object:
// \\ 'zig build check'
// \\ 'zig build check-all' (run linux+mac+windows)
// \\
// });
// b.default_step.dependOn(&mistake_message.step);
}
}
pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
@@ -343,15 +353,10 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
},
.target = opts.target,
.optimize = opts.optimize,
.use_llvm = !opts.no_llvm,
.use_lld = if (opts.os == .mac) false else !opts.no_llvm,
// https://github.com/ziglang/zig/issues/17430
.pic = true,
.omit_frame_pointer = false,
.strip = false, // stripped at the end
});
obj.bundle_compiler_rt = false;
obj.formatted_panics = true;
obj.root_module.omit_frame_pointer = false;
@@ -369,10 +374,9 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
}
if (opts.os == .linux) {
obj.link_emit_relocs = false;
obj.link_eh_frame_hdr = false;
obj.link_emit_relocs = true;
obj.link_eh_frame_hdr = true;
obj.link_function_sections = true;
obj.link_data_sections = true;
if (opts.optimize == .Debug) {
obj.root_module.valgrind = true;
@@ -383,25 +387,6 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
return obj;
}
const ObjectFormat = enum {
bc,
obj,
};
pub fn addInstallObjectFile(
b: *Build,
compile: *Compile,
name: []const u8,
out_mode: ObjectFormat,
) *Step {
// bin always needed to be computed or else the compilation will do nothing. zig build system bug?
const bin = compile.getEmittedBin();
return &b.addInstallFile(switch (out_mode) {
.obj => bin,
.bc => compile.getEmittedLlvmBc(),
}, b.fmt("{s}.o", .{name})).step;
}
fn exists(path: []const u8) bool {
const file = std.fs.openFileAbsolute(path, .{ .mode = .read_only }) catch return false;
file.close();
@@ -462,11 +447,7 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
fn validateGeneratedPath(path: []const u8) void {
if (!exists(path)) {
std.debug.panic(
\\Generated file '{s}' is missing!
\\
\\Make sure to use CMake and Ninja, or pass a manual codegen folder with '-Dgenerated-code=...'
, .{path});
std.debug.panic("{s} does not exist in generated code directory!", .{std.fs.path.basename(path)});
}
}

bun.lockb (binary file not shown)


@@ -425,7 +425,6 @@ _bun_run_completion() {
'--external[Exclude module from transpilation (can use * wildcards). ex: -e react]:external' \
'-e[Exclude module from transpilation (can use * wildcards). ex: -e react]:external' \
'--loader[Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: js, jsx, ts, tsx, json, toml, text, file, wasm, napi]:loader' \
'--packages[Exclude dependencies from bundle, e.g. --packages external. Valid options: bundle, external]:packages' \
'-l[Parse files with .ext:loader, e.g. --loader .js:jsx. Valid loaders: js, jsx, ts, tsx, json, toml, text, file, wasm, napi]:loader' \
'--origin[Rewrite import URLs to start with --origin. Default: ""]:origin' \
'-u[Rewrite import URLs to start with --origin. Default: ""]:origin' \


@@ -1,308 +0,0 @@
Bun implements the WHATWG `fetch` standard, with some extensions to meet the needs of server-side JavaScript.
Bun also implements `node:http`, but `fetch` is generally recommended instead.
## Sending an HTTP request
To send an HTTP request, use `fetch`:
```ts
const response = await fetch("http://example.com");
console.log(response.status); // => 200
const text = await response.text(); // or response.json(), response.formData(), etc.
```
`fetch` also works with HTTPS URLs.
```ts
const response = await fetch("https://example.com");
```
You can also pass `fetch` a [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) object.
```ts
const request = new Request("http://example.com", {
method: "POST",
body: "Hello, world!",
});
const response = await fetch(request);
```
### Sending a POST request
To send a POST request, pass an object with the `method` property set to `"POST"`.
```ts
const response = await fetch("http://example.com", {
method: "POST",
body: "Hello, world!",
});
```
`body` can be a string, a `FormData` object, an `ArrayBuffer`, a `Blob`, and more. See the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/API/Body/body) for more information.
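For instance, here's a sketch of posting a `FormData` body (the URL and field are placeholders):
```ts
const form = new FormData();
form.append("name", "bun");

const response = await fetch("http://example.com", {
  method: "POST",
  body: form,
});
```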
### Proxying requests
To proxy a request, pass an object with the `proxy` property set to a URL.
```ts
const response = await fetch("http://example.com", {
proxy: "http://proxy.com",
});
```
### Custom headers
To set custom headers, pass an object with the `headers` property set to an object.
```ts
const response = await fetch("http://example.com", {
headers: {
"X-Custom-Header": "value",
},
});
```
You can also set headers using the [Headers](https://developer.mozilla.org/en-US/docs/Web/API/Headers) object.
```ts
const headers = new Headers();
headers.append("X-Custom-Header", "value");
const response = await fetch("http://example.com", {
headers,
});
```
### Response bodies
To read the response body, use one of the following methods:
- `response.text(): Promise<string>`: Returns a promise that resolves with the response body as a string.
- `response.json(): Promise<any>`: Returns a promise that resolves with the response body as a JSON object.
- `response.formData(): Promise<FormData>`: Returns a promise that resolves with the response body as a `FormData` object.
- `response.bytes(): Promise<Uint8Array>`: Returns a promise that resolves with the response body as a `Uint8Array`.
- `response.arrayBuffer(): Promise<ArrayBuffer>`: Returns a promise that resolves with the response body as an `ArrayBuffer`.
- `response.blob(): Promise<Blob>`: Returns a promise that resolves with the response body as a `Blob`.
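For example, a minimal sketch of reading a JSON body (the URL is a placeholder):
```ts
const response = await fetch("http://example.com");
const data = await response.json();
console.log(data);
```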
#### Streaming response bodies
You can use async iterators to stream the response body.
```ts
const response = await fetch("http://example.com");
for await (const chunk of response.body) {
console.log(chunk);
}
```
You can also more directly access the `ReadableStream` object.
```ts
const response = await fetch("http://example.com");
const stream = response.body;
const reader = stream.getReader();
const { value, done } = await reader.read();
```
### Fetching a URL with a timeout
To fetch a URL with a timeout, use `AbortSignal.timeout`:
```ts
const response = await fetch("http://example.com", {
signal: AbortSignal.timeout(1000),
});
```
#### Canceling a request
To cancel a request, use an `AbortController`:
```ts
const controller = new AbortController();
const response = await fetch("http://example.com", {
signal: controller.signal,
});
controller.abort();
```
### Unix domain sockets
To fetch a URL using a Unix domain socket, use the `unix: string` option:
```ts
const response = await fetch("https://hostname/a/path", {
unix: "/var/run/path/to/unix.sock",
method: "POST",
body: JSON.stringify({ message: "Hello from Bun!" }),
headers: {
"Content-Type": "application/json",
},
});
```
### TLS
To use a client certificate, use the `tls` option:
```ts
await fetch("https://example.com", {
tls: {
key: Bun.file("/path/to/key.pem"),
cert: Bun.file("/path/to/cert.pem"),
// ca: [Bun.file("/path/to/ca.pem")],
},
});
```
#### Custom TLS Validation
To customize the TLS validation, use the `checkServerIdentity` option in `tls`:
```ts
await fetch("https://example.com", {
tls: {
checkServerIdentity: (hostname, peerCertificate) => {
// Return an error if the certificate is invalid
},
},
});
```
This is similar to how it works in Node's `tls` module.
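A slightly fuller sketch, assuming the callback follows the same contract as Node's `tls.checkServerIdentity` (return `undefined` to accept the certificate, or an `Error` to reject it); `EXPECTED_FINGERPRINT` is a hypothetical pinned value:
```ts
import tls from "node:tls";

const EXPECTED_FINGERPRINT = "AA:BB:CC:..."; // hypothetical pinned SHA-256 fingerprint

await fetch("https://example.com", {
  tls: {
    checkServerIdentity: (hostname, peerCertificate) => {
      // Run Node's default hostname check first.
      const err = tls.checkServerIdentity(hostname, peerCertificate);
      if (err) return err;
      // Then apply custom certificate pinning.
      if (peerCertificate.fingerprint256 !== EXPECTED_FINGERPRINT) {
        return new Error("Certificate fingerprint mismatch");
      }
    },
  },
});
```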
## Debugging
To help with debugging, you can pass `verbose: true` to `fetch`:
```ts
const response = await fetch("http://example.com", {
verbose: true,
});
```
This will print the request and response headers to your terminal:
```sh
[fetch] > HTTP/1.1 GET http://example.com/
[fetch] > Connection: keep-alive
[fetch] > User-Agent: Bun/1.1.21
[fetch] > Accept: */*
[fetch] > Host: example.com
[fetch] > Accept-Encoding: gzip, deflate, br
[fetch] < 200 OK
[fetch] < Content-Encoding: gzip
[fetch] < Age: 201555
[fetch] < Cache-Control: max-age=604800
[fetch] < Content-Type: text/html; charset=UTF-8
[fetch] < Date: Sun, 21 Jul 2024 02:41:14 GMT
[fetch] < Etag: "3147526947+gzip"
[fetch] < Expires: Sun, 28 Jul 2024 02:41:14 GMT
[fetch] < Last-Modified: Thu, 17 Oct 2019 07:18:26 GMT
[fetch] < Server: ECAcc (sac/254F)
[fetch] < Vary: Accept-Encoding
[fetch] < X-Cache: HIT
[fetch] < Content-Length: 648
```
Note: `verbose: boolean` is not part of the Web standard `fetch` API and is specific to Bun.
## Performance
Before an HTTP request can be sent, the DNS lookup must be performed. This can take a significant amount of time, especially if the DNS server is slow or the network connection is poor.
After the DNS lookup, the TCP socket must be connected and the TLS handshake might need to be performed. This can also take a significant amount of time.
After the request completes, consuming the response body can also take a significant amount of time and memory.
At every step of the way, Bun provides APIs to help you optimize the performance of your application.
### DNS prefetching
To prefetch a DNS entry, you can use the `dns.prefetch` API. This API is useful when you know you'll need to connect to a host soon and want to avoid the initial DNS lookup.
```ts
import { dns } from "bun";
dns.prefetch("bun.sh", 443);
```
#### DNS caching
By default, Bun caches and deduplicates DNS queries in-memory for up to 30 seconds. You can see the cache stats by calling `dns.getCacheStats()`:
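A minimal sketch (the exact shape of the returned stats object is an assumption and may vary across Bun versions):
```ts
import { dns } from "bun";

const stats = dns.getCacheStats();
console.log(stats); // e.g. cache hits, misses, size, and error counts
```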
To learn more about DNS caching in Bun, see the [DNS caching](/docs/api/dns) documentation.
### Preconnect to a host
To preconnect to a host, you can use the `fetch.preconnect` API. This API is useful when you know you'll need to connect to a host soon and want to start the initial DNS lookup, TCP socket connection, and TLS handshake early.
```ts
import { fetch } from "bun";
fetch.preconnect("https://bun.sh");
```
Note: calling `fetch` immediately after `fetch.preconnect` will not make your request faster. Preconnecting only helps if you know you'll need to connect to a host soon, but you're not ready to make the request yet.
#### Preconnect at startup
To preconnect to a host at startup, you can pass `--fetch-preconnect`:
```sh
$ bun --fetch-preconnect https://bun.sh ./my-script.ts
```
This is sort of like `<link rel="preconnect">` in HTML.
This feature is not implemented on Windows yet. If you're interested in using it on Windows, please file an issue and we can add support.
### Connection pooling & HTTP keep-alive
Bun automatically reuses connections to the same host. This is known as connection pooling. This can significantly reduce the time it takes to establish a connection. You don't need to do anything to enable this; it's automatic.
#### Simultaneous connection limit
By default, Bun limits the maximum number of simultaneous `fetch` requests to 256. We do this for several reasons:
- It improves overall system stability. Operating systems have an upper limit on the number of simultaneous open TCP sockets, usually in the low thousands. Nearing this limit causes your entire computer to behave strangely. Applications hang and crash.
- It encourages HTTP Keep-Alive connection reuse. For short-lived HTTP requests, the slowest step is often the initial connection setup. Reusing connections can save a lot of time.
When the limit is exceeded, the requests are queued and sent as soon as the next request ends.
You can increase the maximum number of simultaneous connections via the `BUN_CONFIG_MAX_HTTP_REQUESTS` environment variable:
```sh
$ BUN_CONFIG_MAX_HTTP_REQUESTS=512 bun ./my-script.ts
```
The max value for this limit is currently set to 65,336. The maximum port number is 65,535, so it's quite difficult for any one computer to exceed this limit.
### Response buffering
Bun goes to great lengths to optimize the performance of reading the response body. The fastest way to read the response body is to use one of these methods:
- `response.text(): Promise<string>`
- `response.json(): Promise<any>`
- `response.formData(): Promise<FormData>`
- `response.bytes(): Promise<Uint8Array>`
- `response.arrayBuffer(): Promise<ArrayBuffer>`
- `response.blob(): Promise<Blob>`
You can also use `Bun.write` to write the response body to a file on disk:
```ts
import { write } from "bun";
await write("output.txt", response);
```


@@ -756,25 +756,6 @@ $ bun build ./index.tsx --outdir ./out --external '*'
{% /codetabs %}
### `packages`
Controls whether package dependencies are included in the bundle or left external. Possible values: `bundle` (default), `external`. Bun treats any import whose path does not start with `.`, `..`, or `/` as a package.
{% codetabs group="a" %}
```ts#JavaScript
await Bun.build({
entrypoints: ['./index.ts'],
packages: 'external',
})
```
```bash#CLI
$ bun build ./index.ts --packages external
```
{% /codetabs %}
### `naming`
Customizes the generated file names. Defaults to `./[dir]/[name].[ext]`.

View File

@@ -94,8 +94,8 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
---
- `--packages`
- `--packages`
- No differences
- n/a
- Not supported
---

View File

@@ -287,11 +287,8 @@ export default {
divider("API"),
page("api/http", "HTTP server", {
description: `Bun implements a fast HTTP server built on Request/Response objects, along with supporting node:http APIs.`,
description: `Bun implements Web-standard fetch, plus a Bun-native API for building fast HTTP servers.`,
}), // "`Bun.serve`"),
page("api/fetch", "HTTP client", {
description: `Bun implements Web-standard fetch with some Bun-native extensions.`,
}), // "fetch"),
page("api/websockets", "WebSockets", {
description: `Bun supports server-side WebSockets with on-the-fly compression, TLS support, and a Bun-native pubsub API.`,
}), // "`Bun.serve`"),

View File

@@ -153,7 +153,7 @@ Some methods are not optimized yet.
### [`node:util`](https://nodejs.org/api/util.html)
🟡 Missing `MIMEParams` `MIMEType` `aborted` `debug` `getSystemErrorMap` `transferableAbortController` `transferableAbortSignal` `stripVTControlCharacters`
🟡 Missing `MIMEParams` `MIMEType` `aborted` `debug` `getSystemErrorMap` `getSystemErrorName` `transferableAbortController` `transferableAbortSignal` `stripVTControlCharacters`
### [`node:v8`](https://nodejs.org/api/v8.html)

View File

@@ -7,36 +7,22 @@ The following Web APIs are partially or completely supported.
---
- HTTP
- [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/fetch)
[`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response)
[`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request)
[`Headers`](https://developer.mozilla.org/en-US/docs/Web/API/Headers)
[`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController)
[`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal)
- [`fetch`](https://developer.mozilla.org/en-US/docs/Web/API/fetch) [`Response`](https://developer.mozilla.org/en-US/docs/Web/API/Response) [`Request`](https://developer.mozilla.org/en-US/docs/Web/API/Request) [`Headers`](https://developer.mozilla.org/en-US/docs/Web/API/Headers) [`AbortController`](https://developer.mozilla.org/en-US/docs/Web/API/AbortController) [`AbortSignal`](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal)
---
- URLs
- [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL)
[`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams)
- [`URL`](https://developer.mozilla.org/en-US/docs/Web/API/URL) [`URLSearchParams`](https://developer.mozilla.org/en-US/docs/Web/API/URLSearchParams)
---
- Web Workers
- [`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker)
[`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/DedicatedWorkerGlobalScope/postMessage)
[`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone)
[`MessagePort`](https://developer.mozilla.org/en-US/docs/Web/API/MessagePort)
[`MessageChannel`](https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel), [`BroadcastChannel`](https://developer.mozilla.org/en-US/docs/Web/API/BroadcastChannel).
- [`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker) [`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/DedicatedWorkerGlobalScope/postMessage) [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone) [`MessagePort`](https://developer.mozilla.org/en-US/docs/Web/API/MessagePort) [`MessageChannel`](https://developer.mozilla.org/en-US/docs/Web/API/MessageChannel), [`BroadcastChannel`](https://developer.mozilla.org/en-US/docs/Web/API/BroadcastChannel).
---
- Streams
- [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream)
[`WritableStream`](https://developer.mozilla.org/en-US/docs/Web/API/WritableStream)
[`TransformStream`](https://developer.mozilla.org/en-US/docs/Web/API/TransformStream)
[`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy)
[`CountQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/CountQueuingStrategy) and associated classes
- [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream) [`WritableStream`](https://developer.mozilla.org/en-US/docs/Web/API/WritableStream) [`TransformStream`](https://developer.mozilla.org/en-US/docs/Web/API/TransformStream) [`ByteLengthQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/ByteLengthQueuingStrategy) [`CountQueuingStrategy`](https://developer.mozilla.org/en-US/docs/Web/API/CountQueuingStrategy) and associated classes
---
@@ -51,10 +37,7 @@ The following Web APIs are partially or completely supported.
---
- Encoding and decoding
- [`atob`](https://developer.mozilla.org/en-US/docs/Web/API/atob)
[`btoa`](https://developer.mozilla.org/en-US/docs/Web/API/btoa)
[`TextEncoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder)
[`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder)
- [`atob`](https://developer.mozilla.org/en-US/docs/Web/API/atob) [`btoa`](https://developer.mozilla.org/en-US/docs/Web/API/btoa) [`TextEncoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextEncoder) [`TextDecoder`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoder)
---
@@ -64,8 +47,7 @@ The following Web APIs are partially or completely supported.
---
- Timeouts
- [`setTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/setTimeout)
[`clearTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/clearTimeout)
- [`setTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/setTimeout) [`clearTimeout`](https://developer.mozilla.org/en-US/docs/Web/API/clearTimeout)
---
@@ -75,16 +57,14 @@ The following Web APIs are partially or completely supported.
---
- Crypto
- [`crypto`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto)
[`SubtleCrypto`](https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto)
- [`crypto`](https://developer.mozilla.org/en-US/docs/Web/API/Crypto) [`SubtleCrypto`](https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto)
[`CryptoKey`](https://developer.mozilla.org/en-US/docs/Web/API/CryptoKey)
---
- Debugging
- [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console)
[`performance`](https://developer.mozilla.org/en-US/docs/Web/API/Performance)
- [`console`](https://developer.mozilla.org/en-US/docs/Web/API/console) [`performance`](https://developer.mozilla.org/en-US/docs/Web/API/Performance)
---
@@ -99,9 +79,7 @@ The following Web APIs are partially or completely supported.
---
- User interaction
- [`alert`](https://developer.mozilla.org/en-US/docs/Web/API/Window/alert)
[`confirm`](https://developer.mozilla.org/en-US/docs/Web/API/Window/confirm)
[`prompt`](https://developer.mozilla.org/en-US/docs/Web/API/Window/prompt) (intended for interactive CLIs)
- [`alert`](https://developer.mozilla.org/en-US/docs/Web/API/Window/alert) [`confirm`](https://developer.mozilla.org/en-US/docs/Web/API/Window/confirm) [`prompt`](https://developer.mozilla.org/en-US/docs/Web/API/Window/prompt) (intended for interactive CLIs)
<!-- - Blocking. Prints the alert message to terminal and awaits `[ENTER]` before proceeding. -->
<!-- - Blocking. Prints confirmation message and awaits `[y/N]` input from user. Returns `true` if user entered `y` or `Y`, `false` otherwise.
@@ -116,10 +94,7 @@ The following Web APIs are partially or completely supported.
- Events
- [`EventTarget`](https://developer.mozilla.org/en-US/docs/Web/API/EventTarget)
[`Event`](https://developer.mozilla.org/en-US/docs/Web/API/Event)
[`ErrorEvent`](https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent)
[`CloseEvent`](https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent)
[`MessageEvent`](https://developer.mozilla.org/en-US/docs/Web/API/MessageEvent)
[`Event`](https://developer.mozilla.org/en-US/docs/Web/API/Event) [`ErrorEvent`](https://developer.mozilla.org/en-US/docs/Web/API/ErrorEvent) [`CloseEvent`](https://developer.mozilla.org/en-US/docs/Web/API/CloseEvent) [`MessageEvent`](https://developer.mozilla.org/en-US/docs/Web/API/MessageEvent)
---

View File

@@ -196,41 +196,3 @@ As of Bun v1.0.19, Bun automatically resolves the `specifier` argument to `mock.
After resolution, the mocked module is stored in the ES Module registry **and** the CommonJS require cache. This means that you can use `import` and `require` interchangeably for mocked modules.
The callback function is called lazily, only if the module is imported or required. This means that you can use `mock.module()` to mock modules that don't exist yet, and it means that you can use `mock.module()` to mock modules that are imported by other modules.
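A minimal sketch of that laziness (the module path and its exports are illustrative):
```ts
import { expect, mock, test } from "bun:test";

// The factory is not invoked here -- it runs only once something
// actually imports or requires "./config".
mock.module("./config", () => ({
  default: { apiUrl: "http://localhost:3000" },
}));

test("lazily mocked module", async () => {
  const config = (await import("./config")).default; // factory runs now
  expect(config.apiUrl).toBe("http://localhost:3000");
});
```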
## Restore all function mocks to their original values with `mock.restore()`
Instead of manually restoring each mock individually with `mockFn.mockRestore()`, restore all mocks with one command by calling `mock.restore()`. Doing so does not reset the value of modules overridden with `mock.module()`.
Using `mock.restore()` can reduce the amount of code in your tests by adding it to `afterEach` blocks in each test file or even in your [test preload code](https://bun.sh/docs/runtime/bunfig#test-preload).
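For example, a sketch of the `afterEach` pattern mentioned above:
```ts
import { afterEach, mock } from "bun:test";

// Restore every function mock after each test in this file.
afterEach(() => {
  mock.restore();
});
```
The longer example below walks through spies being mocked and then restored: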
```ts
import { expect, mock, spyOn, test } from "bun:test";
import * as fooModule from './foo.ts';
import * as barModule from './bar.ts';
import * as bazModule from './baz.ts';
test('foo, bar, baz', () => {
const fooSpy = spyOn(fooModule, 'foo');
const barSpy = spyOn(barModule, 'bar');
const bazSpy = spyOn(bazModule, 'baz');
expect(fooSpy()).toBe('foo');
expect(barSpy()).toBe('bar');
expect(bazSpy()).toBe('baz');
fooSpy.mockImplementation(() => 42);
barSpy.mockImplementation(() => 43);
bazSpy.mockImplementation(() => 44);
expect(fooSpy()).toBe(42);
expect(barSpy()).toBe(43);
expect(bazSpy()).toBe(44);
mock.restore();
expect(fooSpy()).toBe('foo');
expect(barSpy()).toBe('bar');
expect(bazSpy()).toBe('baz');
});
```

View File

@@ -195,6 +195,7 @@ pub fn main() anyerror!void {
args.headers_buf,
response_body_string,
args.body,
0,
HTTP.FetchRedirect.follow,
),
};

View File

@@ -31,6 +31,7 @@ const params = [_]clap.Param(clap.Help){
clap.parseParam("-b, --body <STR> HTTP request body as a string") catch unreachable,
clap.parseParam("-f, --file <STR> File path to load as body") catch unreachable,
clap.parseParam("-n, --count <INT> How many runs? Default 10") catch unreachable,
clap.parseParam("-t, --timeout <INT> Max duration per request") catch unreachable,
clap.parseParam("-r, --retry <INT> Max retry count") catch unreachable,
clap.parseParam("--no-gzip Disable gzip") catch unreachable,
clap.parseParam("--no-deflate Disable deflate") catch unreachable,
@@ -74,6 +75,7 @@ pub const Arguments = struct {
body: string = "",
turbo: bool = false,
count: usize = 10,
timeout: usize = 0,
repeat: usize = 0,
concurrency: u16 = 32,
@@ -163,6 +165,10 @@ pub const Arguments = struct {
// .keep_alive = !args.flag("--no-keep-alive"),
.concurrency = std.fmt.parseInt(u16, args.option("--max-concurrency") orelse "32", 10) catch 32,
.turbo = args.flag("--turbo"),
.timeout = std.fmt.parseInt(usize, args.option("--timeout") orelse "0", 10) catch |err| {
Output.prettyErrorln("<r><red>{s}<r> parsing timeout", .{@errorName(err)});
Global.exit(1);
},
.count = std.fmt.parseInt(usize, args.option("--count") orelse "10", 10) catch |err| {
Output.prettyErrorln("<r><red>{s}<r> parsing count", .{@errorName(err)});
Global.exit(1);
@@ -219,6 +225,7 @@ pub fn main() anyerror!void {
args.headers_buf,
response_body,
"",
args.timeout,
),
};
ctx.http.client.verbose = args.verbose;

View File

@@ -4,7 +4,7 @@
"workspaces": [
"./packages/bun-types"
],
"devDependencies": {
"dependencies": {
"@vscode/debugadapter": "^1.65.0",
"esbuild": "^0.21.4",
"eslint": "^9.4.0",
@@ -15,7 +15,9 @@
"react": "^18.3.1",
"react-dom": "^18.3.1",
"source-map-js": "^1.2.0",
"typescript": "^5.4.5",
"typescript": "^5.4.5"
},
"devDependencies": {
"@types/bun": "^1.1.3",
"@types/react": "^18.3.3",
"@typescript-eslint/eslint-plugin": "^7.11.0",

View File

@@ -22,10 +22,10 @@ bun upgrade
- [Linux, arm64](https://www.npmjs.com/package/@oven/bun-linux-aarch64)
- [Linux, x64](https://www.npmjs.com/package/@oven/bun-linux-x64)
- [Linux, x64 (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-linux-x64-baseline)
- [Windows](https://www.npmjs.com/package/@oven/bun-windows-x64)
- [Windows (without AVX2 instructions)](https://www.npmjs.com/package/@oven/bun-windows-x64-baseline)
- [Windows (using Windows Subsystem for Linux, aka. "WSL")](https://relatablecode.com/how-to-set-up-bun-on-a-windows-machine)
### Future Platforms
- [Windows](https://github.com/oven-sh/bun/issues/43)
- Unix-like variants such as FreeBSD, OpenBSD, etc.
- Android and iOS

View File

@@ -1455,7 +1455,7 @@ declare module "bun" {
* ```js
* const {imports, exports} = transpiler.scan(`
* import {foo} from "baz";
* export const hello = "hi!";
* const hello = "hi!";
* `);
*
* console.log(imports); // ["baz"]
@@ -1516,7 +1516,6 @@ declare module "bun" {
plugins?: BunPlugin[];
// manifest?: boolean; // whether to return manifest
external?: string[];
packages?: "bundle" | "external";
publicPath?: string;
define?: Record<string, string>;
// origin?: string; // e.g. http://mydomain.com
@@ -3100,10 +3099,6 @@ declare module "bun" {
*/
function openInEditor(path: string, options?: EditorOptions): void;
const fetch: typeof globalThis.fetch & {
preconnect(url: string): void;
};
interface EditorOptions {
editor?: "vscode" | "subl";
line?: number;
@@ -3481,13 +3476,6 @@ declare module "bun" {
* Filtered data consists mostly of small values with a somewhat random distribution.
*/
strategy?: number;
library?: "zlib";
}
interface LibdeflateCompressionOptions {
level?: 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12;
library?: "libdeflate";
}
/**
@@ -3496,38 +3484,26 @@ declare module "bun" {
* @param options Compression options to use
* @returns The output buffer with the compressed data
*/
function deflateSync(
data: Uint8Array | string | ArrayBuffer,
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
): Uint8Array;
function deflateSync(data: Uint8Array | string | ArrayBuffer, options?: ZlibCompressionOptions): Uint8Array;
/**
* Compresses a chunk of data with `zlib` GZIP algorithm.
* @param data The buffer of data to compress
* @param options Compression options to use
* @returns The output buffer with the compressed data
*/
function gzipSync(
data: Uint8Array | string | ArrayBuffer,
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
): Uint8Array;
function gzipSync(data: Uint8Array | string | ArrayBuffer, options?: ZlibCompressionOptions): Uint8Array;
/**
* Decompresses a chunk of data with `zlib` INFLATE algorithm.
* @param data The buffer of data to decompress
* @returns The output buffer with the decompressed data
*/
function inflateSync(
data: Uint8Array | string | ArrayBuffer,
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
): Uint8Array;
function inflateSync(data: Uint8Array | string | ArrayBuffer): Uint8Array;
/**
* Decompresses a chunk of data with `zlib` GUNZIP algorithm.
* @param data The buffer of data to decompress
* @returns The output buffer with the decompressed data
*/
function gunzipSync(
data: Uint8Array | string | ArrayBuffer,
options?: ZlibCompressionOptions | LibdeflateCompressionOptions,
): Uint8Array;
function gunzipSync(data: Uint8Array | string | ArrayBuffer): Uint8Array;
type Target =
/**
@@ -3847,7 +3823,7 @@ declare module "bun" {
*/
const isMainThread: boolean;
interface Socket<Data = undefined> extends Disposable {
interface Socket<Data = undefined> {
/**
* Write `data` to the socket
*
@@ -4129,7 +4105,7 @@ declare module "bun" {
setMaxSendFragment(size: number): boolean;
}
interface SocketListener<Data = undefined> extends Disposable {
interface SocketListener<Data = undefined> {
stop(closeActiveConnections?: boolean): void;
ref(): void;
unref(): void;

View File

@@ -907,42 +907,26 @@ declare global {
new (): ShadowRealm;
};
interface Fetch {
/**
* Send a HTTP(s) request
*
* @param request Request object
* @param init A structured value that contains settings for the fetch() request.
*
* @returns A promise that resolves to {@link Response} object.
*/
(request: Request, init?: RequestInit): Promise<Response>;
/**
* Send a HTTP(s) request
*
* @param request Request object
* @param init A structured value that contains settings for the fetch() request.
*
* @returns A promise that resolves to {@link Response} object.
*/
/**
* Send a HTTP(s) request
*
* @param url URL string
* @param init A structured value that contains settings for the fetch() request.
*
* @returns A promise that resolves to {@link Response} object.
*/
(url: string | URL | Request, init?: FetchRequestInit): Promise<Response>;
(input: string | URL | globalThis.Request, init?: RequestInit): Promise<Response>;
/**
* Start the DNS resolution, TCP connection, and TLS handshake for a request
* before the request is actually sent.
*
* This can reduce the latency of a request when you know there's some
* long-running task that will delay the request starting.
*
* This is a bun-specific API and is not part of the Fetch API specification.
*/
preconnect(url: string | URL): void;
}
var fetch: Fetch;
// tslint:disable-next-line:unified-signatures
function fetch(request: Request, init?: RequestInit): Promise<Response>;
/**
* Send a HTTP(s) request
*
* @param url URL string
* @param init A structured value that contains settings for the fetch() request.
*
* @returns A promise that resolves to {@link Response} object.
*/
function fetch(url: string | URL | Request, init?: FetchRequestInit): Promise<Response>;
function queueMicrotask(callback: (...args: any[]) => void): void;
/**

View File

@@ -882,7 +882,7 @@ int bsd_connect_udp_socket(LIBUS_SOCKET_DESCRIPTOR fd, const char *host, int por
}
freeaddrinfo(result);
return (int)LIBUS_SOCKET_ERROR;
return LIBUS_SOCKET_ERROR;
}
int bsd_disconnect_udp_socket(LIBUS_SOCKET_DESCRIPTOR fd) {

View File

@@ -0,0 +1,38 @@
CAPI_EXAMPLE_FILES := HelloWorld HelloWorldAsync ServerName UpgradeSync UpgradeAsync EchoServer Broadcast BroadcastEchoServer
RUST_EXAMPLE_FILES := RustHelloWorld
LIBRARY_NAME := libuwebsockets

default:
	$(MAKE) capi
	$(CXX) -O3 -flto -I ../src -I ../uSockets/src examples/HelloWorld.c *.o -lz -luv -lssl -lcrypto -lstdc++ ../uSockets/uSockets.a -o HelloWorld

capi:
	$(MAKE) clean
	cd ../uSockets && $(CC) -pthread -DUWS_WITH_PROXY -DLIBUS_USE_OPENSSL -DLIBUS_USE_LIBUV -std=c11 -Isrc -flto -fPIC -O3 -c src/*.c src/eventing/*.c src/crypto/*.c
	cd ../uSockets && $(CXX) -std=c++17 -flto -fPIC -O3 -c src/crypto/*.cpp
	cd ../uSockets && $(AR) rvs uSockets.a *.o
	$(CXX) -DUWS_WITH_PROXY -c -O3 -std=c++17 -lz -luv -flto -fPIC -I ../src -I ../uSockets/src $(LIBRARY_NAME).cpp
	$(AR) rvs $(LIBRARY_NAME).a $(LIBRARY_NAME).o ../uSockets/uSockets.a

shared:
	$(MAKE) clean
	cd ../uSockets && $(CC) -pthread -DUWS_WITH_PROXY -DLIBUS_USE_OPENSSL -DLIBUS_USE_LIBUV -std=c11 -Isrc -flto -fPIC -O3 -c src/*.c src/eventing/*.c src/crypto/*.c
	cd ../uSockets && $(CXX) -std=c++17 -flto -fPIC -O3 -c src/crypto/*.cpp
	cd ../uSockets && $(AR) rvs uSockets.a *.o
	$(CXX) -DUWS_WITH_PROXY -c -O3 -std=c++17 -lz -luv -flto -fPIC -I ../src -I ../uSockets/src $(LIBRARY_NAME).cpp
	$(CXX) -shared -o $(LIBRARY_NAME).so $(LIBRARY_NAME).o ../uSockets/uSockets.a -fPIC -lz -luv -lssl -lcrypto

misc:
	mkdir -p ../misc && openssl req -newkey rsa:2048 -new -nodes -x509 -days 3650 -passout pass:1234 -keyout ../misc/key.pem -out ../misc/cert.pem

rust:
	$(MAKE) capi
	rustc -C link-arg=$(LIBRARY_NAME).a -C link-args="-lstdc++ -luv" -C opt-level=3 -C lto -L all=. examples/RustHelloWorld.rs -o RustHelloWorld

clean:
	rm -f *.o $(CAPI_EXAMPLE_FILES) $(RUST_EXAMPLE_FILES) $(LIBRARY_NAME).a $(LIBRARY_NAME).so

all:
	for FILE in $(CAPI_EXAMPLE_FILES); do $(CXX) -O3 -flto -I ../src -I ../uSockets/src examples/$$FILE.c *.o -luv -lstdc++ ../uSockets/uSockets.a -o $$FILE & done; \
	wait

View File

@@ -0,0 +1,157 @@
#include "../libuwebsockets.h"
#include <stdio.h>
#include <malloc.h>
#include <time.h>
#include <string.h>
#include <stdarg.h>
#define SSL 1
//Timer close helper
void uws_timer_close(struct us_timer_t *timer)
{
struct us_timer_t *t = (struct us_timer_t *)timer;
struct timer_handler_data *data;
memcpy(&data, us_timer_ext(t), sizeof(struct timer_handler_data *));
free(data);
us_timer_close(t, 0);
}
//Timer create helper
struct us_timer_t *uws_create_timer(int ms, int repeat_ms, void (*handler)(void *data), void *data)
{
struct us_loop_t *loop = uws_get_loop();
struct us_timer_t *delayTimer = us_create_timer(loop, 0, sizeof(void *));
struct timer_handler_data
{
void *data;
void (*handler)(void *data);
bool repeat;
};
struct timer_handler_data *timer_data = (struct timer_handler_data *)malloc(sizeof(timer_handler_data));
timer_data->data = data;
timer_data->handler = handler;
timer_data->repeat = repeat_ms > 0;
memcpy(us_timer_ext(delayTimer), &timer_data, sizeof(struct timer_handler_data *));
us_timer_set(
delayTimer, [](struct us_timer_t *t)
{
/* We wrote the pointer to the timer's extension */
struct timer_handler_data *data;
memcpy(&data, us_timer_ext(t), sizeof(struct timer_handler_data *));
data->handler(data->data);
if (!data->repeat)
{
free(data);
us_timer_close(t, 0);
}
},
ms, repeat_ms);
return (struct us_timer_t *)delayTimer;
}
/* This is a simple WebSocket "sync" upgrade example.
* You may compile it with "WITH_OPENSSL=1 make" or with "make" */
/* ws->getUserData returns one of these */
struct PerSocketData {
/* Fill with user data */
};
int buffer_size(const char* format, ...) {
va_list args;
va_start(args, format);
int result = vsnprintf(NULL, 0, format, args);
va_end(args);
return result + 1; // one extra byte for the trailing '\0'
}
void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void* user_data)
{
if (listen_socket){
printf("Listening on port wss://localhost:%d\n", config.port);
}
}
void open_handler(uws_websocket_t* ws){
/* Open event here, you may access uws_ws_get_user_data(WS) which points to a PerSocketData struct */
uws_ws_subscribe(SSL, ws, "broadcast", 9);
}
void message_handler(uws_websocket_t* ws, const char* message, size_t length, uws_opcode_t opcode){
}
void close_handler(uws_websocket_t* ws, int code, const char* message, size_t length){
/* You may access uws_ws_get_user_data(ws) here, but sending or
* doing any kind of I/O with the socket is not valid. */
}
void drain_handler(uws_websocket_t* ws){
/* Check uws_ws_get_buffered_amount(ws) here */
}
void ping_handler(uws_websocket_t* ws, const char* message, size_t length){
/* You don't need to handle this one, we automatically respond to pings as per standard */
}
void pong_handler(uws_websocket_t* ws, const char* message, size_t length){
/* You don't need to handle this one either */
}
void on_timer_interval(void* data){
// broadcast the unix time as millis
uws_app_t * app = (uws_app_t *)data;
struct timespec ts;
timespec_get(&ts, TIME_UTC);
int64_t millis = ts.tv_sec * 1000 + ts.tv_nsec / 1000000;
char* message = (char*)malloc((size_t)buffer_size("%ld", millis));
size_t message_length = sprintf(message, "%ld", millis);
uws_publish(SSL, app, "broadcast", 9, message, message_length, uws_opcode_t::TEXT, false);
free(message);
}
int main()
{
uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){
/* There are example certificates in uWebSockets.js repo */
.key_file_name = "../misc/key.pem",
.cert_file_name = "../misc/cert.pem",
.passphrase = "1234"
});
uws_ws(SSL, app, "/*", (uws_socket_behavior_t){
.compression = uws_compress_options_t::SHARED_COMPRESSOR,
.maxPayloadLength = 16 * 1024,
.idleTimeout = 12,
.maxBackpressure = 1 * 1024 * 1024,
.upgrade = NULL,
.open = open_handler,
.message = message_handler,
.drain = drain_handler,
.ping = ping_handler,
.pong = pong_handler,
.close = close_handler,
});
uws_app_listen(SSL, app, 9001, listen_handler, NULL);
// broadcast the unix time as millis every 8 millis
uws_create_timer(8, 8, on_timer_interval, app);
uws_app_run(SSL, app);
}

View File

@@ -0,0 +1,175 @@
#include "../libuwebsockets.h"
#include <stdio.h>
#include <malloc.h>
#include <time.h>
#include <string.h>
#include <stdarg.h>
#define SSL 1
/* This is a simple WebSocket "sync" upgrade example.
* You may compile it with "WITH_OPENSSL=1 make" or with "make" */
typedef struct
{
size_t length;
char *name;
} topic_t;
/* ws->getUserData returns one of these */
struct PerSocketData
{
/* Fill with user data */
topic_t **topics;
int topics_quantity;
int nr;
};
uws_app_t *app;
int buffer_size(const char *format, ...)
{
va_list args;
va_start(args, format);
int result = vsnprintf(NULL, 0, format, args);
va_end(args);
return result + 1; // one extra byte for the trailing '\0'
}
void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void* user_data)
{
if (listen_socket)
{
printf("Listening on port wss://localhost:%d\n", config.port);
}
}
void upgrade_handler(uws_res_t *response, uws_req_t *request, uws_socket_context_t *context)
{
/* You may read from req only here, and COPY whatever you need into your PerSocketData.
* PerSocketData is valid from .open to .close event, accessed with uws_ws_get_user_data(ws).
* HttpRequest (req) is ONLY valid in this very callback, so any data you will need later
* has to be COPIED into PerSocketData here. */
/* Immediately upgrading without doing anything "async" before, is simple */
struct PerSocketData *data = (struct PerSocketData *)malloc(sizeof(struct PerSocketData));
data->topics = (topic_t **)calloc(32, sizeof(topic_t *));
data->topics_quantity = 32;
data->nr = 0;
const char *ws_key = NULL;
const char *ws_protocol = NULL;
const char *ws_extensions = NULL;
size_t ws_key_length = uws_req_get_header(request, "sec-websocket-key", 17, &ws_key);
size_t ws_protocol_length = uws_req_get_header(request, "sec-websocket-protocol", 22, &ws_protocol);
size_t ws_extensions_length = uws_req_get_header(request, "sec-websocket-extensions", 24, &ws_extensions);
uws_res_upgrade(SSL,
response,
(void *)data,
ws_key,
ws_key_length,
ws_protocol,
ws_protocol_length,
ws_extensions,
ws_extensions_length,
context);
}
void open_handler(uws_websocket_t *ws)
{
/* Open event here, you may access uws_ws_get_user_data(ws) which points to a PerSocketData struct */
struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws);
for (int i = 0; i < data->topics_quantity; i++)
{
char *topic = (char *)malloc((size_t)buffer_size("%ld-%d", (uintptr_t)ws, i));
size_t topic_length = sprintf(topic, "%ld-%d", (uintptr_t)ws, i);
topic_t *new_topic = (topic_t*) malloc(sizeof(topic_t));
new_topic->length = topic_length;
new_topic->name = topic;
data->topics[i] = new_topic;
uws_ws_subscribe(SSL, ws, topic, topic_length);
}
}
void message_handler(uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode)
{
struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws);
topic_t *topic = data->topics[(size_t)(++data->nr % data->topics_quantity)];
uws_publish(SSL, app, topic->name, topic->length, message, length, opcode, false);
topic = data->topics[(size_t)(++data->nr % data->topics_quantity)];
uws_ws_publish(SSL, ws, topic->name, topic->length, message, length);
}
void close_handler(uws_websocket_t *ws, int code, const char *message, size_t length)
{
/* You may access uws_ws_get_user_data(ws) here, but sending or
* doing any kind of I/O with the socket is not valid. */
struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws);
if (data)
{
for (int i = 0; i < data->topics_quantity; i++)
{
topic_t* topic = data->topics[i];
free(topic->name);
free(topic);
}
free(data->topics);
free(data);
}
}
void drain_handler(uws_websocket_t *ws)
{
/* Check uws_ws_get_buffered_amount(ws) here */
}
void ping_handler(uws_websocket_t *ws, const char *message, size_t length)
{
/* You don't need to handle this one, we automatically respond to pings as per standard */
}
void pong_handler(uws_websocket_t *ws, const char *message, size_t length)
{
/* You don't need to handle this one either */
}
int main()
{
uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){
/* There are example certificates in uWebSockets.js repo */
.key_file_name = "../misc/key.pem",
.cert_file_name = "../misc/cert.pem",
.passphrase = "1234"
});
uws_ws(SSL, app, "/*", (uws_socket_behavior_t){
.compression = uws_compress_options_t::SHARED_COMPRESSOR,
.maxPayloadLength = 16 * 1024,
.idleTimeout = 12,
.maxBackpressure = 1 * 1024 * 1024,
.upgrade = upgrade_handler,
.open = open_handler,
.message = message_handler,
.drain = drain_handler,
.ping = ping_handler,
.pong = pong_handler,
.close = close_handler,
});
uws_app_listen(SSL, app, 9001, listen_handler, NULL);
uws_app_run(SSL, app);
}

View File

@@ -0,0 +1,81 @@
#include "../libuwebsockets.h"
#include <stdio.h>
#include <malloc.h>
#define SSL 1
/* This is a simple WebSocket "sync" upgrade example.
* You may compile it with "WITH_OPENSSL=1 make" or with "make" */
/* ws->getUserData returns one of these */
struct PerSocketData {
/* Fill with user data */
};
void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void* user_data)
{
if (listen_socket){
printf("Listening on port wss://localhost:%d\n", config.port);
}
}
void open_handler(uws_websocket_t* ws){
/* Open event here, you may access uws_ws_get_user_data(WS) which points to a PerSocketData struct */
}
void message_handler(uws_websocket_t* ws, const char* message, size_t length, uws_opcode_t opcode){
uws_ws_send(SSL, ws, message, length, opcode);
}
void close_handler(uws_websocket_t* ws, int code, const char* message, size_t length){
/* You may access uws_ws_get_user_data(ws) here, but sending or
* doing any kind of I/O with the socket is not valid. */
}
void drain_handler(uws_websocket_t* ws){
/* Check uws_ws_get_buffered_amount(ws) here */
}
void ping_handler(uws_websocket_t* ws, const char* message, size_t length){
/* You don't need to handle this one, we automatically respond to pings as per standard */
}
void pong_handler(uws_websocket_t* ws, const char* message, size_t length){
/* You don't need to handle this one either */
}
int main()
{
uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){
/* There are example certificates in uWebSockets.js repo */
.key_file_name = "../misc/key.pem",
.cert_file_name = "../misc/cert.pem",
.passphrase = "1234"
});
uws_ws(SSL, app, "/*", (uws_socket_behavior_t){
.compression = uws_compress_options_t::SHARED_COMPRESSOR,
.maxPayloadLength = 16 * 1024,
.idleTimeout = 12,
.maxBackpressure = 1 * 1024 * 1024,
.upgrade = NULL,
.open = open_handler,
.message = message_handler,
.drain = drain_handler,
.ping = ping_handler,
.pong = pong_handler,
.close = close_handler,
});
uws_app_listen(SSL,app, 9001, listen_handler, NULL);
uws_app_run(SSL, app);
}

View File

@@ -0,0 +1,33 @@
#include "../libuwebsockets.h"
#include "libusockets.h"
#include <stdio.h>
#define SSL 1
void get_handler(uws_res_t *res, uws_req_t *req, void *user_data)
{
uws_res_end(SSL, res, "Hello CAPI!", 11, false);
}
void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void *user_data)
{
if (listen_socket)
{
printf("Listening on port https://localhost:%d now\n", config.port);
}
}
int main()
{
/* Overly simple hello world app */
uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){
/* There are example certificates in uWebSockets.js repo */
.key_file_name = "../misc/key.pem",
.cert_file_name = "../misc/cert.pem",
.passphrase = "1234"
});
uws_app_get(SSL, app, "/*", get_handler, NULL);
uws_app_listen(SSL, app, 3000, listen_handler, NULL);
uws_app_run(SSL, app);
}

View File

@@ -0,0 +1,123 @@
#include "../libuwebsockets.h"
#include "libusockets.h"
#include <stdio.h>
#include <malloc.h>
#include <string.h>
#define SSL 0
typedef struct {
uws_res_t* res;
bool aborted;
} async_request_t;
//Timer close helper
void uws_timer_close(struct us_timer_t *timer)
{
struct us_timer_t *t = (struct us_timer_t *)timer;
struct timer_handler_data *data;
memcpy(&data, us_timer_ext(t), sizeof(struct timer_handler_data *));
free(data);
us_timer_close(t, 0);
}
//Timer create helper
struct us_timer_t *uws_create_timer(int ms, int repeat_ms, void (*handler)(void *data), void *data)
{
struct us_loop_t *loop = uws_get_loop();
struct us_timer_t *delayTimer = us_create_timer(loop, 0, sizeof(void *));
struct timer_handler_data
{
void *data;
void (*handler)(void *data);
bool repeat;
};
struct timer_handler_data *timer_data = (struct timer_handler_data *)malloc(sizeof(timer_handler_data));
timer_data->data = data;
timer_data->handler = handler;
timer_data->repeat = repeat_ms > 0;
memcpy(us_timer_ext(delayTimer), &timer_data, sizeof(struct timer_handler_data *));
us_timer_set(
delayTimer, [](struct us_timer_t *t)
{
/* We wrote the pointer to the timer's extension */
struct timer_handler_data *data;
memcpy(&data, us_timer_ext(t), sizeof(struct timer_handler_data *));
data->handler(data->data);
if (!data->repeat)
{
free(data);
us_timer_close(t, 0);
}
},
ms, repeat_ms);
return (struct us_timer_t *)delayTimer;
}
void on_res_aborted(uws_res_t *response, void* data){
async_request_t* request_data = (async_request_t*)data;
/* We don't implement any kind of cancellation here,
* so simply flag us as aborted */
request_data->aborted = true;
}
void on_res_corked(uws_res_t *response, void* data){
uws_res_end(SSL, response, "Hello CAPI!", 11, false);
}
void on_timer_done(void *data){
async_request_t* request_data = (async_request_t*)data;
/* Weren't we aborted before our async task finished? Okay, send a message! */
if(!request_data->aborted){
uws_res_cork(SSL, request_data->res,on_res_corked, request_data);
}
}
void get_handler(uws_res_t *res, uws_req_t *req, void* user_data)
{
/* We have to attach an abort handler for us to be aware
* of disconnections while we perform async tasks */
async_request_t* request_data = (async_request_t*) malloc(sizeof(async_request_t));
request_data->res = res;
request_data->aborted = false;
uws_res_on_aborted(SSL, res, on_res_aborted, request_data);
/* Simulate a short async task (here just a 1 ms timer). This looks like crap,
 * never write code that utilizes us_timer_t like this; they are high-cost and
 * should not be created and destroyed more than rarely!
 * Either way, here we go! */
uws_create_timer(1, 0, on_timer_done, request_data);
}
void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void* user_data)
{
if (listen_socket)
{
printf("Listening on port https://localhost:%d now\n", config.port);
}
}
int main()
{
/* Overly simple hello world app with async response */
uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){
/* There are example certificates in uWebSockets.js repo */
.key_file_name = "../misc/key.pem",
.cert_file_name = "../misc/cert.pem",
.passphrase = "1234"
});
uws_app_get(SSL, app, "/*", get_handler, NULL);
uws_app_listen(SSL, app, 3000, listen_handler, NULL);
uws_app_run(SSL, app);
}

View File

@@ -0,0 +1,309 @@
/* automatically generated by rust-bindgen 0.59.2 */
use std::convert::TryInto;
use std::ffi::CString;
pub type SizeT = ::std::os::raw::c_ulong;
pub type WcharT = ::std::os::raw::c_uint;
#[repr(C)]
#[repr(align(16))]
#[derive(Debug, Copy, Clone)]
pub struct max_align_t {
pub __clang_max_align_nonce1: ::std::os::raw::c_longlong,
pub __bindgen_padding_0: u64,
pub __clang_max_align_nonce2: u128,
}
#[test]
fn bindgen_test_layout_max_align_t() {
assert_eq!(
::std::mem::size_of::<max_align_t>(),
32usize,
concat!("Size of: ", stringify!(max_align_t))
);
assert_eq!(
::std::mem::align_of::<max_align_t>(),
16usize,
concat!("Alignment of ", stringify!(max_align_t))
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<max_align_t>())).__clang_max_align_nonce1 as *const _ as usize
},
0usize,
concat!(
"Offset of field: ",
stringify!(max_align_t),
"::",
stringify!(__clang_max_align_nonce1)
)
);
assert_eq!(
unsafe {
&(*(::std::ptr::null::<max_align_t>())).__clang_max_align_nonce2 as *const _ as usize
},
16usize,
concat!(
"Offset of field: ",
stringify!(max_align_t),
"::",
stringify!(__clang_max_align_nonce2)
)
);
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct uws_app_s {
_unused: [u8; 0],
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct uws_req_s {
_unused: [u8; 0],
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct uws_res_s {
_unused: [u8; 0],
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct uws_app_listen_config_s {
port: ::std::os::raw::c_int,
host: *const ::std::os::raw::c_char,
options: ::std::os::raw::c_int,
}
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct us_socket_context_options_s {
key_file_name: *const ::std::os::raw::c_char,
cert_file_name: *const ::std::os::raw::c_char,
passphrase: *const ::std::os::raw::c_char,
dh_params_file_name: *const ::std::os::raw::c_char,
ca_file_name: *const ::std::os::raw::c_char,
ssl_prefer_low_memory_usage: ::std::os::raw::c_int,
}
pub type UwsAppListenConfigT = uws_app_listen_config_s;
pub type UsSocketContextOptionsT = us_socket_context_options_s;
pub struct UsSocketContextOptions<'a> {
key_file_name: &'a str,
cert_file_name: &'a str,
passphrase: &'a str,
dh_params_file_name: &'a str,
ca_file_name: &'a str,
ssl_prefer_low_memory_usage: i32,
}
pub type UwsAppT = uws_app_s;
pub type UwsReqT = uws_req_s;
pub type UwsResT = uws_res_s;
extern "C" {
pub fn uws_create_app(
ssl: ::std::os::raw::c_int,
options: UsSocketContextOptionsT,
) -> *mut UwsAppT;
pub fn uws_app_get(
ssl: ::std::os::raw::c_int,
app: *mut UwsAppT,
pattern: *const ::std::os::raw::c_char,
handler: ::std::option::Option<
unsafe extern "C" fn(
res: *mut UwsResT,
req: *mut UwsReqT,
user_data: *mut ::std::os::raw::c_void,
),
>,
user_data: *mut ::std::os::raw::c_void,
);
pub fn uws_app_run(ssl: ::std::os::raw::c_int, app: *mut UwsAppT);
pub fn uws_app_listen(
ssl: ::std::os::raw::c_int,
app: *mut UwsAppT,
port: ::std::os::raw::c_int,
handler: ::std::option::Option<
unsafe extern "C" fn(
listen_socket: *mut ::std::os::raw::c_void,
config: UwsAppListenConfigT,
user_data: *mut ::std::os::raw::c_void,
),
>,
user_data: *mut ::std::os::raw::c_void,
);
pub fn uws_res_end(
ssl: ::std::os::raw::c_int,
res: *mut UwsResT,
data: *const ::std::os::raw::c_char,
length: SizeT,
close_connection: bool,
);
}
pub struct AppResponse<const SSL: i32> {
native: *mut UwsResT,
}
pub struct AppRequest {
native: *mut UwsReqT,
}
impl AppRequest {
pub fn new(native: *mut UwsReqT) -> AppRequest {
AppRequest { native: native }
}
}
impl<const SSL: i32> AppResponse<SSL> {
pub fn new(native: *mut UwsResT) -> AppResponse<SSL> {
AppResponse::<SSL> { native: native }
}
fn end(self, message: &str) -> AppResponse<SSL> {
unsafe {
let c_message =
::std::ffi::CString::new(message).expect("Failed to create message CString");
//This will now const fold :/ performance impact needs refactor
uws_res_end(
SSL,
self.native,
c_message.as_ptr(),
message.len().try_into().unwrap(),
false,
);
}
self
}
}
pub type UwsMethodHandler<const SSL: i32> = fn(res: AppResponse<SSL>, req: AppRequest);
pub type UwsListenHandler =
fn(listen_socket: *mut ::std::os::raw::c_void, config: UwsAppListenConfigT);
pub struct TemplateApp<const SSL: i32> {
native: *mut UwsAppT,
}
extern "C" fn uws_generic_listen_handler(
listen_socket: *mut ::std::os::raw::c_void,
config: UwsAppListenConfigT,
user_data: *mut ::std::os::raw::c_void,
) {
unsafe {
let callback = &mut *(user_data as *mut UwsListenHandler);
callback(listen_socket, config);
}
}
extern "C" fn uws_generic_method_handler(
res: *mut UwsResT,
req: *mut UwsReqT,
user_data: *mut ::std::os::raw::c_void,
) {
unsafe {
let response = AppResponse::<0>::new(res);
let request = AppRequest::new(req);
let callback = &mut *(user_data as *mut UwsMethodHandler<0>);
callback(response, request);
}
}
extern "C" fn uws_ssl_generic_method_handler(
res: *mut UwsResT,
req: *mut UwsReqT,
user_data: *mut ::std::os::raw::c_void,
) {
unsafe {
let response = AppResponse::<1>::new(res);
let request = AppRequest::new(req);
let callback = &mut *(user_data as *mut UwsMethodHandler<1>);
callback(response, request);
}
}
impl<const SSL: i32> TemplateApp<SSL> {
pub fn new(config: UsSocketContextOptions) -> TemplateApp<SSL> {
unsafe {
let key_file_name_s =
CString::new(config.key_file_name).expect("Failed to create key_file_name CString");
let cert_file_name_s = CString::new(config.cert_file_name)
.expect("Failed to create cert_file_name CString");
let passphrase_s =
CString::new(config.passphrase).expect("Failed to create passphrase CString");
let dh_params_file_name_s = CString::new(config.dh_params_file_name)
.expect("Failed to create dh_params_file_name CString");
let ca_file_name_s =
CString::new(config.ca_file_name).expect("Failed to create ca_file_name CString");
let native_options = UsSocketContextOptionsT {
key_file_name: key_file_name_s.as_ptr(),
cert_file_name: cert_file_name_s.as_ptr(),
passphrase: passphrase_s.as_ptr(),
dh_params_file_name: dh_params_file_name_s.as_ptr(),
ca_file_name: ca_file_name_s.as_ptr(),
ssl_prefer_low_memory_usage: config.ssl_prefer_low_memory_usage,
};
TemplateApp::<SSL> {
native: uws_create_app(SSL, native_options),
}
}
}
pub fn get(self, route: &str, mut handler: UwsMethodHandler<SSL>) -> TemplateApp<SSL> {
unsafe {
let c_route = ::std::ffi::CString::new(route).expect("Failed to create route CString");
if SSL == 1 {
uws_app_get(
SSL,
self.native,
c_route.as_ptr(),
std::option::Option::Some(uws_ssl_generic_method_handler),
&mut handler as *mut _ as *mut ::std::os::raw::c_void,
);
} else {
uws_app_get(
SSL,
self.native,
c_route.as_ptr(),
std::option::Option::Some(uws_generic_method_handler),
&mut handler as *mut _ as *mut ::std::os::raw::c_void,
);
}
}
self
}
pub fn listen(self, port: i32, mut handler: UwsListenHandler) -> TemplateApp<SSL> {
unsafe {
uws_app_listen(
SSL,
self.native,
port,
::std::option::Option::Some(uws_generic_listen_handler),
&mut handler as *mut _ as *mut ::std::os::raw::c_void,
);
}
self
}
pub fn run(self) -> TemplateApp<SSL> {
unsafe {
uws_app_run(SSL, self.native);
}
self
}
}
pub type App = TemplateApp<0>;
pub type SSLApp = TemplateApp<1>;
fn main() {
let config = UsSocketContextOptions {
key_file_name: "../misc/key.pem",
cert_file_name: "../misc/cert.pem",
passphrase: "1234",
ca_file_name: "",
dh_params_file_name: "",
ssl_prefer_low_memory_usage: 0,
};
SSLApp::new(config)
.get("/", |res, _req| {
res.end("Hello Rust!");
})
.listen(3000, |_listen_socket, config| {
println!("Listening on port https://127.0.0.1:{}", config.port);
})
.run();
}

View File

@@ -0,0 +1,59 @@
#include "../libuwebsockets.h"
#include <stdio.h>
#include <string.h>
#define SSL 1
struct us_listen_socket_t *globalListenSocket;
uws_app_t *app;
void get_handler(uws_res_t *res, uws_req_t *req, void* user_data)
{
uws_res_end(SSL, res, "Hello CAPI!", 11, false);
}
void exit_handler(uws_res_t *res, uws_req_t *req, void* user_data)
{
uws_res_end(SSL, res, "Shutting down!",14, false);
/* We use this to check graceful closedown */
us_listen_socket_close(false, globalListenSocket);
}
void missing_server_name_handler(const char *hostname, void* user_data){
printf("We are missing server name: <%s>\n", hostname);
/* Assume it is localhost, so add it */
uws_add_server_name(SSL, app, "localhost");
}
void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void* user_data)
{
if (listen_socket){
printf("Listening on port https://localhost:%d\n", config.port);
globalListenSocket = listen_socket;
}else{
printf("Failed to listen on port https://localhost:%d\n", config.port);
}
}
int main()
{
/* Overly simple hello world app (SNI)*/
app = uws_create_app(SSL, (struct us_socket_context_options_t){
/* There are example certificates in uWebSockets.js repo */
.key_file_name = "../misc/key.pem",
.cert_file_name = "../misc/cert.pem",
.passphrase = "1234"
});
uws_missing_server_name(SSL, app, missing_server_name_handler, NULL);
uws_app_get(SSL, app, "/*", get_handler, NULL);
uws_app_get(SSL, app, "/exit", exit_handler, NULL);
uws_app_listen(SSL, app, 3000, listen_handler, NULL);
/* Let's add a wildcard SNI to begin with */
uws_add_server_name(SSL, app, "*.google.*");
uws_app_run(SSL, app);
}

View File

@@ -0,0 +1,255 @@
#include "../libuwebsockets.h"
#include "libusockets.h"
#include <stdio.h>
#include <malloc.h>
#include <string.h>
/* This is a simple WebSocket "sync" upgrade example.
* You may compile it with "WITH_OPENSSL=1 make" or with "make" */
#define SSL 1
typedef struct
{
char *value;
size_t length;
} header_t;
struct PerSocketData
{
/* Define your user data */
int something;
};
struct UpgradeData
{
header_t *secWebSocketKey;
header_t *secWebSocketProtocol;
header_t *secWebSocketExtensions;
uws_socket_context_t *context;
uws_res_t *response;
bool aborted;
};
header_t *create_header(size_t length, const char* value)
{
header_t *header = (header_t *)malloc(sizeof(header_t));
if(length > 0){
header->value = (char *)calloc(sizeof(char), length);
header->length = length;
memcpy(header->value, value, length);
}else{
header->value = NULL;
header->length = 0;
}
return header;
}
void free_header(header_t *header)
{
free(header->value);
free(header);
}
void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void *user_data)
{
if (listen_socket)
{
printf("Listening on port wss://localhost:%d\n", config.port);
}
}
//Timer close helper
void uws_timer_close(struct us_timer_t *timer)
{
struct us_timer_t *t = (struct us_timer_t *)timer;
struct timer_handler_data *data;
memcpy(&data, us_timer_ext(t), sizeof(struct timer_handler_data *));
free(data);
us_timer_close(t, 0);
}
//Timer create helper
struct us_timer_t *uws_create_timer(int ms, int repeat_ms, void (*handler)(void *data), void *data)
{
struct us_loop_t *loop = uws_get_loop();
struct us_timer_t *delayTimer = us_create_timer(loop, 0, sizeof(void *));
struct timer_handler_data
{
void *data;
void (*handler)(void *data);
bool repeat;
};
struct timer_handler_data *timer_data = (struct timer_handler_data *)malloc(sizeof(timer_handler_data));
timer_data->data = data;
timer_data->handler = handler;
timer_data->repeat = repeat_ms > 0;
memcpy(us_timer_ext(delayTimer), &timer_data, sizeof(struct timer_handler_data *));
us_timer_set(
delayTimer, [](struct us_timer_t *t)
{
/* We wrote the pointer to the timer's extension */
struct timer_handler_data *data;
memcpy(&data, us_timer_ext(t), sizeof(struct timer_handler_data *));
data->handler(data->data);
if (!data->repeat)
{
free(data);
us_timer_close(t, 0);
}
},
ms, repeat_ms);
return (struct us_timer_t *)delayTimer;
}
void on_timer_done(void *data)
{
struct UpgradeData *upgrade_data = (struct UpgradeData *)data;
/* Weren't we aborted before our async task finished? Okay, upgrade then! */
if (!upgrade_data->aborted)
{
struct PerSocketData *socket_data = (struct PerSocketData *)malloc(sizeof(struct PerSocketData));
socket_data->something = 15;
printf("Async task done, upgrading to WebSocket now!\n");
uws_res_upgrade(SSL,
upgrade_data->response,
(void *)socket_data,
upgrade_data->secWebSocketKey->value,
upgrade_data->secWebSocketKey->length,
upgrade_data->secWebSocketProtocol->value,
upgrade_data->secWebSocketProtocol->length,
upgrade_data->secWebSocketExtensions->value,
upgrade_data->secWebSocketExtensions->length,
upgrade_data->context);
}
else
{
printf("Async task done, but the HTTP socket was closed. Skipping upgrade to WebSocket!\n");
}
free_header(upgrade_data->secWebSocketKey);
free_header(upgrade_data->secWebSocketProtocol);
free_header(upgrade_data->secWebSocketExtensions);
free(upgrade_data);
}
void on_res_aborted(uws_res_t *response, void *data)
{
struct UpgradeData *upgrade_data = (struct UpgradeData *)data;
/* We don't implement any kind of cancellation here,
* so simply flag us as aborted */
upgrade_data->aborted = true;
}
void upgrade_handler(uws_res_t *response, uws_req_t *request, uws_socket_context_t *context)
{
/* HttpRequest (req) is only valid in this very callback, so we must COPY the headers
* we need later on while upgrading to WebSocket. You must not access req after first return.
* Here we create a heap allocated struct holding everything we will need later on. */
struct UpgradeData *data = (struct UpgradeData *)malloc(sizeof(struct UpgradeData));
data->aborted = false;
data->context = context;
data->response = response;
const char *ws_key = NULL;
const char *ws_protocol = NULL;
const char *ws_extensions = NULL;
size_t ws_key_length = uws_req_get_header(request, "sec-websocket-key", 17, &ws_key);
size_t ws_protocol_length = uws_req_get_header(request, "sec-websocket-protocol", 22, &ws_protocol);
size_t ws_extensions_length = uws_req_get_header(request, "sec-websocket-extensions", 24, &ws_extensions);
data->secWebSocketKey = create_header(ws_key_length, ws_key);
data->secWebSocketProtocol = create_header(ws_protocol_length, ws_protocol);
data->secWebSocketExtensions = create_header(ws_extensions_length, ws_extensions);
/* We have to attach an abort handler for us to be aware
* of disconnections while we perform async tasks */
uws_res_on_aborted(SSL, response, on_res_aborted, data);
/* Simulate checking auth for 5 seconds. This looks like crap, never write
 * code that utilizes us_timer_t like this; they are high-cost and should
 * not be created and destroyed more than rarely!
 * Either way, here we go! */
uws_create_timer(5000, 0, on_timer_done, data);
}
void open_handler(uws_websocket_t *ws)
{
/* Open event here, you may access uws_ws_get_user_data(ws) which points to a PerSocketData struct.
* Here we simply validate that indeed, something == 15 as set in upgrade handler. */
struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws);
data->something = 15;
printf("Something is: %d\n", data->something);
}
void message_handler(uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode)
{
/* We simply echo whatever data we get */
uws_ws_send(SSL, ws, message, length, opcode);
}
void close_handler(uws_websocket_t *ws, int code, const char *message, size_t length)
{
/* You may access uws_ws_get_user_data(ws) here, but sending or
* doing any kind of I/O with the socket is not valid. */
struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws);
if (data)
{
free(data);
}
}
void drain_handler(uws_websocket_t *ws)
{
/* Check uws_ws_get_buffered_amount(ws) here */
}
void ping_handler(uws_websocket_t *ws, const char *message, size_t length)
{
/* You don't need to handle this one, we automatically respond to pings as per standard */
}
void pong_handler(uws_websocket_t *ws, const char *message, size_t length)
{
/* You don't need to handle this one either */
}
int main()
{
uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){
/* There are example certificates in uWebSockets.js repo */
.key_file_name = "../misc/key.pem",
.cert_file_name = "../misc/cert.pem",
.passphrase = "1234"
});
uws_ws(SSL, app, "/*", (uws_socket_behavior_t){
.compression = uws_compress_options_t::SHARED_COMPRESSOR,
.maxPayloadLength = 16 * 1024,
.idleTimeout = 12,
.maxBackpressure = 1 * 1024 * 1024,
.upgrade = upgrade_handler,
.open = open_handler,
.message = message_handler,
.drain = drain_handler,
.ping = ping_handler,
.pong = pong_handler,
.close = close_handler,
});
uws_app_listen(SSL, app, 9001, listen_handler, NULL);
uws_app_run(SSL, app);
}

View File

@@ -0,0 +1,117 @@
#include "../libuwebsockets.h"
#include <stdio.h>
#include <malloc.h>
#define SSL 1
/* This is a simple WebSocket "sync" upgrade example.
* You may compile it with "WITH_OPENSSL=1 make" or with "make" */
/* uws_ws_get_user_data(ws) returns one of these */
struct PerSocketData
{
/* Define your user data */
int something;
};
void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void *user_data)
{
if (listen_socket)
{
printf("Listening on port wss://localhost:%d\n", config.port);
}
}
void upgrade_handler(uws_res_t *response, uws_req_t *request, uws_socket_context_t *context)
{
/* You may read from req only here, and COPY whatever you need into your PerSocketData.
* PerSocketData is valid from .open to .close event, accessed with uws_ws_get_user_data(ws).
* HttpRequest (req) is ONLY valid in this very callback, so any data you will need later
* has to be COPIED into PerSocketData here. */
/* Immediately upgrading without doing anything "async" before, is simple */
struct PerSocketData *data = (struct PerSocketData *)malloc(sizeof(struct PerSocketData));
data->something = 15;
const char *ws_key = NULL;
const char *ws_protocol = NULL;
const char *ws_extensions = NULL;
size_t ws_key_length = uws_req_get_header(request, "sec-websocket-key", 17, &ws_key);
size_t ws_protocol_length = uws_req_get_header(request, "sec-websocket-protocol", 22, &ws_protocol);
size_t ws_extensions_length = uws_req_get_header(request, "sec-websocket-extensions", 24, &ws_extensions);
uws_res_upgrade(SSL,
response,
(void *)data,
ws_key,
ws_key_length,
ws_protocol,
ws_protocol_length,
ws_extensions,
ws_extensions_length,
context);
}
void open_handler(uws_websocket_t *ws)
{
/* Open event here, you may access uws_ws_get_user_data(ws) which points to a PerSocketData struct.
* Here we simply validate that indeed, something == 15 as set in upgrade handler. */
struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws);
data->something = 15;
printf("Something is: %d\n", data->something);
}
void message_handler(uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode)
{
/* We simply echo whatever data we get */
uws_ws_send(SSL, ws, message, length, opcode);
}
void close_handler(uws_websocket_t *ws, int code, const char *message, size_t length)
{
/* You may access uws_ws_get_user_data(ws) here, but sending or
* doing any kind of I/O with the socket is not valid. */
struct PerSocketData *data = (struct PerSocketData *)uws_ws_get_user_data(SSL, ws);
if (data)
free(data);
}
void drain_handler(uws_websocket_t *ws)
{
/* Check uws_ws_get_buffered_amount(ws) here */
}
void ping_handler(uws_websocket_t *ws, const char *message, size_t length)
{
/* You don't need to handle this one, we automatically respond to pings as per standard */
}
void pong_handler(uws_websocket_t *ws, const char *message, size_t length)
{
/* You don't need to handle this one either */
}
int main()
{
uws_app_t *app = uws_create_app(SSL, (struct us_socket_context_options_t){
/* There are example certificates in uWebSockets.js repo */
.key_file_name = "../misc/key.pem",
.cert_file_name = "../misc/cert.pem",
.passphrase = "1234"
});
uws_ws(SSL, app, "/*", (uws_socket_behavior_t){.compression = uws_compress_options_t::SHARED_COMPRESSOR, .maxPayloadLength = 16 * 1024, .idleTimeout = 12, .maxBackpressure = 1 * 1024 * 1024, .upgrade = upgrade_handler, .open = open_handler, .message = message_handler, .drain = drain_handler, .ping = ping_handler, .pong = pong_handler, .close = close_handler});
uws_app_listen(SSL, app, 9001, listen_handler, NULL);
uws_app_run(SSL, app);
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,260 @@
/*
* Copyright 2022 Ciro Spaciari
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
// clang-format off
#ifndef LIBUWS_CAPI_HEADER
#define LIBUWS_CAPI_HEADER
#include <stddef.h>
#include <stdbool.h>
#include <stdint.h>
#include "libusockets.h"
#ifdef __cplusplus
extern "C"
{
#endif
#ifdef _WIN32
# define DLL_EXPORT __declspec( dllexport )
#else
# define DLL_EXPORT
#endif
DLL_EXPORT typedef enum
{
/* These are not actual compression options */
_COMPRESSOR_MASK = 0x00FF,
_DECOMPRESSOR_MASK = 0x0F00,
/* Disabled, shared, shared are "special" values */
DISABLED = 0,
SHARED_COMPRESSOR = 1,
SHARED_DECOMPRESSOR = 1 << 8,
/* Highest 4 bits describe decompressor */
DEDICATED_DECOMPRESSOR_32KB = 15 << 8,
DEDICATED_DECOMPRESSOR_16KB = 14 << 8,
DEDICATED_DECOMPRESSOR_8KB = 13 << 8,
DEDICATED_DECOMPRESSOR_4KB = 12 << 8,
DEDICATED_DECOMPRESSOR_2KB = 11 << 8,
DEDICATED_DECOMPRESSOR_1KB = 10 << 8,
DEDICATED_DECOMPRESSOR_512B = 9 << 8,
/* Same as 32kb */
DEDICATED_DECOMPRESSOR = 15 << 8,
/* Lowest 8 bit describe compressor */
DEDICATED_COMPRESSOR_3KB = 9 << 4 | 1,
DEDICATED_COMPRESSOR_4KB = 9 << 4 | 2,
DEDICATED_COMPRESSOR_8KB = 10 << 4 | 3,
DEDICATED_COMPRESSOR_16KB = 11 << 4 | 4,
DEDICATED_COMPRESSOR_32KB = 12 << 4 | 5,
DEDICATED_COMPRESSOR_64KB = 13 << 4 | 6,
DEDICATED_COMPRESSOR_128KB = 14 << 4 | 7,
DEDICATED_COMPRESSOR_256KB = 15 << 4 | 8,
/* Same as 256kb */
DEDICATED_COMPRESSOR = 15 << 4 | 8
} uws_compress_options_t;
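/* Worked example: combining a dedicated 16 KB compressor with a dedicated
 * 32 KB decompressor gives
 * DEDICATED_COMPRESSOR_16KB | DEDICATED_DECOMPRESSOR_32KB == (11 << 4 | 4) | (15 << 8);
 * (value & _COMPRESSOR_MASK) recovers the compressor half and
 * (value & _DECOMPRESSOR_MASK) the decompressor half. */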
DLL_EXPORT typedef enum
{
CONTINUATION = 0,
TEXT = 1,
BINARY = 2,
CLOSE = 8,
PING = 9,
PONG = 10
} uws_opcode_t;
DLL_EXPORT typedef enum
{
BACKPRESSURE,
SUCCESS,
DROPPED
} uws_sendstatus_t;
DLL_EXPORT typedef struct
{
int port;
const char *host;
int options;
} uws_app_listen_config_t;
DLL_EXPORT typedef struct {
bool ok;
bool has_responded;
} uws_try_end_result_t;
DLL_EXPORT struct uws_app_s;
DLL_EXPORT struct uws_req_s;
DLL_EXPORT struct uws_res_s;
DLL_EXPORT struct uws_websocket_s;
DLL_EXPORT struct uws_header_iterator_s;
DLL_EXPORT typedef struct uws_app_s uws_app_t;
DLL_EXPORT typedef struct uws_req_s uws_req_t;
DLL_EXPORT typedef struct uws_res_s uws_res_t;
DLL_EXPORT typedef struct uws_socket_context_s uws_socket_context_t;
DLL_EXPORT typedef struct uws_websocket_s uws_websocket_t;
DLL_EXPORT typedef void (*uws_websocket_handler)(uws_websocket_t *ws, void* user_data);
DLL_EXPORT typedef void (*uws_websocket_message_handler)(uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode, void* user_data);
DLL_EXPORT typedef void (*uws_websocket_ping_pong_handler)(uws_websocket_t *ws, const char *message, size_t length, void* user_data);
DLL_EXPORT typedef void (*uws_websocket_close_handler)(uws_websocket_t *ws, int code, const char *message, size_t length, void* user_data);
DLL_EXPORT typedef void (*uws_websocket_upgrade_handler)(uws_res_t *response, uws_req_t *request, uws_socket_context_t *context, void* user_data);
DLL_EXPORT typedef void (*uws_websocket_subscription_handler)(uws_websocket_t *ws, const char *topic_name, size_t topic_name_length, int new_number_of_subscriber, int old_number_of_subscriber, void* user_data);
DLL_EXPORT typedef struct
{
uws_compress_options_t compression;
/* Maximum message size we can receive */
unsigned int maxPayloadLength;
/* 2 minutes timeout is good */
unsigned short idleTimeout;
/* 64kb backpressure is probably good */
unsigned int maxBackpressure;
bool closeOnBackpressureLimit;
/* This one depends on kernel timeouts and is a bad default */
bool resetIdleTimeoutOnSend;
/* A good default, esp. for newcomers */
bool sendPingsAutomatically;
/* Maximum socket lifetime in seconds before forced closure (defaults to disabled) */
unsigned short maxLifetime;
uws_websocket_upgrade_handler upgrade;
uws_websocket_handler open;
uws_websocket_message_handler message;
uws_websocket_handler drain;
uws_websocket_ping_pong_handler ping;
uws_websocket_ping_pong_handler pong;
uws_websocket_close_handler close;
uws_websocket_subscription_handler subscription;
} uws_socket_behavior_t;
DLL_EXPORT typedef void (*uws_listen_handler)(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void *user_data);
DLL_EXPORT typedef void (*uws_listen_domain_handler)(struct us_listen_socket_t *listen_socket, const char* domain, size_t domain_length, int options, void *user_data);
DLL_EXPORT typedef void (*uws_method_handler)(uws_res_t *response, uws_req_t *request, void *user_data);
DLL_EXPORT typedef void (*uws_filter_handler)(uws_res_t *response, int, void *user_data);
DLL_EXPORT typedef void (*uws_missing_server_handler)(const char *hostname, size_t hostname_length, void *user_data);
DLL_EXPORT typedef void (*uws_get_headers_server_handler)(const char *header_name, size_t header_name_size, const char *header_value, size_t header_value_size, void *user_data);
//Basic HTTP
DLL_EXPORT uws_app_t *uws_create_app(int ssl, struct us_bun_socket_context_options_t options);
DLL_EXPORT void uws_app_destroy(int ssl, uws_app_t *app);
DLL_EXPORT void uws_app_get(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data);
DLL_EXPORT void uws_app_post(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data);
DLL_EXPORT void uws_app_options(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data);
DLL_EXPORT void uws_app_delete(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data);
DLL_EXPORT void uws_app_patch(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data);
DLL_EXPORT void uws_app_put(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data);
DLL_EXPORT void uws_app_head(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data);
DLL_EXPORT void uws_app_connect(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data);
DLL_EXPORT void uws_app_trace(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data);
DLL_EXPORT void uws_app_any(int ssl, uws_app_t *app, const char *pattern, uws_method_handler handler, void *user_data);
DLL_EXPORT void uws_app_run(int ssl, uws_app_t *);
DLL_EXPORT void uws_app_listen(int ssl, uws_app_t *app, int port, uws_listen_handler handler, void *user_data);
DLL_EXPORT void uws_app_listen_with_config(int ssl, uws_app_t *app, uws_app_listen_config_t config, uws_listen_handler handler, void *user_data);
DLL_EXPORT void uws_app_listen_domain(int ssl, uws_app_t *app, const char *domain, size_t domain_length, uws_listen_domain_handler handler, void *user_data);
DLL_EXPORT void uws_app_listen_domain_with_options(int ssl, uws_app_t *app, const char *domain,size_t domain_length, int options, uws_listen_domain_handler handler, void *user_data);
DLL_EXPORT void uws_app_domain(int ssl, uws_app_t *app, const char* server_name, size_t server_name_length);
DLL_EXPORT bool uws_constructor_failed(int ssl, uws_app_t *app);
DLL_EXPORT unsigned int uws_num_subscribers(int ssl, uws_app_t *app, const char *topic, size_t topic_length);
DLL_EXPORT bool uws_publish(int ssl, uws_app_t *app, const char *topic, size_t topic_length, const char *message, size_t message_length, uws_opcode_t opcode, bool compress);
DLL_EXPORT void *uws_get_native_handle(int ssl, uws_app_t *app);
DLL_EXPORT void uws_remove_server_name(int ssl, uws_app_t *app, const char *hostname_pattern, size_t hostname_pattern_length);
DLL_EXPORT void uws_add_server_name(int ssl, uws_app_t *app, const char *hostname_pattern, size_t hostname_pattern_length);
DLL_EXPORT void uws_add_server_name_with_options(int ssl, uws_app_t *app, const char *hostname_pattern, size_t hostname_pattern_length, struct us_bun_socket_context_options_t options);
DLL_EXPORT void uws_missing_server_name(int ssl, uws_app_t *app, uws_missing_server_handler handler, void *user_data);
DLL_EXPORT void uws_filter(int ssl, uws_app_t *app, uws_filter_handler handler, void *user_data);
//WebSocket
DLL_EXPORT void uws_ws(int ssl, uws_app_t *app, const char *pattern, uws_socket_behavior_t behavior, void* user_data);
DLL_EXPORT void *uws_ws_get_user_data(int ssl, uws_websocket_t *ws);
DLL_EXPORT void uws_ws_close(int ssl, uws_websocket_t *ws);
DLL_EXPORT uws_sendstatus_t uws_ws_send(int ssl, uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode);
DLL_EXPORT uws_sendstatus_t uws_ws_send_with_options(int ssl, uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode, bool compress, bool fin);
DLL_EXPORT uws_sendstatus_t uws_ws_send_fragment(int ssl, uws_websocket_t *ws, const char *message, size_t length, bool compress);
DLL_EXPORT uws_sendstatus_t uws_ws_send_first_fragment(int ssl, uws_websocket_t *ws, const char *message, size_t length, bool compress);
DLL_EXPORT uws_sendstatus_t uws_ws_send_first_fragment_with_opcode(int ssl, uws_websocket_t *ws, const char *message, size_t length, uws_opcode_t opcode, bool compress);
DLL_EXPORT uws_sendstatus_t uws_ws_send_last_fragment(int ssl, uws_websocket_t *ws, const char *message, size_t length, bool compress);
DLL_EXPORT void uws_ws_end(int ssl, uws_websocket_t *ws, int code, const char *message, size_t length);
DLL_EXPORT void uws_ws_cork(int ssl, uws_websocket_t *ws, void (*handler)(void *user_data), void *user_data);
DLL_EXPORT bool uws_ws_subscribe(int ssl, uws_websocket_t *ws, const char *topic, size_t length);
DLL_EXPORT bool uws_ws_unsubscribe(int ssl, uws_websocket_t *ws, const char *topic, size_t length);
DLL_EXPORT bool uws_ws_is_subscribed(int ssl, uws_websocket_t *ws, const char *topic, size_t length);
DLL_EXPORT void uws_ws_iterate_topics(int ssl, uws_websocket_t *ws, void (*callback)(const char *topic, size_t length, void *user_data), void *user_data);
DLL_EXPORT bool uws_ws_publish(int ssl, uws_websocket_t *ws, const char *topic, size_t topic_length, const char *message, size_t message_length);
DLL_EXPORT bool uws_ws_publish_with_options(int ssl, uws_websocket_t *ws, const char *topic, size_t topic_length, const char *message, size_t message_length, uws_opcode_t opcode, bool compress);
DLL_EXPORT unsigned int uws_ws_get_buffered_amount(int ssl, uws_websocket_t *ws);
DLL_EXPORT size_t uws_ws_get_remote_address(int ssl, uws_websocket_t *ws, const char **dest);
DLL_EXPORT size_t uws_ws_get_remote_address_as_text(int ssl, uws_websocket_t *ws, const char **dest);
DLL_EXPORT void uws_res_get_remote_address_info(uws_res_t *res, const char **dest, size_t *length, unsigned int *port);
//Response
DLL_EXPORT void uws_res_end(int ssl, uws_res_t *res, const char *data, size_t length, bool close_connection);
DLL_EXPORT uws_try_end_result_t uws_res_try_end(int ssl, uws_res_t *res, const char *data, size_t length, uint64_t total_size, bool close_connection);
DLL_EXPORT void uws_res_cork(int ssl, uws_res_t *res, void(*callback)(uws_res_t *res, void* user_data) ,void* user_data);
DLL_EXPORT void uws_res_pause(int ssl, uws_res_t *res);
DLL_EXPORT void uws_res_resume(int ssl, uws_res_t *res);
DLL_EXPORT void uws_res_write_continue(int ssl, uws_res_t *res);
DLL_EXPORT void uws_res_write_status(int ssl, uws_res_t *res, const char *status, size_t length);
DLL_EXPORT void uws_res_write_header(int ssl, uws_res_t *res, const char *key, size_t key_length, const char *value, size_t value_length);
DLL_EXPORT void uws_res_write_header_int(int ssl, uws_res_t *res, const char *key, size_t key_length, uint64_t value);
DLL_EXPORT void uws_res_end_without_body(int ssl, uws_res_t *res, bool close_connection);
DLL_EXPORT bool uws_res_write(int ssl, uws_res_t *res, const char *data, size_t length);
DLL_EXPORT uint64_t uws_res_get_write_offset(int ssl, uws_res_t *res);
DLL_EXPORT void uws_res_override_write_offset(int ssl, uws_res_t *res, uint64_t offset);
DLL_EXPORT bool uws_res_has_responded(int ssl, uws_res_t *res);
DLL_EXPORT void uws_res_on_writable(int ssl, uws_res_t *res, bool (*handler)(uws_res_t *res, uint64_t, void *optional_data), void *user_data);
DLL_EXPORT void uws_res_on_aborted(int ssl, uws_res_t *res, void (*handler)(uws_res_t *res, void *optional_data), void *optional_data);
DLL_EXPORT void uws_res_on_data(int ssl, uws_res_t *res, void (*handler)(uws_res_t *res, const char *chunk, size_t chunk_length, bool is_end, void *optional_data), void *optional_data);
DLL_EXPORT void uws_res_upgrade(int ssl, uws_res_t *res, void *data, const char *sec_web_socket_key, size_t sec_web_socket_key_length, const char *sec_web_socket_protocol, size_t sec_web_socket_protocol_length, const char *sec_web_socket_extensions, size_t sec_web_socket_extensions_length, uws_socket_context_t *ws);
DLL_EXPORT size_t uws_res_get_remote_address(int ssl, uws_res_t *res, const char **dest);
DLL_EXPORT size_t uws_res_get_remote_address_as_text(int ssl, uws_res_t *res, const char **dest);
#ifdef UWS_WITH_PROXY
DLL_EXPORT size_t uws_res_get_proxied_remote_address(int ssl, uws_res_t *res, const char **dest);
DLL_EXPORT size_t uws_res_get_proxied_remote_address_as_text(int ssl, uws_res_t *res, const char **dest);
#endif
DLL_EXPORT void *uws_res_get_native_handle(int ssl, uws_res_t *res);
//Request
DLL_EXPORT bool uws_req_is_ancient(uws_req_t *res);
DLL_EXPORT bool uws_req_get_yield(uws_req_t *res);
DLL_EXPORT void uws_req_set_yield(uws_req_t *res, bool yield);
DLL_EXPORT size_t uws_req_get_url(uws_req_t *res, const char **dest);
DLL_EXPORT size_t uws_req_get_full_url(uws_req_t *res, const char **dest);
DLL_EXPORT size_t uws_req_get_method(uws_req_t *res, const char **dest);
DLL_EXPORT size_t uws_req_get_case_sensitive_method(uws_req_t *res, const char **dest);
DLL_EXPORT size_t uws_req_get_header(uws_req_t *res, const char *lower_case_header, size_t lower_case_header_length, const char **dest);
DLL_EXPORT void uws_req_for_each_header(uws_req_t *res, uws_get_headers_server_handler handler, void *user_data);
DLL_EXPORT size_t uws_req_get_query(uws_req_t *res, const char *key, size_t key_length, const char **dest);
DLL_EXPORT size_t uws_req_get_parameter(uws_req_t *res, unsigned short index, const char **dest);
DLL_EXPORT struct us_loop_t *uws_get_loop();
DLL_EXPORT struct us_loop_t *uws_get_loop_with_native(void* existing_native_loop);
DLL_EXPORT void uws_loop_defer(struct us_loop_t *loop, void( cb(void *user_data) ), void *user_data);
#ifdef __cplusplus
}
#endif
#endif
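To make the C API above concrete, here is a minimal sketch (not part of the diff) of a plain-HTTP server against these declarations. It assumes the header is available as "libuwsockets.h" (filename assumed) and that a zero-initialized options struct is valid when ssl == 0; the handler names are illustrative.

/* Minimal non-SSL server against the C API declared above (sketch). */
#include "libuwsockets.h" /* filename assumed */
#include <stdio.h>

static void get_handler(uws_res_t *res, uws_req_t *req, void *user_data)
{
    (void)req; (void)user_data;
    /* Send a short body and finish the response */
    uws_res_end(0, res, "Hello from the C API", 20, false);
}

static void listen_handler(struct us_listen_socket_t *listen_socket, uws_app_listen_config_t config, void *user_data)
{
    (void)user_data;
    if (listen_socket)
        printf("Listening on port %d\n", config.port);
}

int main(void)
{
    struct us_bun_socket_context_options_t options = {0};
    uws_app_t *app = uws_create_app(0, options); /* ssl == 0: plain HTTP */
    uws_app_get(0, app, "/*", get_handler, NULL);
    uws_app_listen(0, app, 3000, listen_handler, NULL);
    uws_app_run(0, app); /* blocks, running the event loop */
    return 0;
}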

View File

@@ -124,7 +124,7 @@ private:
/* Signal broken HTTP request only if we have a pending request */
if (httpResponseData->onAborted) {
httpResponseData->onAborted((HttpResponse<SSL> *)s, httpResponseData->userData);
httpResponseData->onAborted();
}
/* Destruct socket ext */
@@ -258,7 +258,7 @@ private:
}
/* We might respond in the handler, so do not change timeout after this */
httpResponseData->inStream(static_cast<HttpResponse<SSL>*>(user), data.data(), data.length(), fin, httpResponseData->userData);
httpResponseData->inStream(data, fin);
/* Was the socket closed? */
if (us_socket_is_closed(SSL, (struct us_socket_t *) user)) {
@@ -366,7 +366,7 @@ private:
/* We expect the developer to return whether or not write was successful (true).
* If write was never called, the developer should still return true so that we may drain. */
bool success = httpResponseData->callOnWritable((HttpResponse<SSL> *)asyncSocket, httpResponseData->offset);
bool success = httpResponseData->callOnWritable(httpResponseData->offset);
/* The developer indicated that their onWritable failed. */
if (!success) {

View File

@@ -558,11 +558,10 @@ public:
}
/* Attach handler for writable HTTP response */
HttpResponse *onWritable(void* userData, HttpResponseData<SSL>::OnWritableCallback handler) {
HttpResponse *onWritable(MoveOnlyFunction<bool(uint64_t)> &&handler) {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
httpResponseData->userData = userData;
httpResponseData->onWritable = handler;
httpResponseData->onWritable = std::move(handler);
return this;
}
@@ -575,31 +574,17 @@ public:
}
/* Attach handler for aborted HTTP request */
HttpResponse *onAborted(void* userData, HttpResponseData<SSL>::OnAbortedCallback handler) {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
httpResponseData->userData = userData;
httpResponseData->onAborted = handler;
return this;
}
HttpResponse* clearOnWritableAndAborted() {
HttpResponse *onAborted(MoveOnlyFunction<void()> &&handler) {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
httpResponseData->onWritable = nullptr;
httpResponseData->onAborted = nullptr;
httpResponseData->onAborted = std::move(handler);
return this;
}
HttpResponse* clearOnAborted() {
HttpResponseData<SSL> *httpResponseData = getHttpResponseData();
httpResponseData->onAborted = nullptr;
return this;
}
/* Attach a read handler for data sent. Will be called with FIN set true if last segment. */
void onData(void* userData, HttpResponseData<SSL>::OnDataCallback handler) {
void onData(MoveOnlyFunction<void(std::string_view, bool)> &&handler) {
HttpResponseData<SSL> *data = getHttpResponseData();
data->userData = userData;
data->inStream = handler;
data->inStream = std::move(handler);
/* Always reset this counter here */
data->received_bytes_per_timeout = 0;

View File

@@ -33,10 +33,6 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
template <bool> friend struct HttpResponse;
template <bool> friend struct HttpContext;
public:
using OnWritableCallback = bool (*)(uWS::HttpResponse<SSL>*, uint64_t, void*);
using OnAbortedCallback = void (*)(uWS::HttpResponse<SSL>*, void*);
using OnDataCallback = void (*)(uWS::HttpResponse<SSL>* response, const char* chunk, size_t chunk_length, bool, void*);
/* When we are done with a response we mark it like so */
void markDone() {
onAborted = nullptr;
@@ -50,15 +46,15 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
}
/* Caller of onWritable. It is possible onWritable calls markDone so we need to borrow it. */
bool callOnWritable( uWS::HttpResponse<SSL>* response, uint64_t offset) {
bool callOnWritable(uint64_t offset) {
/* Borrow real onWritable */
auto* borrowedOnWritable = std::move(onWritable);
MoveOnlyFunction<bool(uint64_t)> borrowedOnWritable = std::move(onWritable);
/* Set onWritable to placeholder */
onWritable = [](uWS::HttpResponse<SSL>*, uint64_t, void*) {return true;};
onWritable = [](uint64_t) {return true;};
/* Run borrowed onWritable */
bool ret = borrowedOnWritable(response, offset, userData);
bool ret = borrowedOnWritable(offset);
/* If we still have onWritable (the placeholder) then move back the real one */
if (onWritable) {
@@ -78,13 +74,10 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
HTTP_CONNECTION_CLOSE = 16 // used
};
/* Shared context pointer */
void* userData = nullptr;
/* Per socket event handlers */
OnWritableCallback onWritable = nullptr;
OnAbortedCallback onAborted = nullptr;
OnDataCallback inStream = nullptr;
MoveOnlyFunction<bool(uint64_t)> onWritable;
MoveOnlyFunction<void()> onAborted;
MoveOnlyFunction<void(std::string_view, bool)> inStream; // onData
/* Outgoing offset */
uint64_t offset = 0;
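The HttpResponseData change above stores the per-response handlers as MoveOnlyFunction objects, and callOnWritable borrows the stored handler before invoking it so the callback may safely clear or replace it (for example via markDone) while it runs; the real handler is moved back only if the placeholder is still installed. A standalone sketch of that borrow-then-restore pattern, with std::function standing in for uWS's MoveOnlyFunction and illustrative names:

// Borrow-then-restore for a callback that may reset itself re-entrantly.
#include <cstdint>
#include <functional>
#include <utility>

struct ResponseData {
    std::function<bool(uint64_t)> onWritable;

    bool callOnWritable(uint64_t offset) {
        // Borrow the real handler so it may call markDone() on us.
        auto borrowed = std::move(onWritable);
        // Leave a harmless placeholder while the borrowed handler runs.
        onWritable = [](uint64_t) { return true; };
        bool ret = borrowed(offset);
        // If the placeholder survived, move the real handler back.
        if (onWritable) {
            onWritable = std::move(borrowed);
        }
        return ret;
    }

    void markDone() { onWritable = nullptr; }
};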

View File

@@ -95,14 +95,13 @@ private:
// This is both a performance thing, and also to prevent freeing some things which are not meant to be freed
// such as uv_tty_t
if(loop && cleanMe && !bun_is_exiting()) {
cleanMe = false;
loop->free();
}
}
Loop *loop = nullptr;
bool cleanMe = false;
};
static LoopCleaner &getLazyLoop() {
static thread_local LoopCleaner lazyLoop;
return lazyLoop;
@@ -127,12 +126,6 @@ public:
return getLazyLoop().loop;
}
static void clearLoopAtThreadExit() {
if (getLazyLoop().cleanMe) {
getLazyLoop().loop->free();
}
}
/* Freeing the default loop should be done once */
void free() {
LoopData *loopData = (LoopData *) us_loop_ext((us_loop_t *) this);
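The Loop diff above folds thread-exit cleanup into the thread_local LoopCleaner destructor (guarded by cleanMe and bun_is_exiting()) instead of the explicit clearLoopAtThreadExit() hook. A reduced sketch of the pattern, with illustrative stand-in names:

// Per-thread cleanup via a thread_local destructor (Resource/Cleaner are
// stand-ins for Loop/LoopCleaner; the exiting-process guard is omitted).
#include <cstdio>

struct Resource {
    void free() { std::puts("resource freed at thread exit"); }
};

struct Cleaner {
    Resource *resource = nullptr;
    bool cleanMe = false;
    ~Cleaner() {
        // Runs automatically when the owning thread exits.
        if (resource && cleanMe) {
            cleanMe = false;
            resource->free();
        }
    }
};

static Cleaner &getLazyCleaner() {
    static thread_local Cleaner cleaner; // one per thread, destroyed at thread exit
    return cleaner;
}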

View File

@@ -20,8 +20,6 @@
#ifndef UWS_PERMESSAGEDEFLATE_H
#define UWS_PERMESSAGEDEFLATE_H
#define UWS_USE_LIBDEFLATE 1
#include <cstdint>
#include <cstring>
@@ -136,9 +134,6 @@ struct ZlibContext {
struct DeflationStream {
z_stream deflationStream = {};
#ifdef UWS_USE_LIBDEFLATE
unsigned char reset_buffer[4096 + 1];
#endif
DeflationStream(CompressOptions compressOptions) {
@@ -159,11 +154,13 @@ struct DeflationStream {
/* Run a fast path in case of shared_compressor */
if (reset) {
size_t written = 0;
written = libdeflate_deflate_compress(zlibContext->compressor, raw.data(), raw.length(), reset_buffer, 4096);
static unsigned char buf[1024 + 1];
written = libdeflate_deflate_compress(zlibContext->compressor, raw.data(), raw.length(), buf, 1024);
if (written) {
memcpy(&reset_buffer[written], "\x00", 1);
return std::string_view((char *) reset_buffer, written + 1);
memcpy(&buf[written], "\x00", 1);
return std::string_view((char *) buf, written + 1);
}
}
#endif
@@ -217,9 +214,6 @@ struct DeflationStream {
struct InflationStream {
z_stream inflationStream = {};
#ifdef UWS_USE_LIBDEFLATE
char buf[4096];
#endif
InflationStream(CompressOptions compressOptions) {
/* Inflation windowBits are the top 8 bits of the 16 bit compressOptions */
@@ -236,12 +230,13 @@ struct InflationStream {
#ifdef UWS_USE_LIBDEFLATE
/* Try fast path first */
size_t written = 0;
static char buf[1024];
/* We have to pad 9 bytes and restore those bytes when done since 9 is more than 6 of next WebSocket message */
char tmp[9];
memcpy(tmp, (char *) compressed.data() + compressed.length(), 9);
memcpy((char *) compressed.data() + compressed.length(), "\x00\x00\xff\xff\x01\x00\x00\xff\xff", 9);
libdeflate_result res = libdeflate_deflate_decompress(zlibContext->decompressor, compressed.data(), compressed.length() + 9, buf, 4096, &written);
libdeflate_result res = libdeflate_deflate_decompress(zlibContext->decompressor, compressed.data(), compressed.length() + 9, buf, 1024, &written);
memcpy((char *) compressed.data() + compressed.length(), tmp, 9);
if (res == 0) {
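The inflation fast path above appends a 9-byte deflate tail (the 00 00 ff ff flush marker followed by an empty stored block) directly after the compressed payload, saving the bytes it overwrites and restoring them afterwards. A self-contained sketch of just that save/patch/restore step, under the stated assumption that the buffer has 9 writable bytes of slack past the payload:

// Pad-and-restore: temporarily append a deflate tail in place.
#include <cstddef>
#include <cstring>

void with_deflate_tail(char *buf, std::size_t len) {
    char saved[9];
    std::memcpy(saved, buf + len, 9); // save what follows the payload
    std::memcpy(buf + len, "\x00\x00\xff\xff\x01\x00\x00\xff\xff", 9);
    // ... run the raw-deflate decompressor over buf[0 .. len + 9) here ...
    std::memcpy(buf + len, saved, 9); // restore the original bytes
}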

View File

@@ -79,10 +79,6 @@ Build-Dependency `
-Script "lshpack" `
-Outputs @("lshpack.lib")
Build-Dependency `
-Script "libdeflate" `
-Outputs @("deflate.lib")
if (!($Script:DidAnything)) {
Write-Host "(run with -Force to rebuild all)"
}

View File

@@ -3,7 +3,7 @@ set -eo pipefail
source "$(dirname -- "${BASH_SOURCE[0]}")/env.sh"
if [[ "$CI" ]]; then
$(dirname -- "${BASH_SOURCE[0]}")/update-submodules.sh
$(dirname -- "${BASH_SOURCE[0]}")/update-submodules.sh
fi
FORCE=
@@ -36,11 +36,9 @@ fi
dep() {
local submodule="$1"
local script="$2"
CACHE_KEY=
if [ "$CACHE" == "1" ]; then
local hash="$(echo "$SUBMODULES" | grep "$submodule" | awk '{print $1}')"
local os="$(uname -s | tr '[:upper:]' '[:lower:]')"
local arch="$(uname -m)"
CACHE_KEY="$submodule/$hash-$os-$arch-$CPU_TARGET"
CACHE_KEY="$submodule/$(echo "$SUBMODULES" | grep "$submodule" | git hash-object --stdin)"
fi
if [ -z "$FORCE" ]; then
HAS_ALL_DEPS=1
@@ -94,7 +92,6 @@ dep mimalloc mimalloc libmimalloc.a libmimalloc.o
dep tinycc tinycc libtcc.a
dep zlib zlib libz.a
dep zstd zstd libzstd.a
dep libdeflate libdeflate libdeflate.a
dep ls-hpack lshpack liblshpack.a
if [ "$BUILT_ANY" -eq 0 ]; then

View File

@@ -3,7 +3,6 @@ $ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pip
Push-Location (Join-Path $BUN_DEPS_DIR 'boringssl')
try {
Remove-Item -ErrorAction SilentlyContinue -Recurse -Force build
Set-Location (mkdir -Force build)
Run cmake @CMAKE_FLAGS ..

View File

@@ -6,7 +6,7 @@ cd $BUN_DEPS_DIR/boringssl
mkdir -p build
cd build
cmake "${CMAKE_FLAGS[@]}" -GNinja ..
cmake "${CMAKE_FLAGS[@]}" -DCMAKE_EXE_LINKER_FLAGS="-fuse-ld=lld" -GNinja ..
ninja libcrypto.a libssl.a libdecrepit.a
cp **/libcrypto.a $BUN_DEPS_OUT_DIR/libcrypto.a

View File

@@ -1,29 +1,29 @@
param (
[switch] $Baseline = $False,
[switch] $Fast = $False
)
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
.\scripts\env.ps1
$Tag = If ($Baseline) { "-Baseline" } Else { "" }
$UseBaselineBuild = If ($Baseline) { "ON" } Else { "OFF" }
$UseLto = If ($Fast) { "OFF" } Else { "ON" }
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
$CANARY_REVISION = 0
.\scripts\env.ps1 $Tag
.\scripts\update-submodules.ps1
.\scripts\build-libuv.ps1 -CloneOnly $True
# libdeflate.h is needed otherwise the build fails
git submodule update --init --recursive --progress --depth=1 --checkout src/deps/libdeflate
cd build
cmake .. @CMAKE_FLAGS `
-G Ninja `
-DCMAKE_BUILD_TYPE=Release `
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
-DNO_CODEGEN=0 `
-DNO_CONFIGURE_DEPENDS=1 `
-DBUN_CPP_ONLY=1
"-DUSE_BASELINE_BUILD=${UseBaselineBuild}" `
"-DUSE_LTO=${UseLto}" `
"-DCANARY=${CANARY_REVISION}" `
-DBUN_CPP_ONLY=1 $Flags
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
.\compile-cpp-only.ps1 -v -j $env:CPUS
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }
# HACK: For some reason, the buildkite agent is hanging when uploading bun-cpp-objects.a
# Best guess is that there is an issue when uploading files larger than 500 MB
#
# For now, use FileSplitter to split the file into smaller chunks:
# https://www.powershellgallery.com/packages/FileSplitter/1.3
if ($env:BUILDKITE) {
Split-File -Path (Resolve-Path "bun-cpp-objects.a") -PartSizeBytes "50MB" -Verbose
}
.\compile-cpp-only.ps1 -v
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }

scripts/build-bun-cpp.sh (Executable file, +48 lines)
View File

@@ -0,0 +1,48 @@
#!/usr/bin/env bash
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
export USE_LTO="${USE_LTO:-ON}"
case "$(uname -m)" in
aarch64|arm64)
export CPU_TARGET="${CPU_TARGET:-native}"
;;
*)
export CPU_TARGET="${CPU_TARGET:-haswell}"
;;
esac
while [[ $# -gt 0 ]]; do
case "$1" in
--fast|--no-lto)
export USE_LTO="OFF"
shift
;;
--baseline)
export CPU_TARGET="nehalem"
shift
;;
--cpu)
export CPU_TARGET="$2"
shift
shift
;;
*|-*|--*)
echo "Unknown option $1"
exit 1
;;
esac
done
mkdir -p build
cd build
mkdir -p tmp_modules tmp_functions js codegen
cmake .. \
-GNinja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=${USE_LTO} \
-DCPU_TARGET=${CPU_TARGET} \
-DBUN_CPP_ONLY=1 \
-DNO_CONFIGURE_DEPENDS=1
chmod +x ./compile-cpp-only.sh
bash ./compile-cpp-only.sh -v

scripts/build-bun-zig.sh (Executable file, +95 lines)
View File

@@ -0,0 +1,95 @@
#!/usr/bin/env bash
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
cwd=$(pwd)
zig=
if [[ "$CI" ]]; then
# Since the zig build depends on files from the zig submodule,
# make sure to update the submodule before building.
git submodule update --init --recursive --progress --depth=1 --checkout src/deps/zig
# Also update the correct version of zig in the submodule.
$(dirname -- "${BASH_SOURCE[0]}")/download-zig.sh
fi
if [ -f "$cwd/.cache/zig/zig" ]; then
zig="$cwd/.cache/zig/zig"
else
zig=$(which zig)
fi
ZIG_OPTIMIZE="${ZIG_OPTIMIZE:-ReleaseFast}"
CANARY="${CANARY:-0}"
GIT_SHA="${GIT_SHA:-$(git rev-parse HEAD)}"
BUILD_MACHINE_ARCH="${BUILD_MACHINE_ARCH:-$(uname -m)}"
DOCKER_MACHINE_ARCH=""
if [[ "$BUILD_MACHINE_ARCH" == "x86_64" || "$BUILD_MACHINE_ARCH" == "amd64" ]]; then
BUILD_MACHINE_ARCH="x86_64"
DOCKER_MACHINE_ARCH="amd64"
elif [[ "$BUILD_MACHINE_ARCH" == "aarch64" || "$BUILD_MACHINE_ARCH" == "arm64" ]]; then
BUILD_MACHINE_ARCH="aarch64"
DOCKER_MACHINE_ARCH="arm64"
fi
TARGET_OS="${1:-linux}"
TARGET_ARCH="${2:-x64}"
TARGET_CPU="${3:-${CPU_TARGET:-native}}"
BUILDARCH=""
if [[ "$TARGET_ARCH" == "x64" || "$TARGET_ARCH" == "x86_64" || "$TARGET_ARCH" == "amd64" ]]; then
TARGET_ARCH="x86_64"
BUILDARCH="amd64"
elif [[ "$TARGET_ARCH" == "aarch64" || "$TARGET_ARCH" == "arm64" ]]; then
TARGET_ARCH="aarch64"
BUILDARCH="arm64"
fi
TRIPLET=""
if [[ "$TARGET_OS" == "linux" ]]; then
TRIPLET="$TARGET_ARCH-linux-gnu"
elif [[ "$TARGET_OS" == "darwin" ]]; then
TRIPLET="$TARGET_ARCH-macos-none"
elif [[ "$TARGET_OS" == "windows" ]]; then
TRIPLET="$TARGET_ARCH-windows-msvc"
fi
echo "--- Building identifier-cache"
$zig run src/js_lexer/identifier_data.zig
echo "--- Building node-fallbacks"
cd src/node-fallbacks
bun install --frozen-lockfile
bun run build
cd "$cwd"
echo "--- Building codegen"
bun install --frozen-lockfile
make runtime_js fallback_decoder bun_error
echo "--- Building modules"
mkdir -p build
bun run src/codegen/bundle-modules.ts --debug=OFF build
echo "--- Building zig"
cd build
cmake .. \
-GNinja \
-DCMAKE_BUILD_TYPE=Release \
-DUSE_LTO=ON \
-DZIG_OPTIMIZE="${ZIG_OPTIMIZE}" \
-DGIT_SHA="${GIT_SHA}" \
-DARCH="${TARGET_ARCH}" \
-DBUILDARCH="${BUILDARCH}" \
-DCPU_TARGET="${TARGET_CPU}" \
-DZIG_TARGET="${TRIPLET}" \
-DASSERTIONS="OFF" \
-DWEBKIT_DIR="omit" \
-DNO_CONFIGURE_DEPENDS=1 \
-DNO_CODEGEN=1 \
-DBUN_ZIG_OBJ_DIR="$cwd/build" \
-DCANARY="$CANARY" \
-DZIG_LIB_DIR=src/deps/zig/lib
ONLY_ZIG=1 ninja "$cwd/build/bun-zig.o" -v

View File

@@ -1,12 +1,10 @@
#!/usr/bin/env bash
set -exo pipefail
export FORCE_PIC=1
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
cd $BUN_DEPS_DIR/c-ares
rm -rf build CMakeCache.txt CMakeFiles
rm -rf build
mkdir -p build
cd build
@@ -14,9 +12,8 @@ cd build
cmake "${CMAKE_FLAGS[@]}" .. \
-DCMAKE_INSTALL_LIBDIR=lib \
-DCARES_STATIC=ON \
-DCARES_STATIC_PIC=OFF \
-DCARES_STATIC_PIC=ON \
-DCARES_SHARED=OFF \
-DCARES_BUILD_TOOLS=ON \
-G "Ninja"
ninja

View File

@@ -1,6 +1,5 @@
#!/usr/bin/env bash
set -exo pipefail
export FORCE_PIC=1
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
mkdir -p $BUN_DEPS_OUT_DIR

View File

@@ -1,16 +0,0 @@
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
. (Join-Path $PSScriptRoot "env.ps1")
Push-Location (Join-Path $BUN_DEPS_DIR 'libdeflate')
try {
Remove-Item CMakeCache.txt, CMakeFiles, build -Recurse -ErrorAction SilentlyContinue
mkdir -Force build
Run cmake -S "." -B build @CMAKE_FLAGS -DLIBDEFLATE_BUILD_STATIC_LIB=ON -DLIBDEFLATE_BUILD_SHARED_LIB=OFF -DLIBDEFLATE_BUILD_GZIP=OFF
Run cmake --build build --clean-first --config Release
# In https://github.com/ebiggers/libdeflate/releases/tag/v1.20, it's outputting libdeflate.a even on Windows
Copy-Item build/deflatestatic.lib $BUN_DEPS_OUT_DIR/deflate.lib
Write-Host "-> deflate.lib"
} finally { Pop-Location }

View File

@@ -1,10 +0,0 @@
#!/usr/bin/env bash
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/libdeflate
rm -rf build CMakeCache.txt CMakeFiles
cmake "${CMAKE_FLAGS[@]}" -DLIBDEFLATE_BUILD_STATIC_LIB=ON -DLIBDEFLATE_BUILD_SHARED_LIB=OFF -DLIBDEFLATE_BUILD_GZIP=OFF -B build -S . -G Ninja
ninja libdeflate.a -C build
cp build/libdeflate.a $BUN_DEPS_OUT_DIR/libdeflate.a

View File

@@ -2,11 +2,11 @@
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
rm -rf CMakeFiles CMakeCache build.ninja
mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/ls-hpack
rm -rf CMakeCache* CMakeFiles
cmake "${CMAKE_FLAGS[@]}" . \
@@ -15,6 +15,6 @@ cmake "${CMAKE_FLAGS[@]}" . \
-DSHARED=0 \
-GNinja
ninja libls-hpack.a
ninja
cp ./libls-hpack.a $BUN_DEPS_OUT_DIR/liblshpack.a

View File

@@ -3,7 +3,6 @@ $ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pip
Push-Location (Join-Path $BUN_DEPS_DIR 'mimalloc')
try {
Remove-Item -ErrorAction SilentlyContinue -Recurse -Force build
Set-Location (mkdir -Force build)
Run cmake .. @CMAKE_FLAGS `

View File

@@ -20,7 +20,10 @@ try {
Run clang-cl -DTCC_TARGET_PE -DTCC_TARGET_X86_64 config.h -DC2STR -o c2str.exe conftest.c
Run .\c2str.exe .\include\tccdefs.h tccdefs_.h
Run clang-cl @($env:CFLAGS -split ' ') libtcc.c -o tcc.obj "-DTCC_TARGET_PE" "-DTCC_TARGET_X86_64" "-O2" "-W2" "-Zi" "-MD" "-GS-" "-c" "-MT"
$Baseline = $env:BUN_DEV_ENV_SET -eq "Baseline=True"
# TODO: -MT
Run clang-cl @($env:CFLAGS -split ' ') libtcc.c -o tcc.obj "-DTCC_TARGET_PE" "-DTCC_TARGET_X86_64" "-O2" "-W2" "-Zi" "-MD" "-GS-" "-c"
Run llvm-lib "tcc.obj" "-OUT:tcc.lib"
Copy-Item tcc.obj $BUN_DEPS_OUT_DIR/tcc.lib

View File

@@ -4,9 +4,10 @@ source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/zlib
rm -rf build
mkdir build
cd build
cmake $CMAKE_FLAGS -G Ninja -DCMAKE_BUILD_TYPE=Release ..
ninja
export CFLAGS="-O3"
if [[ $(uname -s) == 'Darwin' ]]; then
export CFLAGS="$CFLAGS -mmacosx-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET}"
fi
CFLAGS="${CFLAGS}" ./configure --static
make -j${CPUS}
cp ./libz.a $BUN_DEPS_OUT_DIR/libz.a

View File

@@ -7,5 +7,5 @@ mkdir -p $BUN_DEPS_OUT_DIR
cd $BUN_DEPS_DIR/zstd
rm -rf Release CMakeCache.txt CMakeFiles
cmake "${CMAKE_FLAGS[@]}" -DZSTD_BUILD_STATIC=ON -B Release -S build/cmake -G Ninja
ninja libzstd_static -C Release
ninja -C Release
cp Release/lib/libzstd.a $BUN_DEPS_OUT_DIR/libzstd.a

View File

@@ -1,13 +1,17 @@
param(
[switch]$Baseline = $false
param (
[switch] $Baseline = $False,
[switch] $Fast = $False
)
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
$Target = If ($Baseline) { "windows-x64-baseline" } Else { "windows-x64" }
$Tag = "bun-$Target"
$TagSuffix = If ($Baseline) { "-Baseline" } Else { "" }
$UseBaselineBuild = If ($Baseline) { "ON" } Else { "OFF" }
$UseLto = If ($Fast) { "OFF" } Else { "ON" }
.\scripts\env.ps1
.\scripts\env.ps1 $TagSuffix
mkdir -Force build
buildkite-agent artifact download "**" build --step "${Target}-build-zig"
@@ -17,24 +21,29 @@ mv -Force -ErrorAction SilentlyContinue build\build\bun-deps\* build\bun-deps
mv -Force -ErrorAction SilentlyContinue build\build\* build
Set-Location build
# HACK: See scripts/build-bun-cpp.ps1
Join-File -Path "$(Resolve-Path .)\bun-cpp-objects.a" -Verbose -DeletePartFiles
cmake .. @CMAKE_FLAGS `
-G Ninja `
-DCMAKE_BUILD_TYPE=Release `
$CANARY_REVISION = 0
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
-DNO_CODEGEN=1 `
-DNO_CONFIGURE_DEPENDS=1 `
"-DCPU_TARGET=${CPU_TARGET}" `
"-DCANARY=${CANARY_REVISION}" `
-DBUN_LINK_ONLY=1 `
"-DUSE_BASELINE_BUILD=${UseBaselineBuild}" `
"-DUSE_LTO=${UseLto}" `
"-DBUN_DEPS_OUT_DIR=$(Resolve-Path bun-deps)" `
"-DBUN_CPP_ARCHIVE=$(Resolve-Path bun-cpp-objects.a)" `
"-DBUN_ZIG_OBJ_DIR=$(Resolve-Path .)"
"-DBUN_ZIG_OBJ_DIR=$(Resolve-Path .)" `
"$Flags"
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
ninja -v -j $env:CPUS
ninja -v
if ($LASTEXITCODE -ne 0) { throw "Link failed!" }
ls
if ($Fast) {
$Tag = "$Tag-nolto"
}
Set-Location ..
$Dist = mkdir -Force "${Tag}"
cp -r build\bun.exe "$Dist\bun.exe"

scripts/buildkite-link-bun.sh (Executable file, +80 lines)
View File

@@ -0,0 +1,80 @@
#!/usr/bin/env bash
set -exo pipefail
source $(dirname -- "${BASH_SOURCE[0]}")/env.sh
export USE_LTO="${USE_LTO:-ON}"
case "$(uname -m)" in
aarch64|arm64)
export CPU_TARGET="${CPU_TARGET:-native}"
;;
*)
export CPU_TARGET="${CPU_TARGET:-haswell}"
;;
esac
export TAG=""
while [[ $# -gt 0 ]]; do
case "$1" in
--tag)
export TAG="$2"
shift
shift
;;
--fast|--no-lto)
export USE_LTO="OFF"
shift
;;
--baseline)
export CPU_TARGET="nehalem"
shift
;;
--cpu)
export CPU_TARGET="$2"
shift
shift
;;
*|-*|--*)
echo "Unknown option $1"
exit 1
;;
esac
done
if [[ -z "$TAG" ]]; then
echo "--tag <name> is required"
exit 1
fi
rm -rf release
mkdir -p release
buildkite-agent artifact download '**' release --step $TAG-build-deps
buildkite-agent artifact download '**' release --step $TAG-build-zig
buildkite-agent artifact download '**' release --step $TAG-build-cpp
cd release
cmake .. \
-GNinja \
-DCMAKE_BUILD_TYPE=Release \
-DCPU_TARGET=${CPU_TARGET} \
-DUSE_LTO=${USE_LTO} \
-DBUN_LINK_ONLY=1 \
-DBUN_ZIG_OBJ_DIR="$(pwd)/build" \
-DBUN_CPP_ARCHIVE="$(pwd)/build/bun-cpp-objects.a" \
-DBUN_DEPS_OUT_DIR="$(pwd)/build/bun-deps" \
-DNO_CONFIGURE_DEPENDS=1
ninja -v
if [[ "${USE_LTO}" == "OFF" ]]; then
TAG="${TAG}-nolto"
fi
chmod +x bun-profile bun
mkdir -p bun-$TAG-profile/ bun-$TAG/
mv bun-profile bun-$TAG-profile/bun-profile
mv bun bun-$TAG/bun
zip -r bun-$TAG-profile.zip bun-$TAG-profile
zip -r bun-$TAG.zip bun-$TAG
cd ..
mv release/bun-$TAG.zip bun-$TAG.zip
mv release/bun-$TAG-profile.zip bun-$TAG-profile.zip

View File

@@ -1,3 +1,11 @@
param(
[switch]$Baseline = $false
)
if ($ENV:BUN_DEV_ENV_SET -eq "Baseline=True") {
$Baseline = $true
}
$ErrorActionPreference = 'Stop' # Setting strict mode, similar to 'set -euo pipefail' in bash
# this is the environment script for building bun's dependencies
@@ -30,29 +38,25 @@ if($Env:VSCMD_ARG_TGT_ARCH -eq "x86") {
throw "Visual Studio environment is targetting 32 bit. This configuration is definetly a mistake."
}
$ENV:BUN_DEV_ENV_SET = "Baseline=$Baseline";
$BUN_BASE_DIR = if ($env:BUN_BASE_DIR) { $env:BUN_BASE_DIR } else { Join-Path $ScriptDir '..' }
$BUN_DEPS_DIR = if ($env:BUN_DEPS_DIR) { $env:BUN_DEPS_DIR } else { Join-Path $BUN_BASE_DIR 'src\deps' }
$BUN_DEPS_OUT_DIR = if ($env:BUN_DEPS_OUT_DIR) { $env:BUN_DEPS_OUT_DIR } else { Join-Path $BUN_BASE_DIR 'build\bun-deps' }
$CPUS = if ($env:CPUS) { $env:CPUS } else { (Get-CimInstance -Class Win32_Processor).NumberOfCores }
$Lto = if ($env:USE_LTO) { $env:USE_LTO -eq "1" } else { True }
$Baseline = if ($env:USE_BASELINE_BUILD) {
$env:USE_BASELINE_BUILD -eq "1"
} elseif ($env:BUILDKITE_STEP_KEY -match "baseline") {
True
} else {
False
}
$CC = "clang-cl"
$CXX = "clang-cl"
$CFLAGS = '/O2 /Z7 /MT /O2 /Ob2 /DNDEBUG /U_DLL'
$CXXFLAGS = '/O2 /Z7 /MT /O2 /Ob2 /DNDEBUG /U_DLL'
$CFLAGS = '/O2 /Zi '
# $CFLAGS = '/O2 /Z7 /MT'
$CXXFLAGS = '/O2 /Zi '
# $CXXFLAGS = '/O2 /Z7 /MT'
if ($Lto) {
$CXXFLAGS += " -fuse-ld=lld -flto -Xclang -emit-llvm-bc"
$CFLAGS += " -fuse-ld=lld -flto -Xclang -emit-llvm-bc"
if ($env:USE_LTO -eq "1") {
$CXXFLAGS += " -fuse-ld=lld -flto -Xclang -emit-llvm-bc "
$CFLAGS += " -fuse-ld=lld -flto -Xclang -emit-llvm-bc "
}
$CPU_NAME = if ($Baseline) { "nehalem" } else { "haswell" };
@@ -61,32 +65,21 @@ $env:CPU_TARGET = $CPU_NAME
$CFLAGS += " -march=${CPU_NAME}"
$CXXFLAGS += " -march=${CPU_NAME}"
$Canary = If ($env:CANARY) {
$env:CANARY
} ElseIf ($env:BUILDKITE -eq "true") {
(buildkite-agent meta-data get canary)
} Else {
"1"
}
$CMAKE_FLAGS = @(
"-GNinja",
"-DCMAKE_BUILD_TYPE=Release",
"-DCMAKE_C_COMPILER=$CC",
"-DCMAKE_CXX_COMPILER=$CXX",
"-DCMAKE_C_FLAGS=$CFLAGS",
"-DCMAKE_CXX_FLAGS=$CXXFLAGS",
"-DCMAKE_C_FLAGS_RELEASE=$CFLAGS",
"-DCMAKE_CXX_FLAGS_RELEASE=$CXXFLAGS",
"-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded",
"-DCANARY=$Canary"
"-DCMAKE_CXX_FLAGS=$CXXFLAGS"
)
if (Get-Command llvm-lib -ErrorAction SilentlyContinue) {
$AR_CMD = Get-Command llvm-lib -ErrorAction SilentlyContinue
$AR = $AR_CMD.Path
$env:AR = $AR
$CMAKE_FLAGS += "-DCMAKE_AR=$AR"
if ($env:USE_LTO -eq "1") {
if (Get-Command lld-lib -ErrorAction SilentlyContinue) {
$AR = Get-Command lld-lib -ErrorAction SilentlyContinue
$env:AR = $AR
$CMAKE_FLAGS += "-DCMAKE_AR=$AR"
}
}
$env:CC = "clang-cl"
@@ -99,10 +92,6 @@ if ($Baseline) {
$CMAKE_FLAGS += "-DUSE_BASELINE_BUILD=ON"
}
if ($Lto) {
$CMAKE_FLAGS += "-DUSE_LTO=ON"
}
if (Get-Command sccache -ErrorAction SilentlyContinue) {
# Continue with local compiler if sccache has an error
$env:SCCACHE_IGNORE_SERVER_IO_ERROR = "1"

View File

@@ -1,18 +1,12 @@
#!/usr/bin/env bash
# Hack for buildkite sometimes not having the right path
# Hack for Buildkite sometimes not having the right path
if [[ "${CI:-}" == "1" || "${CI:-}" == "true" ]]; then
if [ -f ~/.bashrc ]; then
source ~/.bashrc
fi
fi
if [[ $(uname -s) == 'Darwin' ]]; then
export LLVM_VERSION=18
else
export LLVM_VERSION=16
fi
# this is the environment script for building bun's dependencies
# it sets c compiler and flags
export SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
@@ -24,74 +18,17 @@ export BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR:-$BUN_BASE_DIR/build/bun-deps}
export LC_CTYPE="en_US.UTF-8"
export LC_ALL="en_US.UTF-8"
if [[ $(uname -s) == 'Darwin' ]]; then
export CXX="$(brew --prefix llvm)@$LLVM_VERSION/bin/clang++"
export CC="$(brew --prefix llvm)@$LLVM_VERSION/bin/clang"
export AR="$(brew --prefix llvm)@$LLVM_VERSION/bin/llvm-ar"
export RANLIB="$(brew --prefix llvm)@$LLVM_VERSION/bin/llvm-ranlib"
export LIBTOOL="$(brew --prefix llvm)@$LLVM_VERSION/bin/llvm-libtool-darwin"
export PATH="$(brew --prefix llvm)@$LLVM_VERSION/bin:$PATH"
ln -sf $LIBTOOL "$(brew --prefix llvm)@$LLVM_VERSION/bin/libtool" || true
elif [[ "$CI" != "1" && "$CI" != "true" ]]; then
if [[ -f $SCRIPT_DIR/env.local ]]; then
echo "Sourcing $SCRIPT_DIR/env.local"
source $SCRIPT_DIR/env.local
fi
fi
# this compiler detection could be better
export CC=${CC:-$(which clang-$LLVM_VERSION || which clang || which cc)}
export CXX=${CXX:-$(which clang++-$LLVM_VERSION || which clang++ || which c++)}
export CC=${CC:-$(which clang-16 || which clang || which cc)}
export CXX=${CXX:-$(which clang++-16 || which clang++ || which c++)}
export AR=${AR:-$(which llvm-ar || which ar)}
export CPUS=${CPUS:-$(nproc || sysctl -n hw.ncpu || echo 1)}
export RANLIB=${RANLIB:-$(which llvm-ranlib-$LLVM_VERSION || which llvm-ranlib || which ranlib)}
# on Linux, force using lld as the linker
if [[ $(uname -s) == 'Linux' ]]; then
export LD=${LD:-$(which ld.lld-$LLVM_VERSION || which ld.lld || which ld)}
export LDFLAGS="${LDFLAGS} -fuse-ld=lld "
fi
export CMAKE_CXX_COMPILER=${CXX}
export CMAKE_C_COMPILER=${CC}
export CFLAGS='-O3 -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables '
export CXXFLAGS='-O3 -fno-exceptions -fno-rtti -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-c++-static-destructors '
# Add flags for LTO
# We cannot enable LTO on macOS for dependencies because it requires -fuse-ld=lld and lld causes many segfaults on macOS (likely related to stack size)
if [ "$BUN_ENABLE_LTO" == "1" ]; then
export CFLAGS="$CFLAGS -flto=full "
export CXXFLAGS="$CXXFLAGS -flto=full -fwhole-program-vtables -fforce-emit-vtables "
export LDFLAGS="$LDFLAGS -flto=full -fwhole-program-vtables -fforce-emit-vtables "
fi
if [[ $(uname -s) == 'Linux' ]]; then
export CFLAGS="$CFLAGS -ffunction-sections -fdata-sections -faddrsig "
export CXXFLAGS="$CXXFLAGS -ffunction-sections -fdata-sections -faddrsig "
export LDFLAGS="${LDFLAGS} -Wl,-z,norelro"
fi
# Clang 18 on macOS needs to have -fno-define-target-os-macros to fix a zlib build issue
# https://gitlab.kitware.com/cmake/cmake/-/issues/25755
if [[ $(uname -s) == 'Darwin' && $LLVM_VERSION == '18' ]]; then
export CFLAGS="$CFLAGS -fno-define-target-os-macros "
export CXXFLAGS="$CXXFLAGS -fno-define-target-os-macros -D_LIBCXX_ENABLE_ASSERTIONS=0 -D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_NONE "
fi
# libarchive needs position-independent executables to compile successfully
if [ -n "$FORCE_PIC" ]; then
export CFLAGS="$CFLAGS -fPIC "
export CXXFLAGS="$CXXFLAGS -fPIC "
elif [[ $(uname -s) == 'Linux' ]]; then
export CFLAGS="$CFLAGS -fno-pie -fno-pic "
export CXXFLAGS="$CXXFLAGS -fno-pie -fno-pic "
fi
if [[ $(uname -s) == 'Linux' && ($(uname -m) == 'aarch64' || $(uname -m) == 'arm64') ]]; then
export CFLAGS="$CFLAGS -march=armv8-a+crc -mtune=ampere1 "
export CXXFLAGS="$CXXFLAGS -march=armv8-a+crc -mtune=ampere1 "
fi
export CFLAGS='-O3 -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer'
export CXXFLAGS='-O3 -fno-exceptions -fno-rtti -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer'
export CMAKE_FLAGS=(
-DCMAKE_C_COMPILER="${CC}"
@@ -119,7 +56,8 @@ if [[ $(uname -s) == 'Linux' ]]; then
fi
if [[ $(uname -s) == 'Darwin' ]]; then
export CMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET:-13.0}
export CMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET:-12.0}
CMAKE_FLAGS+=(-DCMAKE_OSX_DEPLOYMENT_TARGET=${CMAKE_OSX_DEPLOYMENT_TARGET})
export CFLAGS="$CFLAGS -mmacosx-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET} -D__DARWIN_NON_CANCELABLE=1 "
export CXXFLAGS="$CXXFLAGS -mmacosx-version-min=${CMAKE_OSX_DEPLOYMENT_TARGET} -D__DARWIN_NON_CANCELABLE=1 "

View File

@@ -6,162 +6,16 @@ import { copyFileSync, existsSync, mkdirSync, mkdtempSync, readFileSync, readdir
import { basename, dirname, join } from "node:path";
import { tmpdir } from "node:os";
const projectPath = dirname(import.meta.dirname);
const vendorPath = process.env.BUN_VENDOR_PATH || join(projectPath, "vendor");
const isWindows = process.platform === "win32";
const isMacOS = process.platform === "darwin";
const isLinux = process.platform === "linux";
const cwd = dirname(import.meta.dirname);
const spawnSyncTimeout = 1000 * 60;
const spawnTimeout = 1000 * 60 * 3;
/**
* @typedef {Object} S3UploadOptions
* @property {string} [bucket]
* @property {string} filename
* @property {string} content
* @property {Record<string, string>} [headers]
*/
/**
* @param {S3UploadOptions} options
*/
async function uploadFileToS3(options) {
const { AwsV4Signer } = await import("aws4fetch");
const { bucket, filename, content, ...extra } = options;
const baseUrl = getEnv(["S3_ENDPOINT", "S3_BASE_URL", "AWS_ENDPOINT"], "https://s3.amazonaws.com");
const bucketUrl = new URL(bucket || getEnv(["S3_BUCKET", "AWS_BUCKET"]), baseUrl);
const signer = new AwsV4Signer({
accessKeyId: getSecret(["S3_ACCESS_KEY_ID", "AWS_ACCESS_KEY_ID"]),
secretAccessKey: getSecret(["S3_SECRET_ACCESS_KEY", "AWS_SECRET_ACCESS_KEY"]),
url: new URL(filename, bucketUrl),
method: "PUT",
body: content,
...extra,
});
const { url, method, headers, body } = signer.sign();
await fetchSafe(url, {
method,
headers,
body,
});
console.log("Uploaded file to S3:", {
url: `${bucketUrl}`,
filename,
});
}
/**
* @typedef {Object} SentryRelease
* @property {string} organizationId
* @property {string} projectId
* @property {string} version
* @property {string} [url]
* @property {string} [ref]
* @property {string} [dateReleased]
*/
/**
* @param {SentryRelease} options
* @returns {Promise<void>}
*/
async function createSentryRelease(options) {
const { organizationId, projectId, ...body } = options;
const baseUrl = getEnv("SENTRY_BASE_URL", "https://sentry.io");
const url = new URL(`api/0/organizations/${organizationId}/releases`, baseUrl);
const accessToken = getSecret(["SENTRY_AUTH_TOKEN", "SENTRY_TOKEN"]);
const release = await fetchSafe(url, {
method: "POST",
headers: {
"Authorization": `Bearer ${accessToken}`,
"Content-Type": "application/json",
},
body: JSON.stringify(body),
format: "json",
});
console.log("Created Sentry release:", release);
}
/**
* @return {string}
*/
function getGithubToken() {
const token = getEnv("GITHUB_TOKEN", null);
if (token) {
return token;
}
const gh = which("gh");
if (gh) {
const { exitCode, stdout } = spawnSyncSafe(gh, ["auth", "token"]);
if (exitCode === 0) {
return stdout.trim();
}
}
throw new Error("Failed to get GitHub token (set GITHUB_TOKEN or run `gh auth login`)");
}
/**
* @param {string | string[]} name
* @return {string}
*/
function getSecret(name) {
return getEnv(name);
}
/**
* @param {string | string[]} name
* @param {string | null} [defaultValue]
* @returns {string | undefined}
*/
function getEnv(name, defaultValue) {
let result = defaultValue;
for (const key of typeof name === "string" ? [name] : name) {
const value = process.env[key];
if (value) {
result = value;
break;
}
}
if (result || result === null) {
return result;
}
throw new Error(`Environment variable is required: ${name}`);
}
/**
* @typedef {Object} SpawnOptions
* @property {boolean} [throwOnError]
* @property {string} [cwd]
* @property {string} [env]
* @property {string} [encoding]
* @property {number} [timeout]
*/
/**
* @typedef {Object} SpawnResult
* @property {number | null} exitCode
* @property {number | null} signalCode
* @property {string} stdout
* @property {string} stderr
*/
/**
* @param {string} command
* @param {string[]} [args]
* @param {SpawnOptions} [options]
* @returns {Promise<SpawnResult>}
*/
async function spawnSafe(command, args, options = {}) {
const result = new Promise((resolve, reject) => {
let stdout = "";
@@ -206,12 +60,6 @@ async function spawnSafe(command, args, options = {}) {
}
}
/**
* @param {string} command
* @param {string[]} [args]
* @param {SpawnOptions} [options]
* @returns {SpawnResult}
*/
function spawnSyncSafe(command, args, options = {}) {
try {
const { error, status, signal, stdout, stderr } = spawnSync(command, args, {
@@ -238,20 +86,6 @@ function spawnSyncSafe(command, args, options = {}) {
}
}
/**
* @typedef {Object} FetchOptions
* @property {string} [method]
* @property {Record<string, string>} [headers]
* @property {string | Uint8Array} [body]
* @property {"json" | "text" | "bytes"} [format]
* @property {boolean} [throwOnError]
*/
/**
* @param {string | URL} url
* @param {FetchOptions} [options]
* @returns {Promise<Response | string | Uint8Array>}
*/
async function fetchSafe(url, options = {}) {
let response;
try {
@@ -304,6 +138,47 @@ function which(command, path) {
return result.trimEnd();
}
function getZigTarget(os = process.platform, arch = process.arch) {
if (arch === "x64") {
if (os === "linux") return "linux-x86_64";
if (os === "darwin") return "macos-x86_64";
if (os === "win32") return "windows-x86_64";
}
if (arch === "arm64") {
if (os === "linux") return "linux-aarch64";
if (os === "darwin") return "macos-aarch64";
}
throw new Error(`Unsupported zig target: os=${os}, arch=${arch}`);
}
function getRecommendedZigVersion() {
const scriptPath = join(projectPath, "build.zig");
try {
const scriptContent = readFileSync(scriptPath, "utf-8");
const match = scriptContent.match(/recommended_zig_version = "([^"]+)"/);
if (!match) {
throw new Error("File does not contain string: 'recommended_zig_version'");
}
return match[1];
} catch (cause) {
throw new Error("Failed to find recommended Zig version", { cause });
}
}
/**
* @returns {Promise<string>}
*/
async function getLatestZigVersion() {
try {
const response = await fetchSafe("https://ziglang.org/download/index.json", { format: "json" });
const { master } = response;
const { version } = master;
return version;
} catch (cause) {
throw new Error("Failed to get latest Zig version", { cause });
}
}
/**
* @param {string} execPath
* @returns {string | undefined}
@@ -316,3 +191,110 @@ function getVersion(execPath) {
}
return result.trim();
}
/**
* @returns {string}
*/
function getTmpdir() {
if (isMacOS && existsSync("/tmp")) {
return "/tmp";
}
return tmpdir();
}
/**
* @returns {string}
*/
function mkTmpdir() {
return mkdtempSync(join(getTmpdir(), "bun-"));
}
/**
* @param {string} url
* @param {string} [path]
* @returns {Promise<string>}
*/
async function downloadFile(url, path) {
const outPath = path || join(mkTmpdir(), basename(url));
const bytes = await fetchSafe(url, { format: "bytes" });
mkdirSync(dirname(outPath), { recursive: true });
writeFileSync(outPath, bytes);
return outPath;
}
/**
* @param {string} tarPath
* @param {string} [path]
* @returns {Promise<string>}
*/
async function extractFile(tarPath, path) {
const outPath = path || join(mkTmpdir(), basename(tarPath));
mkdirSync(outPath, { recursive: true });
await spawnSafe("tar", ["-xf", tarPath, "-C", outPath, "--strip-components=1"]);
return outPath;
}
const dependencies = [
{
name: "zig",
version: getRecommendedZigVersion(),
download: downloadZig,
},
];
async function getDependencyPath(name) {
let dependency;
for (const entry of dependencies) {
if (name === entry.name) {
dependency = entry;
break;
}
}
if (!dependency) {
throw new Error(`Unknown dependency: ${name}`);
}
const { version, download } = dependency;
mkdirSync(vendorPath, { recursive: true });
for (const path of readdirSync(vendorPath)) {
if (!path.startsWith(name)) {
continue;
}
const dependencyPath = join(vendorPath, path);
const dependencyVersion = getVersion(dependencyPath);
if (dependencyVersion === version) {
return dependencyPath;
}
}
if (!download) {
throw new Error(`Dependency not found: ${name}`);
}
return await download(version);
}
/**
* @param {string} [version]
*/
async function downloadZig(version) {
const target = getZigTarget();
const expectedVersion = version || getRecommendedZigVersion();
const url = `https://ziglang.org/builds/zig-${target}-${expectedVersion}.tar.xz`;
const tarPath = await downloadFile(url);
const extractedPath = await extractFile(tarPath);
const zigPath = join(extractedPath, exePath("zig"));
const actualVersion = getVersion(zigPath);
const outPath = join(vendorPath, exePath(`zig-${actualVersion}`));
mkdirSync(dirname(outPath), { recursive: true });
copyFileSync(zigPath, outPath);
return outPath;
}
/**
* @param {string} path
* @returns {string}
*/
function exePath(path) {
return isWindows ? `${path}.exe` : path;
}
const execPath = await getDependencyPath("zig");
console.log(execPath);

View File

@@ -5,20 +5,11 @@ $npm_client = "npm"
$root = Join-Path (Split-Path -Path $MyInvocation.MyCommand.Definition -Parent) "..\"
# search for .cmd or .exe
function Get-Esbuild-Path {
param(
$Path
)
$Result = Join-Path $Path "node_modules\.bin\esbuild.cmd"
if (Test-Path $Result) {
return $Result
}
return Join-Path $Path "node_modules\.bin\esbuild.exe"
$esbuild = Join-Path $root "node_modules\.bin\esbuild.cmd"
if (!(Test-Path $esbuild)) {
$esbuild = Join-Path $root "node_modules\.bin\esbuild.exe"
}
$esbuild = Get-Esbuild-Path $root
$env:NODE_ENV = "production"
@@ -43,5 +34,5 @@ Pop-Location
# node-fallbacks
Push-Location src\node-fallbacks
& ${npm_client} install
& (Get-Esbuild-Path (Get-Location)) --bundle @(Get-Item .\*.js) --outdir=out --format=esm --minify --platform=browser
& ${esbuild} --bundle @(Get-Item .\*.js) --outdir=out --format=esm --minify --platform=browser
Pop-Location

Some files were not shown because too many files have changed in this diff.