Compare commits


22 Commits

Author SHA1 Message Date
Jarred Sumner  1ab4966bfd  Make this test faster in debug builds  2024-10-20 18:55:02 -07:00
Jarred Sumner  002fd74993  Deflake a couple tests  2024-10-20 18:44:19 -07:00
Jarred Sumner  b319c5f7fe  Fix a test failure  2024-10-20 18:11:58 -07:00
Jarred Sumner  2c7ea40b65  fix  2024-10-20 17:22:24 -07:00
Jarred Sumner  ba66bf14da  Update uws.zig  2024-10-20 17:14:43 -07:00
Jarred Sumner  07d6e31baa  windows  2024-10-20 17:13:46 -07:00
Jarred Sumner  33de5926de  Merge branch 'main' into jarred/timer  2024-10-20 17:08:31 -07:00
Jarred Sumner  8db87098bb  wip  2024-10-18 23:01:53 -07:00
Jarred Sumner  4975b1fd2e  Revert "Experiment: keep flushing immediate queue" (This reverts commit cd5c6693b5.)  2024-10-17 23:53:15 -07:00
Jarred Sumner  133b0045fc  Fix test  2024-10-16 11:30:29 -07:00
Jarred Sumner  bab5fe7588  Merge branch 'main' into jarred/timer  2024-10-16 11:12:05 -07:00
Jarred Sumner  cd5c6693b5  Experiment: keep flushing immediate queue  2024-10-16 01:20:58 -07:00
Jarred Sumner  022c7cf500  Update socket.zig  2024-10-16 00:30:38 -07:00
Jarred Sumner  0ab7ed84cb  Fixups  2024-10-16 00:28:07 -07:00
Jarred Sumner  1a0badd830  Fix edgecase  2024-10-15 23:31:17 -07:00
Jarred Sumner  87f070acac  Fix edgecase with AbortSignal  2024-10-15 21:58:20 -07:00
Jarred Sumner  5d763fd71e  fix bug  2024-10-15 20:39:58 -07:00
Jarred Sumner  6d9e2d437a  hmm  2024-10-15 19:58:42 -07:00
Jarred Sumner  a444dcebb8  some tests  2024-10-15 19:17:41 -07:00
Jarred Sumner  cd05556689  auto uncork  2024-10-15 18:35:01 -07:00
Jarred Sumner  ef4435d987  fixup  2024-10-15 18:04:03 -07:00
Jarred Sumner  318eb7bcbf  Fix num_polls never going down  2024-10-15 18:00:24 -07:00
2983 changed files with 84239 additions and 206429 deletions

File diff suppressed because it is too large.

.buildkite/ci.yml (new file, 790 lines)

@@ -0,0 +1,790 @@
# Build and test Bun on macOS, Linux, and Windows.
# https://buildkite.com/docs/pipelines/defining-steps
#
# If a step has the `robobun: true` label, robobun will listen
# to webhooks from Buildkite and provision a VM to run the step.
#
# Changes to this file will be automatically uploaded on the next run
# for a particular commit.
steps:
# macOS aarch64
- key: "darwin-aarch64"
group: ":darwin: aarch64"
steps:
- key: "darwin-aarch64-build-deps"
label: "build-deps"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
command:
- "bun run build:ci --target dependencies"
- key: "darwin-aarch64-build-cpp"
label: "build-cpp"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-aarch64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain darwin-aarch64"
- key: "darwin-aarch64-build-bun"
label: "build-bun"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
depends_on:
- "darwin-aarch64-build-deps"
- "darwin-aarch64-build-cpp"
- "darwin-aarch64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-aarch64-test-macos-14"
label: ":darwin: 14 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-aarch64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "aarch64"
release: "14"
command:
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
- key: "darwin-aarch64-test-macos-13"
label: ":darwin: 13 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-aarch64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "aarch64"
release: "13"
command:
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
# macOS x64
- key: "darwin-x64"
group: ":darwin: x64"
steps:
- key: "darwin-x64-build-deps"
label: "build-deps"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
command:
- "bun run build:ci --target dependencies"
- key: "darwin-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain darwin-x64"
- key: "darwin-x64-build-bun"
label: "build-bun"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
depends_on:
- "darwin-x64-build-deps"
- "darwin-x64-build-cpp"
- "darwin-x64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-x64-test-macos-14"
label: ":darwin: 14 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-x64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "x64"
release: "14"
command:
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
- key: "darwin-x64-test-macos-13"
label: ":darwin: 13 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-x64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "x64"
release: "13"
command:
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
# Linux x64
- key: "linux-x64"
group: ":linux: x64"
steps:
- key: "linux-x64-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
command:
- "bun run build:ci --target dependencies"
- key: "linux-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain linux-x64"
- key: "linux-x64-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
depends_on:
- "linux-x64-build-deps"
- "linux-x64-build-cpp"
- "linux-x64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-test-debian-12"
label: ":debian: 12 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
- key: "linux-x64-test-ubuntu-2204"
label: ":ubuntu: 22.04 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
- key: "linux-x64-test-ubuntu-2004"
label: ":ubuntu: 20.04 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
# Linux x64-baseline
- key: "linux-x64-baseline"
group: ":linux: x64-baseline"
steps:
- key: "linux-x64-baseline-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target dependencies"
- key: "linux-x64-baseline-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
ENABLE_BASELINE: "ON"
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-baseline-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target bun-zig --toolchain linux-x64-baseline"
- key: "linux-x64-baseline-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
depends_on:
- "linux-x64-baseline-build-deps"
- "linux-x64-baseline-build-cpp"
- "linux-x64-baseline-build-zig"
env:
ENABLE_BASELINE: "ON"
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-baseline-test-debian-12"
label: ":debian: 12 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
- key: "linux-x64-baseline-test-ubuntu-2204"
label: ":ubuntu: 22.04 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
- key: "linux-x64-baseline-test-ubuntu-2004"
label: ":ubuntu: 20.04 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
# Linux aarch64
- key: "linux-aarch64"
group: ":linux: aarch64"
steps:
- key: "linux-aarch64-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
command:
- "bun run build:ci --target dependencies"
- key: "linux-aarch64-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-aarch64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain linux-aarch64"
- key: "linux-aarch64-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
depends_on:
- "linux-aarch64-build-deps"
- "linux-aarch64-build-cpp"
- "linux-aarch64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-aarch64-test-debian-12"
label: ":debian: 12 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
- key: "linux-aarch64-test-ubuntu-2204"
label: ":ubuntu: 22.04 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
- key: "linux-aarch64-test-ubuntu-2004"
label: ":ubuntu: 20.04 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
# Windows x64
- key: "windows-x64"
group: ":windows: x64"
steps:
- key: "windows-x64-build-deps"
label: "build-deps"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
command:
- "bun run build:ci --target dependencies"
- key: "windows-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain windows-x64"
- key: "windows-x64-build-bun"
label: "build-bun"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
depends_on:
- "windows-x64-build-deps"
- "windows-x64-build-cpp"
- "windows-x64-build-zig"
retry:
automatic:
- exit_status: 255
limit: 5
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-test-bun"
label: ":windows: x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 1
retry:
automatic:
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "windows-x64-build-bun"
agents:
robobun: "true"
os: "windows"
arch: "x64"
command:
- "node .\\scripts\\runner.node.mjs --step windows-x64-build-bun"
# Windows x64-baseline
- key: "windows-x64-baseline"
group: ":windows: x64-baseline"
steps:
- key: "windows-x64-baseline-build-deps"
label: "build-deps"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target dependencies"
- key: "windows-x64-baseline-build-cpp"
label: "build-cpp"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-baseline-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target bun-zig --toolchain windows-x64-baseline"
- key: "windows-x64-baseline-build-bun"
label: "build-bun"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
depends_on:
- "windows-x64-baseline-build-deps"
- "windows-x64-baseline-build-cpp"
- "windows-x64-baseline-build-zig"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-baseline-test-bun"
label: ":windows: x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 1
retry:
automatic:
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "windows-x64-baseline-build-bun"
agents:
robobun: "true"
os: "windows"
arch: "x64"
command:
- "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun"


@@ -2,10 +2,96 @@
set -eo pipefail
function assert_build() {
if [ -z "$BUILDKITE_REPO" ]; then
echo "error: Cannot find repository for this build"
exit 1
fi
if [ -z "$BUILDKITE_COMMIT" ]; then
echo "error: Cannot find commit for this build"
exit 1
fi
}
function assert_buildkite_agent() {
if ! command -v buildkite-agent &> /dev/null; then
echo "error: Cannot find buildkite-agent, please install it:"
echo "https://buildkite.com/docs/agent/v3/install"
exit 1
fi
}
function assert_jq() {
assert_command "jq" "jq" "https://stedolan.github.io/jq/"
}
function assert_curl() {
assert_command "curl" "curl" "https://curl.se/download.html"
}
function assert_command() {
local command="$1"
local package="$2"
local help_url="$3"
if ! command -v "$command" &> /dev/null; then
echo "warning: $command is not installed, installing..."
if command -v brew &> /dev/null; then
HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package"
else
echo "error: Cannot install $command, please install it"
if [ -n "$help_url" ]; then
echo ""
echo "hint: See $help_url for help"
fi
exit 1
fi
fi
}
function assert_release() {
if [ "$RELEASE" == "1" ]; then
run_command buildkite-agent meta-data set canary "0"
fi
}
function assert_canary() {
local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
if [ -z "$canary" ]; then
local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github.com/([^/]+)/([^/]+).git#\1/\2#g')
local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")"
if [ "$tag" == "null" ]; then
canary="1"
else
local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by")
if [ "$revision" == "null" ]; then
canary="1"
else
canary="$revision"
fi
fi
run_command buildkite-agent meta-data set canary "$canary"
fi
}
function upload_buildkite_pipeline() {
local path="$1"
if [ ! -f "$path" ]; then
echo "error: Cannot find pipeline: $path"
exit 1
fi
run_command buildkite-agent pipeline upload "$path"
}
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
run_command node ".buildkite/ci.mjs" "$@"
assert_build
assert_buildkite_agent
assert_jq
assert_curl
assert_release
assert_canary
upload_buildkite_pipeline ".buildkite/ci.yml"
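
One small idiom in the script above is easy to miss: `run_command` turns on `set -x` so each command is echoed to the Buildkite log before it runs, then turns tracing back off inside a `{ set +x; } 2>/dev/null` group so the `set +x` line itself never shows up in the log. A standalone sketch of the same trick, with an example invocation (illustration only, not part of the script; the helper name here is hypothetical):

#!/usr/bin/env bash
# Illustration only: trace each command without leaking the "set +x" line into the log.
# "trace_run" is a hypothetical name; the script above calls this run_command.
trace_run() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

trace_run uname -s
# Log output looks roughly like:
#   + uname -s
#   Linux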


@@ -162,27 +162,6 @@ function upload_s3_file() {
run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
}
function send_bench_webhook() {
if [ -z "$BENCHMARK_URL" ]; then
echo "error: \$BENCHMARK_URL is not set"
# exit 1 # TODO: this isn't live yet
return
fi
local tag="$1"
local commit="$BUILDKITE_COMMIT"
local artifact_path="${commit}"
if [ "$tag" == "canary" ]; then
artifact_path="${commit}-canary"
fi
local artifact_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/$artifact_path/bun-linux-x64.zip"
local webhook_url="$BENCHMARK_URL?tag=$tag&commit=$commit&artifact_url=$artifact_url"
curl -X POST "$webhook_url"
}
function create_release() {
assert_main
assert_buildkite_agent
@@ -202,12 +181,6 @@ function create_release() {
bun-linux-x64-profile.zip
bun-linux-x64-baseline.zip
bun-linux-x64-baseline-profile.zip
bun-linux-aarch64-musl.zip
bun-linux-aarch64-musl-profile.zip
bun-linux-x64-musl.zip
bun-linux-x64-musl-profile.zip
bun-linux-x64-musl-baseline.zip
bun-linux-x64-musl-baseline-profile.zip
bun-windows-x64.zip
bun-windows-x64-profile.zip
bun-windows-x64-baseline.zip
@@ -233,7 +206,6 @@ function create_release() {
update_github_release "$tag"
create_sentry_release "$tag"
send_bench_webhook "$tag"
}
function assert_canary() {


@@ -11,8 +11,5 @@ packages/**/bun-profile
src/bun.js/WebKit
src/bun.js/WebKit/LayoutTests
zig-build
.zig-cache
zig-out
build
vendor
node_modules
zig-cache
zig-out

.gitattributes (vendored, 2 lines changed)

@@ -49,5 +49,3 @@ vendor/brotli/** linguist-vendored
test/js/node/test/fixtures linguist-vendored
test/js/node/test/common linguist-vendored
test/js/bun/css/files linguist-vendored


@@ -83,26 +83,6 @@ jobs:
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
rm -rf is-outdated.txt outdated.txt latest.txt
- name: Generate comment text with Sentry Link
if: github.event.label.name == 'crash'
# ignore if fail
continue-on-error: true
id: generate-comment-text
env:
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_EVENTS_SECRET }}
shell: bash
run: |
bun scripts/associate-issue-with-sentry.ts
if [[ -f "sentry-link.txt" ]]; then
echo "sentry-link=$(cat sentry-link.txt)" >> $GITHUB_OUTPUT
fi
if [[ -f "sentry-id.txt" ]]; then
echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT
fi
- name: Add labels
uses: actions-cool/issues-helper@v3
if: github.event.label.name == 'crash'
@@ -112,7 +92,7 @@ jobs:
issue-number: ${{ github.event.issue.number }}
labels: ${{ steps.add-labels.outputs.labels }}
- name: Comment outdated
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
@@ -126,40 +106,6 @@ jobs:
```sh
bun upgrade
```
- name: Comment with Sentry Link and outdated version
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.oudated }}.
Are you able to reproduce this crash on the latest version of Bun?
```sh
bun upgrade
```
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment with Sentry Link
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated != 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Thank you for reporting this crash.
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment needs repro
if: github.event.label.name == 'needs repro'
uses: actions-cool/issues-helper@v3


@@ -1,92 +0,0 @@
name: Update c-ares
on:
schedule:
- cron: "0 4 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check c-ares version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildCares.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildCares.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildCares.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/c-ares/c-ares/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildCares.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildCares.cmake
commit-message: "deps: update c-ares to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update c-ares to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-cares-${{ github.run_number }}
body: |
## What does this PR do?
Updates c-ares to version ${{ steps.check-version.outputs.tag }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
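
The awk and sed one-liners in this workflow (and in the near-identical libarchive, libdeflate, lolhtml, and lshpack workflows below) assume the CMake target file lists `COMMIT` on one line and the 40-character hash, indented, on the next line. A quick way to see what the extraction step produces, using a made-up file and hash (illustration only, not part of the workflow):

# Illustration only; the file path and hash are made up.
printf '  COMMIT\n    0123456789abcdef0123456789abcdef01234567\n' > /tmp/BuildExample.cmake
awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' /tmp/BuildExample.cmake
# prints: 0123456789abcdef0123456789abcdef01234567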


@@ -1,92 +0,0 @@
name: Update libarchive
on:
schedule:
- cron: "0 3 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check libarchive version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibArchive.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildLibArchive.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildLibArchive.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/libarchive/libarchive/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibArchive.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildLibArchive.cmake
commit-message: "deps: update libarchive to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update libarchive to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-libarchive-${{ github.run_number }}
body: |
## What does this PR do?
Updates libarchive to version ${{ steps.check-version.outputs.tag }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libarchive.yml)


@@ -1,92 +0,0 @@
name: Update libdeflate
on:
schedule:
- cron: "0 2 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check libdeflate version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibDeflate.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildLibDeflate.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildLibDeflate.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/ebiggers/libdeflate/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibDeflate.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildLibDeflate.cmake
commit-message: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-libdeflate-${{ github.run_number }}
body: |
## What does this PR do?
Updates libdeflate to version ${{ steps.check-version.outputs.tag }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libdeflate.yml)


@@ -1,92 +0,0 @@
name: Update lolhtml
on:
schedule:
- cron: "0 1 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check lolhtml version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLolHtml.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildLolHtml.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildLolHtml.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/cloudflare/lol-html/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLolHtml.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildLolHtml.cmake
commit-message: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-lolhtml-${{ github.run_number }}
body: |
## What does this PR do?
Updates lolhtml to version ${{ steps.check-version.outputs.tag }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lolhtml.yml)


@@ -1,92 +0,0 @@
name: Update lshpack
on:
schedule:
- cron: "0 5 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check lshpack version
id: check-version
run: |
set -euo pipefail
# Extract the commit hash from the line after COMMIT
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLshpack.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find COMMIT line in BuildLshpack.cmake"
exit 1
fi
# Validate that it looks like a git hash
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid git hash format in BuildLshpack.cmake"
echo "Found: $CURRENT_VERSION"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/litespeedtech/ls-hpack/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
echo "Error: Invalid SHA format received from GitHub"
echo "Found: $LATEST_SHA"
echo "Expected: 40 character hexadecimal string"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
# Handle multi-line format where COMMIT and its value are on separate lines
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLshpack.cmake
- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
cmake/targets/BuildLshpack.cmake
commit-message: "deps: update lshpack to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update lshpack to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-lshpack-${{ github.run_number }}
body: |
## What does this PR do?
Updates lshpack to version ${{ steps.check-version.outputs.tag }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lshpack.yml)


@@ -1,109 +0,0 @@
name: Update SQLite3
on:
schedule:
- cron: "0 6 * * 0" # Run weekly
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check SQLite version
id: check-version
run: |
set -euo pipefail
# Get current version from the header file using SQLITE_VERSION_NUMBER
CURRENT_VERSION_NUM=$(grep -o '#define SQLITE_VERSION_NUMBER [0-9]\+' src/bun.js/bindings/sqlite/sqlite3_local.h | awk '{print $3}' | tr -d '\n\r')
if [ -z "$CURRENT_VERSION_NUM" ]; then
echo "Error: Could not find SQLITE_VERSION_NUMBER in sqlite3_local.h"
exit 1
fi
# Convert numeric version to semantic version for display
CURRENT_MAJOR=$((CURRENT_VERSION_NUM / 1000000))
CURRENT_MINOR=$((($CURRENT_VERSION_NUM / 1000) % 1000))
CURRENT_PATCH=$((CURRENT_VERSION_NUM % 1000))
CURRENT_VERSION="$CURRENT_MAJOR.$CURRENT_MINOR.$CURRENT_PATCH"
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
echo "current_num=$CURRENT_VERSION_NUM" >> $GITHUB_OUTPUT
# Fetch SQLite download page
DOWNLOAD_PAGE=$(curl -sL https://sqlite.org/download.html)
if [ -z "$DOWNLOAD_PAGE" ]; then
echo "Error: Failed to fetch SQLite download page"
exit 1
fi
# Extract latest version and year from the amalgamation link
LATEST_INFO=$(echo "$DOWNLOAD_PAGE" | grep -o 'sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1)
LATEST_YEAR=$(echo "$DOWNLOAD_PAGE" | grep -o '[0-9]\{4\}/sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1 | cut -d'/' -f1 | tr -d '\n\r')
LATEST_VERSION_NUM=$(echo "$LATEST_INFO" | grep -o '[0-9]\{7\}' | tr -d '\n\r')
if [ -z "$LATEST_VERSION_NUM" ] || [ -z "$LATEST_YEAR" ]; then
echo "Error: Could not extract latest version info"
exit 1
fi
# Convert numeric version to semantic version for display
LATEST_MAJOR=$((10#$LATEST_VERSION_NUM / 1000000))
LATEST_MINOR=$((($LATEST_VERSION_NUM / 1000) % 1000))
LATEST_PATCH=$((10#$LATEST_VERSION_NUM % 1000))
LATEST_VERSION="$LATEST_MAJOR.$LATEST_MINOR.$LATEST_PATCH"
echo "latest=$LATEST_VERSION" >> $GITHUB_OUTPUT
echo "latest_year=$LATEST_YEAR" >> $GITHUB_OUTPUT
echo "latest_num=$LATEST_VERSION_NUM" >> $GITHUB_OUTPUT
# Debug output
echo "Current version: $CURRENT_VERSION ($CURRENT_VERSION_NUM)"
echo "Latest version: $LATEST_VERSION ($LATEST_VERSION_NUM)"
- name: Update SQLite if needed
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
run: |
set -euo pipefail
TEMP_DIR=$(mktemp -d)
cd $TEMP_DIR
echo "Downloading from: https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
# Download and extract latest version
wget "https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
unzip "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
cd "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}"
# Add header comment and copy files
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
cat sqlite3.c >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
cat sqlite3.h >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
- name: Create Pull Request
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
src/bun.js/bindings/sqlite/sqlite3.c
src/bun.js/bindings/sqlite/sqlite3_local.h
commit-message: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
title: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
delete-branch: true
branch: deps/update-sqlite-${{ steps.check-version.outputs.latest }}
body: |
## What does this PR do?
Updates SQLite to version ${{ steps.check-version.outputs.latest }}
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-sqlite3.yml)
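
The shell arithmetic in this workflow converts SQLite's numeric `SQLITE_VERSION_NUMBER` (major*1000000 + minor*1000 + patch) into a dotted version string. A worked example with a made-up value (illustration only, not part of the workflow):

# Illustration only; 3047000 is just an example SQLITE_VERSION_NUMBER.
NUM=3047000
MAJOR=$((NUM / 1000000))         # 3
MINOR=$(((NUM / 1000) % 1000))   # 47
PATCH=$((NUM % 1000))            # 0
echo "$MAJOR.$MINOR.$PATCH"      # prints 3.47.0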

.gitignore (vendored, 26 lines changed)

@@ -26,7 +26,6 @@
*.db
*.dmg
*.dSYM
*.generated.ts
*.jsb
*.lib
*.log
@@ -54,8 +53,8 @@
/test-report.md
/test.js
/test.ts
/test.zig
/testdir
/test.zig
build
build.ninja
bun-binary
@@ -112,14 +111,10 @@ pnpm-lock.yaml
profile.json
README.md.template
release/
scripts/env.local
sign.*.json
sign.json
src/bake/generated.ts
src/generated_enum_extractor.zig
src/bun.js/bindings-obj
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/bindings/GeneratedBindings.zig
src/bun.js/debug-bindings-obj
src/deps/zig-clap/.gitattributes
src/deps/zig-clap/.github
@@ -136,7 +131,6 @@ src/runtime.version
src/tests.zig
test.txt
test/js/bun/glob/fixtures
test/node.js/upstream
tsconfig.tsbuildinfo
txt.js
x64
@@ -147,10 +141,6 @@ test/node.js/upstream
.zig-cache
scripts/env.local
*.generated.ts
src/bake/generated.ts
test/cli/install/registry/packages/publish-pkg-*
test/cli/install/registry/packages/@secret/publish-pkg-8
test/js/third_party/prisma/prisma/sqlite/dev.db-journal
# Dependencies
/vendor
@@ -158,24 +148,18 @@ test/js/third_party/prisma/prisma/sqlite/dev.db-journal
# Dependencies (before CMake)
# These can be removed in the far future
/src/bun.js/WebKit
/src/deps/WebKit
/src/deps/boringssl
/src/deps/brotli
/src/deps/c*ares
/src/deps/lol*html
/src/deps/libarchive
/src/deps/libdeflate
/src/deps/libuv
/src/deps/lol*html
/src/deps/ls*hpack
/src/deps/mimalloc
/src/deps/picohttpparser
/src/deps/tinycc
/src/deps/WebKit
/src/deps/zig
/src/deps/zlib
/src/deps/zstd
# Generated files
.buildkite/ci.yml
*.sock
scratch*.{js,ts,tsx,cjs,mjs}
/src/deps/zlib
/src/deps/zig


@@ -5,5 +5,6 @@ test/js/deno
test/node.js
src/react-refresh.js
*.min.js
test/js/node/test/fixtures
test/js/node/test/common
test/snippets
test/js/node/test

.vscode/launch.json (generated, vendored, 178 lines changed)

@@ -16,13 +16,12 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -32,13 +31,12 @@
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
"BUN_DEBUG_jest": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -54,13 +52,12 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -70,13 +67,12 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -86,13 +82,12 @@
"args": ["test", "--watch", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -102,13 +97,12 @@
"args": ["test", "--hot", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -118,14 +112,13 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -140,14 +133,13 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -168,8 +160,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -179,6 +169,7 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
"BUN_DEBUG_IncrementalGraph": "1",
@@ -187,8 +178,6 @@
"GOMAXPROCS": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -198,12 +187,11 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -213,15 +201,11 @@
"args": ["run", "--watch", "${fileBasename}"],
"cwd": "${fileDirname}",
"env": {
// "BUN_DEBUG_DEBUGGER": "1",
// "BUN_DEBUG_INTERNAL_DEBUGGER": "1",
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
// "BUN_INSPECT": "ws+unix:///var/folders/jk/8fzl9l5119598vsqrmphsw7m0000gn/T/tl15npi7qtf.sock?report=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -231,12 +215,11 @@
"args": ["run", "--hot", "${fileBasename}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -252,8 +235,6 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -274,8 +255,6 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -291,13 +270,12 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -307,13 +285,12 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -323,13 +300,12 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -339,13 +315,12 @@
"args": ["test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -355,13 +330,12 @@
"args": ["test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -371,14 +345,13 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -393,14 +366,13 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -416,12 +388,11 @@
"args": ["exec", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
// bun test [*]
{
@@ -432,12 +403,11 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -447,12 +417,11 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -462,13 +431,12 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
"BUN_INSPECT": "ws://localhost:0/",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -483,12 +451,11 @@
"args": ["install"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -498,12 +465,11 @@
"args": ["test/runner.node.mjs"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
// Windows: bun test [file]
{
@@ -517,6 +483,10 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -542,6 +512,10 @@
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -567,6 +541,10 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -592,6 +570,10 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "0",
@@ -617,6 +599,10 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -651,6 +637,10 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -686,6 +676,10 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -711,6 +705,10 @@
"args": ["install"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -732,6 +730,10 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -753,6 +755,10 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -783,6 +789,10 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -814,6 +824,10 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -839,6 +853,10 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -864,6 +882,10 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "0",
@@ -889,6 +911,10 @@
"args": ["test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -914,6 +940,10 @@
"args": ["test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -939,6 +969,10 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -973,6 +1007,10 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1008,6 +1046,10 @@
"args": ["exec", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1030,6 +1072,10 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1051,6 +1097,10 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1076,6 +1126,10 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1110,6 +1164,10 @@
"args": ["test/runner.node.mjs"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1124,8 +1182,6 @@
},
],
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
],
"inputs": [


@@ -63,7 +63,7 @@
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"clangd.arguments": ["-header-insertion=never", "-no-unused-includes"],
"clangd.arguments": ["-header-insertion=never"],
// JavaScript
"prettier.enable": true,
@@ -78,7 +78,7 @@
"prettier.prettierPath": "./node_modules/prettier",
// TypeScript
"typescript.tsdk": "node_modules/typescript/lib",
"typescript.tsdk": "${workspaceFolder}/node_modules/typescript/lib",
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},

.vscode/tasks.json

@@ -2,57 +2,50 @@
"version": "2.0.0",
"tasks": [
{
"label": "Build Bun",
"type": "shell",
"command": "bun run build",
"group": {
"kind": "build",
"isDefault": true,
"type": "process",
"label": "Install Dependencies",
"command": "scripts/all-dependencies.sh",
"windows": {
"command": "scripts/all-dependencies.ps1",
},
"problemMatcher": [
{
"owner": "zig",
"fileLocation": ["relative", "${workspaceFolder}"],
"pattern": [
{
"regexp": "^(.+?):(\\d+):(\\d+): (error|warning|note): (.+)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5,
},
{
"regexp": "^\\s+(.+)$",
"message": 1,
"loop": true,
},
],
},
{
"owner": "clang",
"fileLocation": ["relative", "${workspaceFolder}"],
"pattern": [
{
"regexp": "^([^:]+):(\\d+):(\\d+):\\s+(warning|error|note|remark):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5,
},
{
"regexp": "^\\s*(.*)$",
"message": 1,
"loop": true,
},
],
},
],
"presentation": {
"reveal": "always",
"panel": "shared",
"clear": true,
"icon": {
"id": "arrow-down",
},
"options": {
"cwd": "${workspaceFolder}",
},
},
{
"type": "process",
"label": "Setup Environment",
"dependsOn": ["Install Dependencies"],
"command": "scripts/setup.sh",
"windows": {
"command": "scripts/setup.ps1",
},
"icon": {
"id": "check",
},
"options": {
"cwd": "${workspaceFolder}",
},
},
{
"type": "process",
"label": "Build Bun",
"dependsOn": ["Setup Environment"],
"command": "bun",
"args": ["run", "build"],
"icon": {
"id": "gear",
},
"options": {
"cwd": "${workspaceFolder}",
},
"isBuildCommand": true,
"runOptions": {
"instanceLimit": 1,
"reevaluateOnRerun": true,
},
},
],
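For reference, the rebuilt "Build Bun" task above leans on the two `problemMatcher` regexes to turn zig and clang compiler output into clickable problems. Below is a minimal sanity check of those patterns; the sample compiler lines are hypothetical, not taken from a real build log.

```js
// Sanity-check the zig/clang problemMatcher regexes from tasks.json above.
// The sample lines are hypothetical compiler output, for illustration only.
const zigLine = /^(.+?):(\d+):(\d+): (error|warning|note): (.+)$/;
const clangLine = /^([^:]+):(\d+):(\d+):\s+(warning|error|note|remark):\s+(.*)$/;

const samples = [
  "src/bun.js/event_loop.zig:42:13: error: expected ';' after statement", // hypothetical zig output
  "src/bun.js/bindings/bindings.cpp:7:1: warning: unused variable 'x'", // hypothetical clang output
];

for (const line of samples) {
  const m = line.match(zigLine) ?? line.match(clangLine);
  if (m) {
    // Capture groups map to the file/line/column/severity/message fields declared in the matcher.
    const [, file, lineNo, column, severity, message] = m;
    console.log({ file, line: Number(lineNo), column: Number(column), severity, message });
  }
}
```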


@@ -1,6 +1,6 @@
Configuring a development environment for Bun can take 10-30 minutes depending on your internet connection and computer speed. You will need ~10GB of free disk space for the repository and build artifacts.
If you are using Windows, please refer to [this guide](/docs/project/building-windows.md)
If you are using Windows, please refer to [this guide](/docs/project/building-windows)
{% details summary="For Ubuntu users" %}
TL;DR: Ubuntu 22.04 is suggested.
@@ -11,7 +11,7 @@ Bun currently requires `glibc >=2.32` in development which means if you're on Ub
Using your system's package manager, install Bun's dependencies:
{% codetabs group="os" %}
{% codetabs %}
```bash#macOS (Homebrew)
$ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby
@@ -60,7 +60,7 @@ $ brew install bun
Bun requires LLVM 16 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
{% codetabs group="os" %}
{% codetabs %}
```bash#macOS (Homebrew)
$ brew install llvm@18
@@ -77,8 +77,8 @@ $ sudo pacman -S llvm clang lld
```bash#Fedora
$ sudo dnf install 'dnf-command(copr)'
$ sudo dnf copr enable -y @fedora-llvm-team/llvm17
$ sudo dnf install llvm16 clang16 lld16-devel
$ sudo dnf copr enable -y @fedora-llvm-team/llvm-snapshots
$ sudo dnf install llvm clang lld
```
```bash#openSUSE Tumbleweed
@@ -97,7 +97,7 @@ $ which clang-16
If not, run this to manually add it:
{% codetabs group="os" %}
{% codetabs %}
```bash#macOS (Homebrew)
# use fish_add_path if you're using fish
@@ -285,17 +285,17 @@ If you see this error when compiling, run:
$ xcode-select --install
```
### Cannot find `libatomic.a`
## Cannot find `libatomic.a`
Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:
```bash
$ bun run build -DUSE_STATIC_LIBATOMIC=OFF
$ bun setup -DUSE_STATIC_LIBATOMIC=OFF
```
The built version of Bun may not work on other systems if compiled this way.
### ccache conflicts with building TinyCC on macOS
## ccache conflicts with building TinyCC on macOS
If you run into issues with `ccache` when building TinyCC, try reinstalling ccache
@@ -303,9 +303,3 @@ If you run into issues with `ccache` when building TinyCC, try reinstalling ccac
brew uninstall ccache
brew install ccache
```
## Using bun-debug
- Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging)
- Enable logging for a specific zig scope: `BUN_DEBUG_EventLoop=1 bun-debug ...` (to allow `std.log.scoped(.EventLoop)`)
- Bun transpiles every file it runs, to see the actual executed source in a debug build find it in `/tmp/bun-debug-src/...path/to/file`, for example the transpiled version of `/home/bun/index.ts` would be in `/tmp/bun-debug-src/home/bun/index.ts`

LATEST

@@ -1 +1 @@
1.1.38
1.1.31


@@ -1,5 +1,5 @@
<p align="center">
<a href="https://bun.sh"><img src="https://github.com/user-attachments/assets/50282090-adfd-4ddb-9e27-c30753c6b161" alt="Logo" height=170></a>
<a href="https://bun.sh"><img src="https://user-images.githubusercontent.com/709451/182802334-d9c42afe-f35d-4a7b-86ea-9985f73f20c3.png" alt="Logo" height=170></a>
</p>
<h1 align="center">Bun</h1>


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("sync", () => {});
bench("async", async () => {});


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
bench("sync", () => {});
bench("async", async () => {});


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("sync", () => {});
bench("async", async () => {});



@@ -1,5 +1,5 @@
import { bench, run } from "mitata";
import { copyFileSync, statSync, writeFileSync } from "node:fs";
import { bench, run } from "../runner.mjs";
function runner(ready) {
for (let size of [1, 10, 100, 1000, 10000, 100000, 1000000, 10000000]) {


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const crypto = require("node:crypto");
const keyPair = crypto.generateKeyPairSync("rsa", {


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const crypto = require("node:crypto");
const keyPair = crypto.generateKeyPairSync("rsa", {


@@ -1,5 +1,5 @@
import { expect } from "bun:test";
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const MAP_SIZE = 10_000;


@@ -1,5 +1,5 @@
import { expect } from "bun:test";
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const SET_SIZE = 10_000;


@@ -1,5 +1,5 @@
import { group } from "mitata";
import EventEmitterNative from "node:events";
import { group } from "../runner.mjs";
export const implementations = [
{


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
// Pseudo RNG is derived from https://stackoverflow.com/a/424445


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const count = 100;


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const count = 100;


@@ -1,5 +1,5 @@
import { CString, dlopen, ptr } from "bun:ffi";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "mitata";
const { napiNoop, napiHash, napiString } = require(import.meta.dir + "/src/ffi_napi_bench.node");


@@ -1,4 +1,4 @@
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "../node_modules/mitata/src/cli.mjs";
const extension = "darwin" !== Deno.build.os ? "so" : "dylib";
const path = new URL("src/target/release/libffi_napi_bench." + extension, import.meta.url).pathname;


@@ -1,5 +1,5 @@
import { bench, group, run } from "mitata";
import { createRequire } from "node:module";
import { bench, group, run } from "../runner.mjs";
const require = createRequire(import.meta.url);
const { napiNoop, napiHash, napiString } = require("./src/ffi_napi_bench.node");


@@ -1,5 +1,5 @@
import braces from "braces";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "mitata";
// const iterations = 1000;
const iterations = 100;
@@ -10,16 +10,15 @@ const veryComplexPattern = "{a,b,HI{c,e,LMAO{d,f}Q}}{1,2,{3,4},5}";
console.log(braces(complexPattern, { expand: true }));
function benchPattern(pattern, name) {
const _name = `${name} pattern: "${pattern}"`;
group({ name: _name, summary: true }, () => {
group({ name: `${name} pattern: "${pattern}"`, summary: true }, () => {
if (typeof Bun !== "undefined")
bench(`Bun (${_name})`, () => {
bench("Bun", () => {
for (let i = 0; i < iterations; i++) {
Bun.$.braces(pattern);
}
});
bench(`micromatch/braces ${_name}`, () => {
bench("micromatch/braces", () => {
for (let i = 0; i < iterations; i++) {
braces(pattern, { expand: true });
}


@@ -1,5 +1,5 @@
import micromatch from "micromatch";
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);


@@ -1,6 +1,6 @@
import fg from "fast-glob";
import { fdir } from "fdir";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "mitata";
const normalPattern = "*.ts";
const recursivePattern = "**/*.ts";


@@ -1,5 +1,5 @@
import { gunzipSync, gzipSync } from "bun";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "mitata";
const data = await Bun.file(require.resolve("@babel/standalone/babel.min.js")).arrayBuffer();


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const data = new TextEncoder().encode("Hello World!".repeat(9999));


@@ -1,7 +1,7 @@
import { readFileSync } from "fs";
import { bench, run } from "mitata";
import { createRequire } from "module";
import { gunzipSync, gzipSync } from "zlib";
import { bench, run } from "../runner.mjs";
const require = createRequire(import.meta.url);
const data = readFileSync(require.resolve("@babel/standalone/babel.min.js"));


@@ -0,0 +1,8 @@
import { bench, run } from "mitata";
bench("JSON.stringify({hello: 'world'})", () => JSON.stringify({ hello: "world" }));
const otherUint8Array = new Uint8Array(1024);
bench("Uint8Array.from(otherUint8Array)", () => Uint8Array.from(otherUint8Array));
run();


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("console.log('hello')", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
bench("console.log", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("console.log", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));


@@ -1,4 +1,4 @@
import { bench, run } from "../../runner.mjs";
import { bench, run } from "mitata";
import {
arch,
cpus,


@@ -1,3 +1,4 @@
import { bench, run } from "mitata";
import {
arch,
cpus,
@@ -18,7 +19,6 @@ import {
userInfo,
version,
} from "node:os";
import { bench, run } from "../../runner.mjs";
bench("cpus()", () => cpus());
bench("networkInterfaces()", () => networkInterfaces());


@@ -13,9 +13,7 @@
"execa": "^8.0.1",
"fast-glob": "3.3.1",
"fdir": "^6.1.0",
"mitata": "^1.0.10",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"mitata": "^0.1.6",
"string-width": "7.1.0",
"tinycolor2": "^1.6.0",
"zx": "^7.2.3"


@@ -1,19 +0,0 @@
import * as Mitata from "mitata";
import process from "node:process";
const asJSON = !!process?.env?.BENCHMARK_RUNNER;
/** @param {Parameters<typeof Mitata["run"]>["0"]} opts */
export function run(opts = {}) {
if (asJSON) {
opts.format = "json";
}
return Mitata.run(opts);
}
export const bench = Mitata.bench;
export function group(_name, fn) {
return Mitata.group(fn);
}
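Most of the remaining changes in this compare swap the bench snippets between importing `mitata` (or `mitata/src/cli.mjs`) directly and importing the shared `runner.mjs` wrapper shown above. As a rough sketch of a snippet written against that wrapper (the file name is hypothetical; the import and call pattern mirrors the edits in this diff):

```js
// bench/snippets/example.mjs (hypothetical) — same shape as the snippets touched in this diff.
import { bench, group, run } from "../runner.mjs";

group("noop", () => {
  bench("sync", () => {});
  bench("async", async () => {});
});

// When BENCHMARK_RUNNER is set, the wrapper switches mitata to JSON output;
// otherwise it forwards straight to mitata's run().
run();
```

Note that the wrapper's `group()` discards its name argument, which lines up with the hunks in this diff that fold the group name back into the individual `bench()` labels on the `runner.mjs` side.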


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
function doIt(...args) {
// we use .at() to prevent constant folding optimizations


@@ -1,5 +1,5 @@
// https://github.com/oven-sh/bun/issues/1096
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const identity = x => x;


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var myArray = new Array(5);
bench("[1, 2, 3, 4, 5].shift()", () => {


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var comparator = (a, b) => a - b;
const numbers = [


@@ -1,6 +1,6 @@
// @runtime bun
import { ArrayBufferSink } from "bun";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var short = "Hello World!";
var shortUTF16 = "Hello World 💕💕💕";


@@ -1,5 +1,5 @@
import * as assert from "assert";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("deepEqual", () => {
assert.deepEqual({ foo: "123", bar: "baz" }, { foo: "123", bar: "baz" });


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("noop", function () {});
bench("async function(){}", async function () {});


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
function makeBenchmark(size) {
const latin1 = btoa("A".repeat(size));


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
bench("new Blob(['hello world'])", function () {
return new Blob(["hello world"]);


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
function makeBenchmark(size, isToString) {
const base64Input = Buffer.alloc(size, "latin1").toString("base64");


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
const first = Buffer.allocUnsafe(size);


@@ -1,7 +1,7 @@
// @runtime bun,node,deno
import { Buffer } from "node:buffer";
import process from "node:process";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const N = parseInt(process.env.RUN_COUNTER ?? "10000", 10);
var isBuffer = new Buffer(0);


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
for (let fillSize of [4, 8, 16, 11]) {


@@ -1,6 +1,6 @@
import { Buffer } from "node:buffer";
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const bigBuffer = Buffer.from("hello world".repeat(10000));
const converted = bigBuffer.toString("base64");


@@ -1,22 +1,22 @@
import Color from "color";
import tinycolor from "tinycolor2";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
const inputs = ["#f00", "rgb(255, 0, 0)", "rgba(255, 0, 0, 1)", "hsl(0, 100%, 50%)"];
for (const input of inputs) {
group(`${input}`, () => {
if (typeof Bun !== "undefined") {
bench(`Bun.color() (${input})`, () => {
bench("Bun.color()", () => {
Bun.color(input, "css");
});
}
bench(`color (${input})`, () => {
bench("color", () => {
Color(input).hex();
});
bench(`'tinycolor2' (${input})`, () => {
bench("'tinycolor2'", () => {
tinycolor(input).toHexString();
});
});


@@ -1,6 +1,6 @@
import { allocUnsafe } from "bun";
import { readFileSync } from "fs";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
function polyfill(chunks) {
var size = 0;
@@ -41,16 +41,15 @@ const chunkGroups = [
];
for (const chunks of chunkGroups) {
const name = `${chunks.reduce((prev, curr, i, a) => prev + curr.byteLength, 0)} bytes for ${chunks.length} chunks`
group(name, () => {
bench(`Bun.concatArrayBuffers (${name})`, () => {
group(`${chunks.reduce((prev, curr, i, a) => prev + curr.byteLength, 0)} bytes for ${chunks.length} chunks`, () => {
bench("Bun.concatArrayBuffers", () => {
Bun.concatArrayBuffers(chunks);
});
bench(`Uint8Array.set (${name})`, () => {
bench("Uint8Array.set", () => {
polyfill(chunks);
});
bench(`Uint8Array.set (uninitialized memory) (${name})`, () => {
bench("Uint8Array.set (uninitialized memory)", () => {
polyfillUninitialized(chunks);
});
});


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const json = {
login: "wongmjane",


@@ -2,7 +2,7 @@ import { mkdirSync, rmSync, writeFileSync } from "fs";
import { cp } from "fs/promises";
import { tmpdir } from "os";
import { join, resolve } from "path";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
import { fileURLToPath } from "url";
const hugeDirectory = (() => {


@@ -1,6 +1,6 @@
// https://github.com/oven-sh/bun/issues/2190
import { bench, run } from "mitata";
import { createHash } from "node:crypto";
import { bench, run } from "../runner.mjs";
const data =
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";


@@ -1,5 +1,5 @@
// so it can run in environments without node module resolution
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
import crypto from "node:crypto";


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("crypto.randomUUID()", () => {
return crypto.randomUUID();


@@ -1,6 +1,6 @@
// https://github.com/oven-sh/bun/issues/2190
import { bench, run } from "mitata";
import { createHash } from "node:crypto";
import { bench, run } from "../runner.mjs";
const data =
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";


@@ -1,6 +1,6 @@
// so it can run in environments without node module resolution
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
var foo = new Uint8Array(65536);
bench("crypto.getRandomValues(65536)", () => {
crypto.getRandomValues(foo);


@@ -1,5 +1,5 @@
import fastDeepEquals from "fast-deep-equal/es6/index";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
// const Date = globalThis.Date;
function func1() {}
@@ -490,7 +490,7 @@ for (let { tests, description } of fixture) {
var expected;
group(describe, () => {
for (let equalsFn of [Bun.deepEquals, fastDeepEquals]) {
bench(`${describe}: ${equalsFn.name}`, () => {
bench(equalsFn.name, () => {
expected = equalsFn(value1, value2);
if (expected !== equal) {
throw new Error(`Expected ${expected} to be ${equal} for ${description}`);


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const properties = {
closed: {


@@ -1,5 +1,5 @@
import { lookup, resolve } from "node:dns/promises";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("(cached) dns.lookup remote x 50", async () => {
var tld = "example.com";


@@ -1,10 +1,10 @@
import { dns } from "bun";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
async function forEachBackend(name, fn) {
group(name, () => {
for (let backend of ["libc", "c-ares", process.platform === "darwin" ? "system" : ""].filter(Boolean))
bench(`${backend} (${name})`, fn(backend));
bench(backend, fn(backend));
});
}


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const encoder = new TextEncoder();


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var err = new Error();
bench("Error.captureStackTrace(err)", () => {


@@ -1,4 +1,4 @@
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
var bunEscapeHTML = globalThis.escapeHTML || Bun.escapeHTML;
@@ -92,21 +92,24 @@ function reactEscapeHtml(string) {
// }
for (let input of [
"long string, nothing to escape... ".repeat(9999999 * 3),
`long string, nothing to escape... `.repeat(9999999 * 3),
FIXTURE.repeat(8000),
// "[unicode]" + FIXTURE_WITH_UNICODE,
]) {
const name = `"${input.substring(0, Math.min(input.length, 32))}" (${new Intl.NumberFormat().format(input.length / 100_000_000_0)} GB)`
group(
{
summary: true,
name
name:
`"` +
input.substring(0, Math.min(input.length, 32)) +
`"` +
` (${new Intl.NumberFormat().format(input.length / 100_000_000_0)} GB)`,
},
() => {
// bench(`ReactDOM.escapeHTML`, () => reactEscapeHtml(input));
// bench(`html-entities.encode`, () => htmlEntityEncode(input));
// bench(`he.escape`, () => heEscape(input));
bench(`Bun.escapeHTML (${name})`, () => bunEscapeHTML(input));
bench(`Bun.escapeHTML`, () => bunEscapeHTML(input));
},
);
}


@@ -1,5 +1,5 @@
import { dlopen } from "bun:ffi";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
const types = {
returns_true: {


@@ -1,5 +1,5 @@
// so it can run in environments without node module resolution
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const blob = new Blob(["foo", "bar", "baz"]);
bench("FormData.append", () => {


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
// pure JS implementation will optimze this out
bench("new Headers", function () {


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const input =
"Hello, World! foo bar baz qux quux corge grault garply waldo fred plugh xyzzy thud z a b c d e f g h i j k l m n o p q r s t u v w x y z".split(


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var obj = {
"restApiRoot": "/api",


@@ -1,8 +0,0 @@
import { bench, run } from "../runner.mjs";
bench("JSON.stringify({hello: 'world'})", () => JSON.stringify({ hello: "world" }));
const otherUint8Array = new Uint8Array(1024);
bench("Uint8Array.from(otherUint8Array)", () => Uint8Array.from(otherUint8Array));
run();


@@ -1,6 +1,6 @@
// This is a stress test of some internals in How Bun does the module.exports assignment.
// If it crashes or throws then this fails
import("../runner.mjs").then(({ bench, run }) => {
import("./runner.mjs").then(({ bench, run }) => {
bench("Object.defineProperty(module, 'exports', { get() { return 42; } })", () => {
Object.defineProperty(module, "exports", {
get() {
@@ -36,9 +36,7 @@ import("../runner.mjs").then(({ bench, run }) => {
a: 1,
};
const log = !process?.env?.BENCHMARK_RUNNER ? console.log : () => {};
log(
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id],
@@ -51,11 +49,10 @@ import("../runner.mjs").then(({ bench, run }) => {
return 42;
};
log(module.exports);
log(module.exports, module.exports());
console.log(module.exports, module.exports());
queueMicrotask(() => {
log(
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id]?.exports,


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
// These are no-op C++ functions that are exported to JS.
const lazy = globalThis[Symbol.for("Bun.lazy")];


@@ -1,5 +1,5 @@
import { IncomingMessage } from "node:http";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const headers = {
date: "Mon, 06 Nov 2023 05:12:49 GMT",


@@ -1,6 +1,6 @@
// @runtime node, bun
import * as vm from "node:vm";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const context = {
animal: "cat",


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var noop = globalThis[Symbol.for("Bun.lazy")]("noop");
var { function: noopFn, callback } = noop;


@@ -1,5 +1,5 @@
// so it can run in environments without node module resolution
import { bench, run } from "../runner.mjs";
import { bench, run } from "../../node_modules/mitata/src/cli.mjs";
const obj = {
a: 1,


@@ -24,7 +24,7 @@ const obj = {
w: 23,
};
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var val = 0;
bench("Object.values(literal)", () => {


@@ -1,5 +1,5 @@
import { bench, run } from "mitata";
import { posix } from "path";
import { bench, run } from "../runner.mjs";
const pathConfigurations = [
"",


@@ -1,6 +1,6 @@
import { pbkdf2 } from "node:crypto";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const password = "password";
const salt = "salt";


@@ -1,5 +1,5 @@
import { peek } from "bun";
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
let pending = Bun.sleep(1000);
let resolved = Promise.resolve(1);


@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("performance.now x 1000", () => {
for (let i = 0; i < 1000; i++) {
performance.now();

Some files were not shown because too many files have changed in this diff.