Compare commits


3 Commits

Author          SHA1        Message                                                     Date
Jarred Sumner   4017fc04e1  Merge branch 'main' into jarred/fixy                        2024-10-29 12:25:50 -07:00
Jarred Sumner   1ba4eab690  Merge branch 'main' into jarred/fixy                        2024-10-24 21:21:16 -07:00
Jarred Sumner   0ebaa42d06  Fix crash with package-lock.json when using npm: aliases   2024-10-23 18:24:17 -07:00
774 changed files with 23827 additions and 47141 deletions

.buildkite/ci.mjs (961 changes, Executable file → Normal file)

File diff suppressed because it is too large.

View File

@@ -2,10 +2,106 @@
set -eo pipefail
function assert_build() {
if [ -z "$BUILDKITE_REPO" ]; then
echo "error: Cannot find repository for this build"
exit 1
fi
if [ -z "$BUILDKITE_COMMIT" ]; then
echo "error: Cannot find commit for this build"
exit 1
fi
}
function assert_buildkite_agent() {
if ! command -v buildkite-agent &> /dev/null; then
echo "error: Cannot find buildkite-agent, please install it:"
echo "https://buildkite.com/docs/agent/v3/install"
exit 1
fi
}
function assert_jq() {
assert_command "jq" "jq" "https://stedolan.github.io/jq/"
}
function assert_curl() {
assert_command "curl" "curl" "https://curl.se/download.html"
}
function assert_node() {
assert_command "node" "node" "https://nodejs.org/en/download/"
}
function assert_command() {
local command="$1"
local package="$2"
local help_url="$3"
if ! command -v "$command" &> /dev/null; then
echo "warning: $command is not installed, installing..."
if command -v brew &> /dev/null; then
HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package"
else
echo "error: Cannot install $command, please install it"
if [ -n "$help_url" ]; then
echo ""
echo "hint: See $help_url for help"
fi
exit 1
fi
fi
}
function assert_release() {
if [ "$RELEASE" == "1" ]; then
run_command buildkite-agent meta-data set canary "0"
fi
}
function assert_canary() {
local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
if [ -z "$canary" ]; then
local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github.com/([^/]+)/([^/]+).git#\1/\2#g')
local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")"
if [ "$tag" == "null" ]; then
canary="1"
else
local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by")
if [ "$revision" == "null" ]; then
canary="1"
else
canary="$revision"
fi
fi
run_command buildkite-agent meta-data set canary "$canary"
fi
}
function upload_buildkite_pipeline() {
local path="$1"
if [ ! -f "$path" ]; then
echo "error: Cannot find pipeline: $path"
exit 1
fi
run_command buildkite-agent pipeline upload "$path"
}
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
assert_build
assert_buildkite_agent
assert_jq
assert_curl
assert_node
assert_release
assert_canary
run_command node ".buildkite/ci.mjs"
if [ -f ".buildkite/ci.yml" ]; then
upload_buildkite_pipeline ".buildkite/ci.yml"
fi
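
For context on the tracing idiom used by `run_command` above: `set -x` makes the shell echo each wrapped command into the build log, and wrapping `set +x` in a group whose stderr is discarded keeps the trace-off line itself out of the log. A minimal standalone sketch, not part of the diff (the `echo` invocation is only illustrative):

```bash
#!/usr/bin/env bash
set -eo pipefail

run_command() {
  set -x                    # echo each command (with expanded arguments) to the log
  "$@"
  { set +x; } 2>/dev/null   # turn tracing back off without logging the "set +x" itself
}

# The log shows only "+ echo hello from CI", none of the tracing bookkeeping.
run_command echo "hello from CI"
```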

View File

@@ -162,27 +162,6 @@ function upload_s3_file() {
run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
}
function send_bench_webhook() {
if [ -z "$BENCHMARK_URL" ]; then
echo "error: \$BENCHMARK_URL is not set"
# exit 1 # TODO: this isn't live yet
return
fi
local tag="$1"
local commit="$BUILDKITE_COMMIT"
local artifact_path="${commit}"
if [ "$tag" == "canary" ]; then
artifact_path="${commit}-canary"
fi
local artifact_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/$artifact_path/bun-linux-x64.zip"
local webhook_url="$BENCHMARK_URL?tag=$tag&commit=$commit&artifact_url=$artifact_url"
curl -X POST "$webhook_url"
}
function create_release() {
assert_main
assert_buildkite_agent
@@ -202,12 +181,6 @@ function create_release() {
bun-linux-x64-profile.zip
bun-linux-x64-baseline.zip
bun-linux-x64-baseline-profile.zip
bun-linux-aarch64-musl.zip
bun-linux-aarch64-musl-profile.zip
bun-linux-x64-musl.zip
bun-linux-x64-musl-profile.zip
bun-linux-x64-musl-baseline.zip
bun-linux-x64-musl-baseline-profile.zip
bun-windows-x64.zip
bun-windows-x64-profile.zip
bun-windows-x64-baseline.zip
@@ -233,7 +206,6 @@ function create_release() {
update_github_release "$tag"
create_sentry_release "$tag"
send_bench_webhook "$tag"
}
function assert_canary() {

View File

@@ -11,8 +11,5 @@ packages/**/bun-profile
src/bun.js/WebKit
src/bun.js/WebKit/LayoutTests
zig-build
.zig-cache
zig-out
build
vendor
node_modules
zig-cache
zig-out

View File

@@ -1,61 +0,0 @@
name: Update c-ares
on:
schedule:
- cron: "0 0 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check c-ares version
id: check-version
run: |
CURRENT_VERSION=$(grep -oP 'CARES_VERSION\s+\K\S+' cmake/targets/BuildCares.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find current version in BuildCares.cmake"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/c-ares/c-ares/releases/latest)
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ]; then
echo "Error: Could not fetch latest version from GitHub API"
exit 1
fi
if [ ${#LATEST_SHA} -ne 40 ]; then
echo "Error: Invalid SHA length"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
- name: Create PR if update needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
BRANCH="update-cares-${GITHUB_SHA::8}"
git checkout -b "$BRANCH"
sed -i "s/CARES_VERSION\s\+[0-9a-f]\+/CARES_VERSION ${{ steps.check-version.outputs.latest }}/" cmake/targets/BuildCares.cmake
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add cmake/targets/BuildCares.cmake
git commit -m "deps: update c-ares to ${{ steps.check-version.outputs.latest }}"
git push origin "$BRANCH"
gh pr create \
--title "deps: update c-ares to ${LATEST_SHA::8}" \
--body "Updates c-ares from ${CURRENT_VERSION::8} to ${LATEST_SHA::8}" \
--base main \
--head "$BRANCH"

View File

@@ -1,61 +0,0 @@
name: Update libarchive
on:
schedule:
- cron: "0 0 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check libarchive version
id: check-version
run: |
CURRENT_VERSION=$(grep -oP 'LIBARCHIVE_VERSION\s+\K\S+' cmake/targets/BuildLibArchive.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find current version in BuildLibArchive.cmake"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/libarchive/libarchive/releases/latest)
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ]; then
echo "Error: Could not fetch latest version from GitHub API"
exit 1
fi
if [ ${#LATEST_SHA} -ne 40 ]; then
echo "Error: Invalid SHA length"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
- name: Create PR if update needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
BRANCH="update-libarchive-${GITHUB_SHA::8}"
git checkout -b "$BRANCH"
sed -i "s/LIBARCHIVE_VERSION\s\+[0-9a-f]\+/LIBARCHIVE_VERSION ${{ steps.check-version.outputs.latest }}/" cmake/targets/BuildLibArchive.cmake
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add cmake/targets/BuildLibArchive.cmake
git commit -m "deps: update libarchive to ${{ steps.check-version.outputs.latest }}"
git push origin "$BRANCH"
gh pr create \
--title "deps: update libarchive to ${LATEST_SHA::8}" \
--body "Updates libarchive from ${CURRENT_VERSION::8} to ${LATEST_SHA::8}" \
--base main \
--head "$BRANCH"

View File

@@ -1,61 +0,0 @@
name: Update libdeflate
on:
schedule:
- cron: "0 0 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check libdeflate version
id: check-version
run: |
CURRENT_VERSION=$(grep -oP 'LIBDEFLATE_VERSION\s+\K\S+' cmake/targets/BuildLibDeflate.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find current version in BuildLibDeflate.cmake"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/ebiggers/libdeflate/releases/latest)
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ]; then
echo "Error: Could not fetch latest version from GitHub API"
exit 1
fi
if [ ${#LATEST_SHA} -ne 40 ]; then
echo "Error: Invalid SHA length"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
- name: Create PR if update needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
BRANCH="update-libdeflate-${GITHUB_SHA::8}"
git checkout -b "$BRANCH"
sed -i "s/LIBDEFLATE_VERSION\s\+[0-9a-f]\+/LIBDEFLATE_VERSION ${{ steps.check-version.outputs.latest }}/" cmake/targets/BuildLibDeflate.cmake
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add cmake/targets/BuildLibDeflate.cmake
git commit -m "deps: update libdeflate to ${{ steps.check-version.outputs.latest }}"
git push origin "$BRANCH"
gh pr create \
--title "deps: update libdeflate to ${LATEST_SHA::8}" \
--body "Updates libdeflate from ${CURRENT_VERSION::8} to ${LATEST_SHA::8}" \
--base main \
--head "$BRANCH"

View File

@@ -1,61 +0,0 @@
name: Update lolhtml
on:
schedule:
- cron: "0 0 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check lolhtml version
id: check-version
run: |
CURRENT_VERSION=$(grep -oP 'LOLHTML_VERSION\s+\K\S+' cmake/targets/BuildLolHtml.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find current version in BuildLolHtml.cmake"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/cloudflare/lol-html/releases/latest)
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ]; then
echo "Error: Could not fetch latest version from GitHub API"
exit 1
fi
if [ ${#LATEST_SHA} -ne 40 ]; then
echo "Error: Invalid SHA length"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
- name: Create PR if update needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
BRANCH="update-lolhtml-${GITHUB_SHA::8}"
git checkout -b "$BRANCH"
sed -i "s/LOLHTML_VERSION\s\+[0-9a-f]\+/LOLHTML_VERSION ${{ steps.check-version.outputs.latest }}/" cmake/targets/BuildLolHtml.cmake
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add cmake/targets/BuildLolHtml.cmake
git commit -m "deps: update lolhtml to ${{ steps.check-version.outputs.latest }}"
git push origin "$BRANCH"
gh pr create \
--title "deps: update lolhtml to ${LATEST_SHA::8}" \
--body "Updates lolhtml from ${CURRENT_VERSION::8} to ${LATEST_SHA::8}" \
--base main \
--head "$BRANCH"

View File

@@ -1,61 +0,0 @@
name: Update lshpack
on:
schedule:
- cron: "0 0 * * 0"
workflow_dispatch:
jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write
steps:
- uses: actions/checkout@v4
- name: Check lshpack version
id: check-version
run: |
CURRENT_VERSION=$(grep -oP 'LSHPACK_VERSION\s+\K\S+' cmake/targets/BuildLshpack.cmake)
if [ -z "$CURRENT_VERSION" ]; then
echo "Error: Could not find current version in BuildLshpack.cmake"
exit 1
fi
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/litespeedtech/ls-hpack/releases/latest)
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ]; then
echo "Error: Could not fetch latest version from GitHub API"
exit 1
fi
if [ ${#LATEST_SHA} -ne 40 ]; then
echo "Error: Invalid SHA length"
exit 1
fi
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
- name: Create PR if update needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
BRANCH="update-lshpack-${GITHUB_SHA::8}"
git checkout -b "$BRANCH"
sed -i "s/LSHPACK_VERSION\s\+[0-9a-f]\+/LSHPACK_VERSION ${{ steps.check-version.outputs.latest }}/" cmake/targets/BuildLshpack.cmake
git config user.name "github-actions[bot]"
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
git add cmake/targets/BuildLshpack.cmake
git commit -m "deps: update lshpack to ${{ steps.check-version.outputs.latest }}"
git push origin "$BRANCH"
gh pr create \
--title "deps: update lshpack to ${LATEST_SHA::8}" \
--body "Updates lshpack from ${CURRENT_VERSION::8} to ${LATEST_SHA::8}" \
--base main \
--head "$BRANCH"

.gitignore (19 changes, vendored)
View File

@@ -26,7 +26,6 @@
*.db
*.dmg
*.dSYM
*.generated.ts
*.jsb
*.lib
*.log
@@ -54,8 +53,8 @@
/test-report.md
/test.js
/test.ts
/test.zig
/testdir
/test.zig
build
build.ninja
bun-binary
@@ -112,10 +111,8 @@ pnpm-lock.yaml
profile.json
README.md.template
release/
scripts/env.local
sign.*.json
sign.json
src/bake/generated.ts
src/bun.js/bindings-obj
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/debug-bindings-obj
@@ -134,7 +131,6 @@ src/runtime.version
src/tests.zig
test.txt
test/js/bun/glob/fixtures
test/node.js/upstream
tsconfig.tsbuildinfo
txt.js
x64
@@ -146,9 +142,6 @@ test/node.js/upstream
scripts/env.local
*.generated.ts
src/bake/generated.ts
test/cli/install/registry/packages/publish-pkg-*
test/cli/install/registry/packages/@secret/publish-pkg-8
test/js/third_party/prisma/prisma/sqlite/dev.db-journal
# Dependencies
/vendor
@@ -156,24 +149,22 @@ test/js/third_party/prisma/prisma/sqlite/dev.db-journal
# Dependencies (before CMake)
# These can be removed in the far future
/src/bun.js/WebKit
/src/deps/WebKit
/src/deps/boringssl
/src/deps/brotli
/src/deps/c*ares
/src/deps/lol*html
/src/deps/libarchive
/src/deps/libdeflate
/src/deps/libuv
/src/deps/lol*html
/src/deps/ls*hpack
/src/deps/mimalloc
/src/deps/picohttpparser
/src/deps/tinycc
/src/deps/WebKit
/src/deps/zig
/src/deps/zlib
/src/deps/zstd
/src/deps/zlib
/src/deps/zig
# Generated files
.buildkite/ci.yml
*.sock
scratch*.{js,ts,tsx,cjs,mjs}

.vscode/launch.json (61 changes, generated, vendored)
View File

@@ -22,8 +22,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -39,8 +37,6 @@
"BUN_DEBUG_jest": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -62,8 +58,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -79,8 +73,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -96,8 +88,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -113,8 +103,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -131,8 +119,6 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -154,8 +140,6 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -176,8 +160,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -196,8 +178,6 @@
"GOMAXPROCS": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -212,8 +192,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -224,15 +202,10 @@
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
// "BUN_DEBUG_DEBUGGER": "1",
// "BUN_DEBUG_INTERNAL_DEBUGGER": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
// "BUN_INSPECT": "ws+unix:///var/folders/jk/8fzl9l5119598vsqrmphsw7m0000gn/T/tl15npi7qtf.sock?report=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -247,8 +220,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -264,8 +235,6 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -286,8 +255,6 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -309,8 +276,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -326,8 +291,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -343,8 +306,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -360,8 +321,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -377,8 +336,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -395,8 +352,6 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -418,8 +373,6 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -440,8 +393,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
// bun test [*]
{
@@ -457,8 +408,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -473,8 +422,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -490,8 +437,6 @@
"BUN_INSPECT": "ws://localhost:0/",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -511,8 +456,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -527,8 +470,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
// Windows: bun test [file]
{
@@ -1241,8 +1182,6 @@
},
],
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
],
"inputs": [

View File

@@ -78,7 +78,7 @@
"prettier.prettierPath": "./node_modules/prettier",
// TypeScript
"typescript.tsdk": "node_modules/typescript/lib",
"typescript.tsdk": "${workspaceFolder}/node_modules/typescript/lib",
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},

.vscode/tasks.json (93 changes, vendored)
View File

@@ -2,57 +2,50 @@
"version": "2.0.0",
"tasks": [
{
"label": "Build Bun",
"type": "shell",
"command": "bun run build",
"group": {
"kind": "build",
"isDefault": true,
"type": "process",
"label": "Install Dependencies",
"command": "scripts/all-dependencies.sh",
"windows": {
"command": "scripts/all-dependencies.ps1",
},
"problemMatcher": [
{
"owner": "zig",
"fileLocation": ["relative", "${workspaceFolder}"],
"pattern": [
{
"regexp": "^(.+?):(\\d+):(\\d+): (error|warning|note): (.+)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5,
},
{
"regexp": "^\\s+(.+)$",
"message": 1,
"loop": true,
},
],
},
{
"owner": "clang",
"fileLocation": ["relative", "${workspaceFolder}"],
"pattern": [
{
"regexp": "^([^:]+):(\\d+):(\\d+):\\s+(warning|error|note|remark):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5,
},
{
"regexp": "^\\s*(.*)$",
"message": 1,
"loop": true,
},
],
},
],
"presentation": {
"reveal": "always",
"panel": "shared",
"clear": true,
"icon": {
"id": "arrow-down",
},
"options": {
"cwd": "${workspaceFolder}",
},
},
{
"type": "process",
"label": "Setup Environment",
"dependsOn": ["Install Dependencies"],
"command": "scripts/setup.sh",
"windows": {
"command": "scripts/setup.ps1",
},
"icon": {
"id": "check",
},
"options": {
"cwd": "${workspaceFolder}",
},
},
{
"type": "process",
"label": "Build Bun",
"dependsOn": ["Setup Environment"],
"command": "bun",
"args": ["run", "build"],
"icon": {
"id": "gear",
},
"options": {
"cwd": "${workspaceFolder}",
},
"isBuildCommand": true,
"runOptions": {
"instanceLimit": 1,
"reevaluateOnRerun": true,
},
},
],

View File

@@ -11,7 +11,7 @@ Bun currently requires `glibc >=2.32` in development which means if you're on Ub
Using your system's package manager, install Bun's dependencies:
{% codetabs group="os" %}
{% codetabs %}
```bash#macOS (Homebrew)
$ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby
@@ -60,7 +60,7 @@ $ brew install bun
Bun requires LLVM 16 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
{% codetabs group="os" %}
{% codetabs %}
```bash#macOS (Homebrew)
$ brew install llvm@18
@@ -97,7 +97,7 @@ $ which clang-16
If not, run this to manually add it:
{% codetabs group="os" %}
{% codetabs %}
```bash#macOS (Homebrew)
# use fish_add_path if you're using fish
@@ -285,17 +285,17 @@ If you see this error when compiling, run:
$ xcode-select --install
```
### Cannot find `libatomic.a`
## Cannot find `libatomic.a`
Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:
```bash
$ bun run build -DUSE_STATIC_LIBATOMIC=OFF
$ bun setup -DUSE_STATIC_LIBATOMIC=OFF
```
The built version of Bun may not work on other systems if compiled this way.
### ccache conflicts with building TinyCC on macOS
## ccache conflicts with building TinyCC on macOS
If you run into issues with `ccache` when building TinyCC, try reinstalling ccache
@@ -303,9 +303,3 @@ If you run into issues with `ccache` when building TinyCC, try reinstalling ccac
brew uninstall ccache
brew install ccache
```
## Using bun-debug
- Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging)
- Enable logging for a specific zig scope: `BUN_DEBUG_EventLoop=1 bun-debug ...` (to allow `std.log.scoped(.EventLoop)`)
- Bun transpiles every file it runs. To see the actual executed source in a debug build, look in `/tmp/bun-debug-src/...path/to/file`; for example, the transpiled version of `/home/bun/index.ts` would be in `/tmp/bun-debug-src/home/bun/index.ts` (see the sketch below)
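
For quick reference, a minimal sketch of how these flags combine on the command line (assuming a locally built `bun-debug` binary on `PATH`; the script path is only illustrative):

```bash
# Silence all debug logging
$ BUN_DEBUG_QUIET_LOGS=1 bun-debug ./index.ts

# Enable only the EventLoop scope, i.e. std.log.scoped(.EventLoop)
$ BUN_DEBUG_EventLoop=1 bun-debug ./index.ts

# Inspect the transpiled source that actually ran, e.g. for /home/bun/index.ts
$ cat /tmp/bun-debug-src/home/bun/index.ts
```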

LATEST (2 changes)
View File

@@ -1 +1 @@
1.1.37
1.1.33

View File

@@ -1,5 +1,5 @@
<p align="center">
<a href="https://bun.sh"><img src="https://github.com/user-attachments/assets/50282090-adfd-4ddb-9e27-c30753c6b161" alt="Logo" height=170></a>
<a href="https://bun.sh"><img src="https://user-images.githubusercontent.com/709451/182802334-d9c42afe-f35d-4a7b-86ea-9985f73f20c3.png" alt="Logo" height=170></a>
</p>
<h1 align="center">Bun</h1>

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("sync", () => {});
bench("async", async () => {});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
bench("sync", () => {});
bench("async", async () => {});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("sync", () => {});
bench("async", async () => {});

Binary file not shown.

View File

@@ -1,5 +1,5 @@
import { bench, run } from "mitata";
import { copyFileSync, statSync, writeFileSync } from "node:fs";
import { bench, run } from "../runner.mjs";
function runner(ready) {
for (let size of [1, 10, 100, 1000, 10000, 100000, 1000000, 10000000]) {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const crypto = require("node:crypto");
const keyPair = crypto.generateKeyPairSync("rsa", {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const crypto = require("node:crypto");
const keyPair = crypto.generateKeyPairSync("rsa", {

View File

@@ -1,5 +1,5 @@
import { expect } from "bun:test";
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const MAP_SIZE = 10_000;

View File

@@ -1,5 +1,5 @@
import { expect } from "bun:test";
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const SET_SIZE = 10_000;

View File

@@ -1,5 +1,5 @@
import { group } from "mitata";
import EventEmitterNative from "node:events";
import { group } from "../runner.mjs";
export const implementations = [
{

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
// Pseudo RNG is derived from https://stackoverflow.com/a/424445

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const count = 100;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const count = 100;

View File

@@ -1,5 +1,5 @@
import { CString, dlopen, ptr } from "bun:ffi";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "mitata";
const { napiNoop, napiHash, napiString } = require(import.meta.dir + "/src/ffi_napi_bench.node");

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "../node_modules/mitata/src/cli.mjs";
const extension = "darwin" !== Deno.build.os ? "so" : "dylib";
const path = new URL("src/target/release/libffi_napi_bench." + extension, import.meta.url).pathname;

View File

@@ -1,5 +1,5 @@
import { bench, group, run } from "mitata";
import { createRequire } from "node:module";
import { bench, group, run } from "../runner.mjs";
const require = createRequire(import.meta.url);
const { napiNoop, napiHash, napiString } = require("./src/ffi_napi_bench.node");

View File

@@ -1,5 +1,5 @@
import braces from "braces";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "mitata";
// const iterations = 1000;
const iterations = 100;
@@ -10,16 +10,15 @@ const veryComplexPattern = "{a,b,HI{c,e,LMAO{d,f}Q}}{1,2,{3,4},5}";
console.log(braces(complexPattern, { expand: true }));
function benchPattern(pattern, name) {
const _name = `${name} pattern: "${pattern}"`;
group({ name: _name, summary: true }, () => {
group({ name: `${name} pattern: "${pattern}"`, summary: true }, () => {
if (typeof Bun !== "undefined")
bench(`Bun (${_name})`, () => {
bench("Bun", () => {
for (let i = 0; i < iterations; i++) {
Bun.$.braces(pattern);
}
});
bench(`micromatch/braces ${_name}`, () => {
bench("micromatch/braces", () => {
for (let i = 0; i < iterations; i++) {
braces(pattern, { expand: true });
}

View File

@@ -1,5 +1,5 @@
import micromatch from "micromatch";
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);

View File

@@ -1,6 +1,6 @@
import fg from "fast-glob";
import { fdir } from "fdir";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "mitata";
const normalPattern = "*.ts";
const recursivePattern = "**/*.ts";

View File

@@ -1,5 +1,5 @@
import { gunzipSync, gzipSync } from "bun";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "mitata";
const data = await Bun.file(require.resolve("@babel/standalone/babel.min.js")).arrayBuffer();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const data = new TextEncoder().encode("Hello World!".repeat(9999));

View File

@@ -1,7 +1,7 @@
import { readFileSync } from "fs";
import { bench, run } from "mitata";
import { createRequire } from "module";
import { gunzipSync, gzipSync } from "zlib";
import { bench, run } from "../runner.mjs";
const require = createRequire(import.meta.url);
const data = readFileSync(require.resolve("@babel/standalone/babel.min.js"));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("JSON.stringify({hello: 'world'})", () => JSON.stringify({ hello: "world" }));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("console.log('hello')", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
bench("console.log", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("console.log", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../../runner.mjs";
import { bench, run } from "mitata";
import {
arch,
cpus,

View File

@@ -1,3 +1,4 @@
import { bench, run } from "mitata";
import {
arch,
cpus,
@@ -18,7 +19,6 @@ import {
userInfo,
version,
} from "node:os";
import { bench, run } from "../../runner.mjs";
bench("cpus()", () => cpus());
bench("networkInterfaces()", () => networkInterfaces());

View File

@@ -13,9 +13,7 @@
"execa": "^8.0.1",
"fast-glob": "3.3.1",
"fdir": "^6.1.0",
"mitata": "^1.0.10",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"mitata": "^0.1.6",
"string-width": "7.1.0",
"tinycolor2": "^1.6.0",
"zx": "^7.2.3"

View File

@@ -1,19 +0,0 @@
import * as Mitata from "mitata";
import process from "node:process";
const asJSON = !!process?.env?.BENCHMARK_RUNNER;
/** @param {Parameters<typeof Mitata["run"]>["0"]} opts */
export function run(opts = {}) {
if (asJSON) {
opts.format = "json";
}
return Mitata.run(opts);
}
export const bench = Mitata.bench;
export function group(_name, fn) {
return Mitata.group(fn);
}
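
As the wrapper above shows, setting `BENCHMARK_RUNNER` switches mitata to JSON output; a hypothetical invocation (the snippet path is only illustrative) would look like:

```bash
# Human-readable output
$ bun bench/snippets/crypto.mjs

# JSON output for an external benchmark runner, per the BENCHMARK_RUNNER check above
$ BENCHMARK_RUNNER=1 bun bench/snippets/crypto.mjs > results.json
```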

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
function doIt(...args) {
// we use .at() to prevent constant folding optimizations

View File

@@ -1,5 +1,5 @@
// https://github.com/oven-sh/bun/issues/1096
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const identity = x => x;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var myArray = new Array(5);
bench("[1, 2, 3, 4, 5].shift()", () => {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var comparator = (a, b) => a - b;
const numbers = [

View File

@@ -1,6 +1,6 @@
// @runtime bun
import { ArrayBufferSink } from "bun";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var short = "Hello World!";
var shortUTF16 = "Hello World 💕💕💕";

View File

@@ -1,5 +1,5 @@
import * as assert from "assert";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("deepEqual", () => {
assert.deepEqual({ foo: "123", bar: "baz" }, { foo: "123", bar: "baz" });

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("noop", function () {});
bench("async function(){}", async function () {});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
function makeBenchmark(size) {
const latin1 = btoa("A".repeat(size));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
bench("new Blob(['hello world'])", function () {
return new Blob(["hello world"]);

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
function makeBenchmark(size, isToString) {
const base64Input = Buffer.alloc(size, "latin1").toString("base64");

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
const first = Buffer.allocUnsafe(size);

View File

@@ -1,7 +1,7 @@
// @runtime bun,node,deno
import { Buffer } from "node:buffer";
import process from "node:process";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const N = parseInt(process.env.RUN_COUNTER ?? "10000", 10);
var isBuffer = new Buffer(0);

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
for (let fillSize of [4, 8, 16, 11]) {

View File

@@ -1,6 +1,6 @@
import { Buffer } from "node:buffer";
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const bigBuffer = Buffer.from("hello world".repeat(10000));
const converted = bigBuffer.toString("base64");

View File

@@ -1,22 +1,22 @@
import Color from "color";
import tinycolor from "tinycolor2";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
const inputs = ["#f00", "rgb(255, 0, 0)", "rgba(255, 0, 0, 1)", "hsl(0, 100%, 50%)"];
for (const input of inputs) {
group(`${input}`, () => {
if (typeof Bun !== "undefined") {
bench(`Bun.color() (${input})`, () => {
bench("Bun.color()", () => {
Bun.color(input, "css");
});
}
bench(`color (${input})`, () => {
bench("color", () => {
Color(input).hex();
});
bench(`'tinycolor2' (${input})`, () => {
bench("'tinycolor2'", () => {
tinycolor(input).toHexString();
});
});

View File

@@ -1,6 +1,6 @@
import { allocUnsafe } from "bun";
import { readFileSync } from "fs";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
function polyfill(chunks) {
var size = 0;
@@ -41,16 +41,15 @@ const chunkGroups = [
];
for (const chunks of chunkGroups) {
const name = `${chunks.reduce((prev, curr, i, a) => prev + curr.byteLength, 0)} bytes for ${chunks.length} chunks`
group(name, () => {
bench(`Bun.concatArrayBuffers (${name})`, () => {
group(`${chunks.reduce((prev, curr, i, a) => prev + curr.byteLength, 0)} bytes for ${chunks.length} chunks`, () => {
bench("Bun.concatArrayBuffers", () => {
Bun.concatArrayBuffers(chunks);
});
bench(`Uint8Array.set (${name})`, () => {
bench("Uint8Array.set", () => {
polyfill(chunks);
});
bench(`Uint8Array.set (uninitialized memory) (${name})`, () => {
bench("Uint8Array.set (uninitialized memory)", () => {
polyfillUninitialized(chunks);
});
});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const json = {
login: "wongmjane",

View File

@@ -2,7 +2,7 @@ import { mkdirSync, rmSync, writeFileSync } from "fs";
import { cp } from "fs/promises";
import { tmpdir } from "os";
import { join, resolve } from "path";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
import { fileURLToPath } from "url";
const hugeDirectory = (() => {

View File

@@ -1,6 +1,6 @@
// https://github.com/oven-sh/bun/issues/2190
import { bench, run } from "mitata";
import { createHash } from "node:crypto";
import { bench, run } from "../runner.mjs";
const data =
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";

View File

@@ -1,5 +1,5 @@
// so it can run in environments without node module resolution
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
import crypto from "node:crypto";

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("crypto.randomUUID()", () => {
return crypto.randomUUID();

View File

@@ -1,6 +1,6 @@
// https://github.com/oven-sh/bun/issues/2190
import { bench, run } from "mitata";
import { createHash } from "node:crypto";
import { bench, run } from "../runner.mjs";
const data =
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";

View File

@@ -1,6 +1,6 @@
// so it can run in environments without node module resolution
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
var foo = new Uint8Array(65536);
bench("crypto.getRandomValues(65536)", () => {
crypto.getRandomValues(foo);

View File

@@ -1,5 +1,5 @@
import fastDeepEquals from "fast-deep-equal/es6/index";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
// const Date = globalThis.Date;
function func1() {}
@@ -490,7 +490,7 @@ for (let { tests, description } of fixture) {
var expected;
group(describe, () => {
for (let equalsFn of [Bun.deepEquals, fastDeepEquals]) {
bench(`${describe}: ${equalsFn.name}`, () => {
bench(equalsFn.name, () => {
expected = equalsFn(value1, value2);
if (expected !== equal) {
throw new Error(`Expected ${expected} to be ${equal} for ${description}`);

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const properties = {
closed: {

View File

@@ -1,5 +1,5 @@
import { lookup, resolve } from "node:dns/promises";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("(cached) dns.lookup remote x 50", async () => {
var tld = "example.com";

View File

@@ -1,10 +1,10 @@
import { dns } from "bun";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
async function forEachBackend(name, fn) {
group(name, () => {
for (let backend of ["libc", "c-ares", process.platform === "darwin" ? "system" : ""].filter(Boolean))
bench(`${backend} (${name})`, fn(backend));
bench(backend, fn(backend));
});
}

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const encoder = new TextEncoder();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var err = new Error();
bench("Error.captureStackTrace(err)", () => {

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
var bunEscapeHTML = globalThis.escapeHTML || Bun.escapeHTML;
@@ -92,21 +92,24 @@ function reactEscapeHtml(string) {
// }
for (let input of [
"long string, nothing to escape... ".repeat(9999999 * 3),
`long string, nothing to escape... `.repeat(9999999 * 3),
FIXTURE.repeat(8000),
// "[unicode]" + FIXTURE_WITH_UNICODE,
]) {
const name = `"${input.substring(0, Math.min(input.length, 32))}" (${new Intl.NumberFormat().format(input.length / 100_000_000_0)} GB)`
group(
{
summary: true,
name
name:
`"` +
input.substring(0, Math.min(input.length, 32)) +
`"` +
` (${new Intl.NumberFormat().format(input.length / 100_000_000_0)} GB)`,
},
() => {
// bench(`ReactDOM.escapeHTML`, () => reactEscapeHtml(input));
// bench(`html-entities.encode`, () => htmlEntityEncode(input));
// bench(`he.escape`, () => heEscape(input));
bench(`Bun.escapeHTML (${name})`, () => bunEscapeHTML(input));
bench(`Bun.escapeHTML`, () => bunEscapeHTML(input));
},
);
}

View File

@@ -1,5 +1,5 @@
import { dlopen } from "bun:ffi";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
const types = {
returns_true: {

View File

@@ -1,5 +1,5 @@
// so it can run in environments without node module resolution
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const blob = new Blob(["foo", "bar", "baz"]);
bench("FormData.append", () => {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
// pure JS implementation will optimze this out
bench("new Headers", function () {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const input =
"Hello, World! foo bar baz qux quux corge grault garply waldo fred plugh xyzzy thud z a b c d e f g h i j k l m n o p q r s t u v w x y z".split(

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var obj = {
"restApiRoot": "/api",

View File

@@ -1,6 +1,6 @@
// This is a stress test of some internals in How Bun does the module.exports assignment.
// If it crashes or throws then this fails
import("../runner.mjs").then(({ bench, run }) => {
import("./runner.mjs").then(({ bench, run }) => {
bench("Object.defineProperty(module, 'exports', { get() { return 42; } })", () => {
Object.defineProperty(module, "exports", {
get() {
@@ -36,9 +36,7 @@ import("../runner.mjs").then(({ bench, run }) => {
a: 1,
};
const log = !process?.env?.BENCHMARK_RUNNER ? console.log : () => {};
log(
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id],
@@ -51,11 +49,10 @@ import("../runner.mjs").then(({ bench, run }) => {
return 42;
};
log(module.exports);
log(module.exports, module.exports());
console.log(module.exports, module.exports());
queueMicrotask(() => {
log(
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id]?.exports,

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
// These are no-op C++ functions that are exported to JS.
const lazy = globalThis[Symbol.for("Bun.lazy")];

View File

@@ -1,5 +1,5 @@
import { IncomingMessage } from "node:http";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const headers = {
date: "Mon, 06 Nov 2023 05:12:49 GMT",

View File

@@ -1,6 +1,6 @@
// @runtime node, bun
import * as vm from "node:vm";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const context = {
animal: "cat",

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var noop = globalThis[Symbol.for("Bun.lazy")]("noop");
var { function: noopFn, callback } = noop;

View File

@@ -1,5 +1,5 @@
// so it can run in environments without node module resolution
import { bench, run } from "../runner.mjs";
import { bench, run } from "../../node_modules/mitata/src/cli.mjs";
const obj = {
a: 1,

View File

@@ -24,7 +24,7 @@ const obj = {
w: 23,
};
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var val = 0;
bench("Object.values(literal)", () => {

View File

@@ -1,5 +1,5 @@
import { bench, run } from "mitata";
import { posix } from "path";
import { bench, run } from "../runner.mjs";
const pathConfigurations = [
"",

View File

@@ -1,6 +1,6 @@
import { pbkdf2 } from "node:crypto";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const password = "password";
const salt = "salt";

View File

@@ -1,5 +1,5 @@
import { peek } from "bun";
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
let pending = Bun.sleep(1000);
let resolved = Promise.resolve(1);

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("performance.now x 1000", () => {
for (let i = 0; i < 1000; i++) {
performance.now();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
// This is a benchmark of the performance impact of using private properties.
bench("Polyfillprivate", () => {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("process.cwd()", () => {
process.cwd();

View File

@@ -1,5 +1,5 @@
import { performance } from "perf_hooks";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("process.memoryUsage()", () => {
process.memoryUsage();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("process.stderr.write('hey')", () => {
process.stderr.write("hey");

View File

@@ -1,6 +1,6 @@
import { renderToReadableStream as renderToReadableStreamBun } from "react-dom/server";
import { renderToReadableStream } from "react-dom/server.browser";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
const App = () => (
<div>

View File

@@ -1,7 +1,7 @@
import { createReadStream, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { sep } from "node:path";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
if (!Promise.withResolvers) {
Promise.withResolvers = function () {

Some files were not shown because too many files have changed in this diff.