Compare commits

..

7 Commits

Author SHA1 Message Date
Meghan Denny
1e682dec6a this one is safe 2024-10-25 17:16:07 -07:00
Meghan Denny
137c7e1db8 implement retry option 2024-10-25 17:16:07 -07:00
Meghan Denny
3604c45d3b these parameters were backwards 2024-10-25 17:16:07 -07:00
Meghan Denny
62f3f88aa0 misc cleanup 2024-10-25 17:16:07 -07:00
Meghan Denny
be0b3b9fd9 bun-types: make all the test modifiers have the doc comment too 2024-10-25 17:16:07 -07:00
Meghan Denny
fdb6ef0efa bindings: make messageWithTypeAndLevel a ConsoleObject method 2024-10-25 17:16:07 -07:00
Meghan Denny
868aa95ec2 bun:test: implement test.failing 2024-10-25 17:16:07 -07:00
668 changed files with 19886 additions and 101626 deletions

View File

@@ -7,32 +7,85 @@
import { writeFileSync } from "node:fs";
import { join } from "node:path";
import {
getCanaryRevision,
getChangedFiles,
getCommit,
getCommitMessage,
getLastSuccessfulBuild,
getMainBranch,
getRepositoryOwner,
getTargetBranch,
isBuildkite,
isFork,
isMainBranch,
isMergeQueue,
printEnvironment,
spawnSafe,
} from "../scripts/utils.mjs";
/**
 * Reads an environment variable.
 * @param {string} name - Name of the environment variable.
 * @param {boolean} [required=true] - Whether to throw if the variable is unset or empty.
 * @returns {string | undefined} The value, or undefined when optional and absent.
 * @throws {Error} When the variable is required but missing.
 */
function getEnv(name, required = true) {
  const value = process.env[name];
  if (value) {
    return value;
  }
  if (required) {
    throw new Error(`Missing environment variable: ${name}`);
  }
  return value;
}
/**
 * Derives the "owner/repo" slug from the Buildkite repository URL,
 * preferring the pull-request repository (forks) over the pipeline repository.
 * @returns {string} e.g. "oven-sh/bun"
 * @throws {Error} When the URL is not a github.com URL ending in ".git".
 */
function getRepository() {
  const url = getEnv("BUILDKITE_PULL_REQUEST_REPO", false) || getEnv("BUILDKITE_REPO");
  // Escape the dot so "github.com" matches literally — the previous pattern's
  // bare "." matched any character (e.g. "githubXcom").
  const match = url.match(/github\.com\/([^/]+)\/([^/]+)\.git$/);
  if (!match) {
    throw new Error(`Unsupported repository: ${url}`);
  }
  const [, owner, repo] = match;
  return `${owner}/${repo}`;
}
/** @returns {string} The commit SHA for this build (required by Buildkite). */
function getCommit() {
  const commit = getEnv("BUILDKITE_COMMIT");
  return commit;
}
/** @returns {string} The branch being built (required by Buildkite). */
function getBranch() {
  const branch = getEnv("BUILDKITE_BRANCH");
  return branch;
}
/** @returns {string} The pipeline's default branch, falling back to "main". */
function getMainBranch() {
  const defaultBranch = getEnv("BUILDKITE_PIPELINE_DEFAULT_BRANCH", false);
  return defaultBranch || "main";
}
/**
 * @returns {boolean} Whether this build originates from a forked repository
 * (a pull-request repo is set and differs from the pipeline's repo).
 */
function isFork() {
  const pullRequestRepo = getEnv("BUILDKITE_PULL_REQUEST_REPO", false);
  if (!pullRequestRepo) {
    return false;
  }
  return pullRequestRepo !== getEnv("BUILDKITE_REPO");
}
/** @returns {boolean} True when building the default branch of the canonical repo. */
function isMainBranch() {
  if (isFork()) {
    return false;
  }
  return getBranch() === getMainBranch();
}
/** @returns {boolean} True when the branch was created by GitHub's merge queue. */
function isMergeQueue() {
  const branch = getEnv("BUILDKITE_BRANCH");
  return branch.startsWith("gh-readonly-queue");
}
/**
 * @returns {boolean} Whether this build was triggered by a pull request.
 * Per Buildkite's documentation, BUILDKITE_PULL_REQUEST is the PR *number*
 * or the string "false" — never "true" — so the previous `=== "true"`
 * comparison could never match.
 */
function isPullRequest() {
  const pullRequest = getEnv("BUILDKITE_PULL_REQUEST", false);
  return !!pullRequest && pullRequest !== "false";
}
/**
 * Lists files changed between the base and head commits using GitHub's
 * compare API. Returns undefined when the changes cannot be determined, so
 * callers can distinguish "no changes" ([]) from "unknown" (run everything).
 * NOTE(review): the unauthenticated compare endpoint is rate-limited and
 * caps the `files` array (300 entries per GitHub's docs) — acceptable here
 * since a failure only means CI runs the full pipeline.
 * @returns {Promise<string[] | undefined>} Changed filenames, excluding
 * removed/unchanged entries.
 */
async function getChangedFiles() {
  const repository = getRepository();
  const head = getCommit();
  // On the main branch, diff against the parent commit; otherwise against main.
  const base = isMainBranch() ? `${head}^1` : getMainBranch();
  try {
    const response = await fetch(`https://api.github.com/repos/${repository}/compare/${base}...${head}`);
    if (!response.ok) {
      // Surface API failures (rate limit, unknown commit) instead of
      // silently falling through to undefined.
      console.error(`Failed to compare ${base}...${head}: ${response.status} ${response.statusText}`);
      return undefined;
    }
    const { files } = await response.json();
    return files.filter(({ status }) => !/removed|unchanged/i.test(status)).map(({ filename }) => filename);
  } catch (error) {
    console.error(error);
    return undefined;
  }
}
/**
 * @param {string} filename - Path relative to the repository root.
 * @returns {boolean} True for documentation-only paths (docs, examples,
 * benchmarks, editor/CI config directories) or markdown files anywhere.
 */
function isDocumentation(filename) {
  const documentationDirs = /^(\.vscode|\.github|bench|docs|examples)/;
  const markdownFile = /\.(md)$/;
  return documentationDirs.test(filename) || markdownFile.test(filename);
}
/**
 * @param {string} filename - Path relative to the repository root.
 * @returns {boolean} True when the path begins with "test".
 */
function isTest(filename) {
  return filename.startsWith("test");
}
function toYaml(obj, indent = 0) {
const spaces = " ".repeat(indent);
let result = "";
for (const [key, value] of Object.entries(obj)) {
if (value === undefined) {
continue;
}
if (value === null) {
result += `${spaces}${key}: null\n`;
continue;
@@ -72,20 +125,14 @@ function toYaml(obj, indent = 0) {
return result;
}
function getPipeline(buildId) {
function getPipeline() {
/**
* Helpers
*/
const getKey = platform => {
const { os, arch, abi, baseline } = platform;
const { os, arch, baseline } = platform;
if (abi) {
if (baseline) {
return `${os}-${arch}-${abi}-baseline`;
}
return `${os}-${arch}-${abi}`;
}
if (baseline) {
return `${os}-${arch}-baseline`;
}
@@ -94,15 +141,13 @@ function getPipeline(buildId) {
};
const getLabel = platform => {
const { os, arch, abi, baseline, release } = platform;
let label = release ? `:${os}: ${release} ${arch}` : `:${os}: ${arch}`;
if (abi) {
label += `-${abi}`;
}
const { os, arch, baseline } = platform;
if (baseline) {
label += `-baseline`;
return `:${os}: ${arch}-baseline`;
}
return label;
return `:${os}: ${arch}`;
};
// https://buildkite.com/docs/pipelines/command-step#retry-attributes
@@ -136,16 +181,15 @@ function getPipeline(buildId) {
*/
const getBuildVendorStep = platform => {
const { os, arch, abi, baseline } = platform;
const { os, arch, baseline } = platform;
return {
key: `${getKey(platform)}-build-vendor`,
label: `build-vendor`,
label: `${getLabel(platform)} - build-vendor`,
agents: {
os,
arch,
abi,
queue: abi ? `build-${os}-${abi}` : `build-${os}`,
queue: `build-${os}`,
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
@@ -157,16 +201,15 @@ function getPipeline(buildId) {
};
const getBuildCppStep = platform => {
const { os, arch, abi, baseline } = platform;
const { os, arch, baseline } = platform;
return {
key: `${getKey(platform)}-build-cpp`,
label: `build-cpp`,
label: `${getLabel(platform)} - build-cpp`,
agents: {
os,
arch,
abi,
queue: abi ? `build-${os}-${abi}` : `build-${os}`,
queue: `build-${os}`,
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
@@ -179,12 +222,12 @@ function getPipeline(buildId) {
};
const getBuildZigStep = platform => {
const { os, arch, abi, baseline } = platform;
const toolchain = getKey(platform);
const { os, arch, baseline } = platform;
const toolchain = baseline ? `${os}-${arch}-baseline` : `${os}-${arch}`;
return {
key: `${getKey(platform)}-build-zig`,
label: `build-zig`,
label: `${getLabel(platform)} - build-zig`,
agents: {
queue: "build-zig",
},
@@ -198,11 +241,11 @@ function getPipeline(buildId) {
};
const getBuildBunStep = platform => {
const { os, arch, abi, baseline } = platform;
const { os, arch, baseline } = platform;
return {
key: `${getKey(platform)}-build-bun`,
label: `build-bun`,
label: `${getLabel(platform)} - build-bun`,
depends_on: [
`${getKey(platform)}-build-vendor`,
`${getKey(platform)}-build-cpp`,
@@ -211,7 +254,6 @@ function getPipeline(buildId) {
agents: {
os,
arch,
abi,
queue: `build-${os}`,
},
retry: getRetry(),
@@ -225,22 +267,22 @@ function getPipeline(buildId) {
};
const getTestBunStep = platform => {
const { os, arch, abi, distro, release } = platform;
const { os, arch, distro, release } = platform;
let name;
if (os === "darwin" || os === "windows") {
name = getLabel({ ...platform, release });
name = getLabel(platform);
} else {
name = getLabel({ ...platform, os: distro, release });
name = getLabel({ ...platform, os: distro });
}
let agents;
if (os === "darwin") {
agents = { os, arch, abi, queue: `test-darwin` };
agents = { os, arch, queue: `test-darwin` };
} else if (os === "windows") {
agents = { os, arch, abi, robobun: true };
agents = { os, arch, robobun: true };
} else {
agents = { os, arch, abi, distro, release, robobun: true };
agents = { os, arch, distro, release, robobun: true };
}
let command;
@@ -257,34 +299,16 @@ function getPipeline(buildId) {
parallelism = 10;
}
let depends;
let env;
if (buildId) {
env = {
BUILDKITE_ARTIFACT_BUILD_ID: buildId,
};
} else {
depends = [`${getKey(platform)}-build-bun`];
}
let retry;
if (os !== "windows") {
// When the runner fails on Windows, Buildkite only detects an exit code of 1.
// Because of this, we don't know if the run was fatal, or soft-failed.
retry = getRetry();
}
return {
key: `${getKey(platform)}-${distro}-${release.replace(/\./g, "")}-test-bun`,
label: `${name} - test-bun`,
depends_on: depends,
depends_on: [`${getKey(platform)}-build-bun`],
agents,
retry,
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
soft_fail: isMainBranch(),
parallelism,
command,
env,
};
};
@@ -296,10 +320,8 @@ function getPipeline(buildId) {
{ os: "darwin", arch: "aarch64" },
{ os: "darwin", arch: "x64" },
{ os: "linux", arch: "aarch64" },
// { os: "linux", arch: "aarch64", abi: "musl" }, // TODO:
{ os: "linux", arch: "x64" },
{ os: "linux", arch: "x64", baseline: true },
// { os: "linux", arch: "x64", abi: "musl" }, // TODO:
{ os: "windows", arch: "x64" },
{ os: "windows", arch: "x64", baseline: true },
];
@@ -312,14 +334,12 @@ function getPipeline(buildId) {
{ os: "linux", arch: "aarch64", distro: "debian", release: "12" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04" },
// { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "edge" }, // TODO:
{ os: "linux", arch: "x64", distro: "debian", release: "12" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04" },
{ os: "linux", arch: "x64", distro: "debian", release: "12", baseline: true },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04", baseline: true },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", baseline: true },
// { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "edge" }, // TODO:
{ os: "windows", arch: "x64", distro: "server", release: "2019" },
{ os: "windows", arch: "x64", distro: "server", release: "2019", baseline: true },
];
@@ -330,25 +350,18 @@ function getPipeline(buildId) {
...buildPlatforms.map(platform => {
const { os, arch, baseline } = platform;
let steps = [
...testPlatforms
.filter(platform => platform.os === os && platform.arch === arch && baseline === platform.baseline)
.map(platform => getTestBunStep(platform)),
];
if (!buildId) {
steps.unshift(
return {
key: getKey(platform),
group: getLabel(platform),
steps: [
getBuildVendorStep(platform),
getBuildCppStep(platform),
getBuildZigStep(platform),
getBuildBunStep(platform),
);
}
return {
key: getKey(platform),
group: getLabel(platform),
steps,
...testPlatforms
.filter(platform => platform.os === os && platform.arch === arch && baseline === platform.baseline)
.map(platform => getTestBunStep(platform)),
],
};
}),
],
@@ -356,98 +369,31 @@ function getPipeline(buildId) {
}
async function main() {
printEnvironment();
console.log("Checking environment...");
console.log(" - Repository:", getRepository());
console.log(" - Branch:", getBranch());
console.log(" - Commit:", getCommit());
console.log(" - Is Main Branch:", isMainBranch());
console.log(" - Is Merge Queue:", isMergeQueue());
console.log(" - Is Pull Request:", isPullRequest());
console.log("Checking last successful build...");
const lastBuild = await getLastSuccessfulBuild();
if (lastBuild) {
const { id, path, commit_id: commit } = lastBuild;
console.log(" - Build ID:", id);
console.log(" - Build URL:", new URL(path, "https://buildkite.com/").toString());
console.log(" - Commit:", commit);
} else {
console.log(" - No build found");
}
const changedFiles = await getChangedFiles();
if (changedFiles) {
console.log(
`Found ${changedFiles.length} changed files: \n${changedFiles.map(filename => ` - ${filename}`).join("\n")}`,
);
let changedFiles;
if (!isFork()) {
console.log("Checking changed files...");
const baseRef = getCommit();
console.log(" - Base Ref:", baseRef);
const headRef = lastBuild?.commit_id || getTargetBranch() || getMainBranch();
console.log(" - Head Ref:", headRef);
changedFiles = await getChangedFiles(undefined, baseRef, headRef);
if (changedFiles) {
if (changedFiles.length) {
changedFiles.forEach(filename => console.log(` - ${filename}`));
} else {
console.log(" - No changed files");
}
}
}
const isDocumentationFile = filename => /^(\.vscode|\.github|bench|docs|examples)|\.(md)$/i.test(filename);
const isTestFile = filename => /^test/i.test(filename) || /runner\.node\.mjs$/i.test(filename);
console.log("Checking if CI should be forced...");
let forceBuild;
{
const message = getCommitMessage();
const match = /\[(force ci|ci force|ci force build)\]/i.exec(message);
if (match) {
const [, reason] = match;
console.log(" - Yes, because commit message contains:", reason);
forceBuild = true;
}
}
console.log("Checking if CI should be skipped...");
if (!forceBuild) {
const message = getCommitMessage();
const match = /\[(skip ci|no ci|ci skip|ci no)\]/i.exec(message);
if (match) {
const [, reason] = match;
console.log(" - Yes, because commit message contains:", reason);
if (changedFiles.every(filename => isDocumentation(filename))) {
console.log("Since changed files are only documentation, skipping...");
return;
}
if (changedFiles && changedFiles.every(filename => isDocumentationFile(filename))) {
console.log(" - Yes, because all changed files are documentation");
return;
if (changedFiles.every(filename => isTest(filename) || isDocumentation(filename))) {
// TODO: console.log("Since changed files contain tests, skipping build...");
}
}
console.log("Checking if build should be skipped...");
let skipBuild;
if (!forceBuild) {
const message = getCommitMessage();
const match = /\[(only tests?|tests? only|skip build|no build|build skip|build no)\]/i.exec(message);
if (match) {
const [, reason] = match;
console.log(" - Yes, because commit message contains:", reason);
skipBuild = true;
}
if (changedFiles && changedFiles.every(filename => isTestFile(filename) || isDocumentationFile(filename))) {
console.log(" - Yes, because all changed files are tests or documentation");
skipBuild = true;
}
}
console.log("Checking if build is a named release...");
let buildRelease;
{
const message = getCommitMessage();
const match = /\[(release|release build|build release)\]/i.exec(message);
if (match) {
const [, reason] = match;
console.log(" - Yes, because commit message contains:", reason);
buildRelease = true;
}
}
console.log("Generating pipeline...");
const pipeline = getPipeline(lastBuild && skipBuild && !forceBuild ? lastBuild.id : undefined);
const pipeline = getPipeline();
const content = toYaml(pipeline);
const contentPath = join(process.cwd(), ".buildkite", "ci.yml");
writeFileSync(contentPath, content);
@@ -455,15 +401,6 @@ async function main() {
console.log("Generated pipeline:");
console.log(" - Path:", contentPath);
console.log(" - Size:", (content.length / 1024).toFixed(), "KB");
if (isBuildkite) {
console.log("Setting canary revision...");
const canaryRevision = buildRelease ? 0 : await getCanaryRevision();
await spawnSafe(["buildkite-agent", "meta-data", "set", "canary", `${canaryRevision}`]);
console.log("Uploading pipeline...");
await spawnSafe(["buildkite-agent", "pipeline", "upload", contentPath]);
}
}
await main();

View File

@@ -2,10 +2,106 @@
set -eo pipefail
# Fails the build early when required Buildkite environment variables are
# missing; later steps need the repository URL and the commit SHA.
function assert_build() {
  if [ -z "$BUILDKITE_REPO" ]; then
    echo "error: Cannot find repository for this build"
    exit 1
  fi
  if [ -z "$BUILDKITE_COMMIT" ]; then
    echo "error: Cannot find commit for this build"
    exit 1
  fi
}
# Ensures the buildkite-agent CLI is on PATH; it is required to set
# meta-data and upload the generated pipeline. Unlike assert_command, this
# never attempts an automatic install — it only points at the install docs.
function assert_buildkite_agent() {
  if ! command -v buildkite-agent &> /dev/null; then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}
# Thin wrappers over assert_command for each external tool this script
# shells out to (args: command, package name, help URL).
function assert_jq() {
  assert_command "jq" "jq" "https://stedolan.github.io/jq/"
}
function assert_curl() {
  assert_command "curl" "curl" "https://curl.se/download.html"
}
function assert_node() {
  assert_command "node" "node" "https://nodejs.org/en/download/"
}
# Verifies that a command is installed; when missing, attempts a Homebrew
# install, otherwise prints an optional help URL and aborts.
#   $1 - command to look for
#   $2 - package name to install (may differ from the command name)
#   $3 - optional URL shown when automatic installation is impossible
function assert_command() {
  local command="$1"
  local package="$2"
  local help_url="$3"
  if ! command -v "$command" &> /dev/null; then
    echo "warning: $command is not installed, installing..."
    if command -v brew &> /dev/null; then
      # HOMEBREW_NO_AUTO_UPDATE skips brew's slow self-update in CI.
      HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package"
    else
      echo "error: Cannot install $command, please install it"
      if [ -n "$help_url" ]; then
        echo ""
        echo "hint: See $help_url for help"
      fi
      exit 1
    fi
  fi
}
# For release builds (RELEASE=1), pin the "canary" meta-data to "0" so the
# pipeline builds a stable release instead of a canary revision; assert_canary
# then sees the value as already set and leaves it alone.
function assert_release() {
  if [ "$RELEASE" == "1" ]; then
    run_command buildkite-agent meta-data set canary "0"
  fi
}
# Computes the canary revision (number of commits ahead of the latest GitHub
# release) and stores it in Buildkite meta-data — unless the value is already
# set (e.g. pinned to "0" by assert_release).
function assert_canary() {
  # `local x="$(cmd)"` deliberately masks the command's exit status (SC2155):
  # under `set -e` a failing buildkite-agent/curl would otherwise abort the
  # script, while an empty or "null" result is handled gracefully below.
  local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
  if [ -z "$canary" ]; then
    # Escape the dots so "github.com" and ".git" match literally — the
    # previous pattern's bare "." matched any character.
    local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github\.com/([^/]+)/([^/]+)\.git#\1/\2#g')
    local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")"
    if [ "$tag" == "null" ]; then
      # No releases yet: treat this as the first canary.
      canary="1"
    else
      local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by")
      if [ "$revision" == "null" ]; then
        canary="1"
      else
        canary="$revision"
      fi
    fi
    run_command buildkite-agent meta-data set canary "$canary"
  fi
}
# Uploads a generated pipeline file to Buildkite, aborting when the file is
# missing (e.g. the generator failed or deliberately produced nothing).
#   $1 - path to the pipeline YAML file
function upload_buildkite_pipeline() {
  local path="$1"
  if [ ! -f "$path" ]; then
    echo "error: Cannot find pipeline: $path"
    exit 1
  fi
  run_command buildkite-agent pipeline upload "$path"
}
# Runs a command with shell tracing enabled so it is echoed into the build
# log. The `{ set +x; } 2>/dev/null` form restores quiet mode without the
# "set +x" line itself appearing in the trace output.
function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}
# Entry point: validate the environment and required tooling, compute the
# release/canary meta-data, then generate the CI pipeline and upload it.
assert_build
assert_buildkite_agent
assert_jq
assert_curl
assert_node
assert_release
assert_canary
run_command node ".buildkite/ci.mjs"
# ci.mjs only writes the pipeline when CI should actually run, so the upload
# is conditional on the file existing.
if [ -f ".buildkite/ci.yml" ]; then
  upload_buildkite_pipeline ".buildkite/ci.yml"
fi

View File

@@ -162,27 +162,6 @@ function upload_s3_file() {
run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
}
function send_bench_webhook() {
if [ -z "$BENCHMARK_URL" ]; then
echo "error: \$BENCHMARK_URL is not set"
# exit 1 # TODO: this isn't live yet
return
fi
local tag="$1"
local commit="$BUILDKITE_COMMIT"
local artifact_path="${commit}"
if [ "$tag" == "canary" ]; then
artifact_path="${commit}-canary"
fi
local artifact_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/$artifact_path/bun-linux-x64.zip"
local webhook_url="$BENCHMARK_URL?tag=$tag&commit=$commit&artifact_url=$artifact_url"
curl -X POST "$webhook_url"
}
function create_release() {
assert_main
assert_buildkite_agent
@@ -227,7 +206,6 @@ function create_release() {
update_github_release "$tag"
create_sentry_release "$tag"
send_bench_webhook "$tag"
}
function assert_canary() {

View File

@@ -11,8 +11,5 @@ packages/**/bun-profile
src/bun.js/WebKit
src/bun.js/WebKit/LayoutTests
zig-build
.zig-cache
zig-out
build
vendor
node_modules
zig-cache
zig-out

2
.gitattributes vendored
View File

@@ -49,5 +49,3 @@ vendor/brotli/** linguist-vendored
test/js/node/test/fixtures linguist-vendored
test/js/node/test/common linguist-vendored
test/js/bun/css/files linguist-vendored

View File

@@ -83,26 +83,6 @@ jobs:
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
rm -rf is-outdated.txt outdated.txt latest.txt
- name: Generate comment text with Sentry Link
if: github.event.label.name == 'crash'
# ignore if fail
continue-on-error: true
id: generate-comment-text
env:
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_EVENTS_SECRET }}
shell: bash
run: |
bun scripts/associate-issue-with-sentry.ts
if [[ -f "sentry-link.txt" ]]; then
echo "sentry-link=$(cat sentry-link.txt)" >> $GITHUB_OUTPUT
fi
if [[ -f "sentry-id.txt" ]]; then
echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT
fi
- name: Add labels
uses: actions-cool/issues-helper@v3
if: github.event.label.name == 'crash'
@@ -112,7 +92,7 @@ jobs:
issue-number: ${{ github.event.issue.number }}
labels: ${{ steps.add-labels.outputs.labels }}
- name: Comment outdated
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
@@ -126,40 +106,6 @@ jobs:
```sh
bun upgrade
```
- name: Comment with Sentry Link and outdated version
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.oudated }}.
Are you able to reproduce this crash on the latest version of Bun?
```sh
bun upgrade
```
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment with Sentry Link
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated != 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Thank you for reporting this crash.
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment needs repro
if: github.event.label.name == 'needs repro'
uses: actions-cool/issues-helper@v3

19
.gitignore vendored
View File

@@ -26,7 +26,6 @@
*.db
*.dmg
*.dSYM
*.generated.ts
*.jsb
*.lib
*.log
@@ -54,8 +53,8 @@
/test-report.md
/test.js
/test.ts
/test.zig
/testdir
/test.zig
build
build.ninja
bun-binary
@@ -112,10 +111,8 @@ pnpm-lock.yaml
profile.json
README.md.template
release/
scripts/env.local
sign.*.json
sign.json
src/bake/generated.ts
src/bun.js/bindings-obj
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/debug-bindings-obj
@@ -134,13 +131,16 @@ src/runtime.version
src/tests.zig
test.txt
test/js/bun/glob/fixtures
test/node.js/upstream
tsconfig.tsbuildinfo
txt.js
x64
yarn.lock
zig-cache
zig-out
test/node.js/upstream
.zig-cache
scripts/env.local
*.generated.ts
# Dependencies
/vendor
@@ -148,23 +148,22 @@ zig-out
# Dependencies (before CMake)
# These can be removed in the far future
/src/bun.js/WebKit
/src/deps/WebKit
/src/deps/boringssl
/src/deps/brotli
/src/deps/c*ares
/src/deps/lol*html
/src/deps/libarchive
/src/deps/libdeflate
/src/deps/libuv
/src/deps/lol*html
/src/deps/ls*hpack
/src/deps/mimalloc
/src/deps/picohttpparser
/src/deps/tinycc
/src/deps/WebKit
/src/deps/zig
/src/deps/zlib
/src/deps/zstd
/src/deps/zlib
/src/deps/zig
# Generated files
.buildkite/ci.yml
*.sock

58
.vscode/launch.json generated vendored
View File

@@ -22,8 +22,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -39,8 +37,6 @@
"BUN_DEBUG_jest": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -62,8 +58,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -79,8 +73,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -96,8 +88,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -113,8 +103,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -131,8 +119,6 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -154,8 +140,6 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -176,8 +160,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -196,8 +178,6 @@
"GOMAXPROCS": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -212,8 +192,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -228,8 +206,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -244,8 +220,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -261,8 +235,6 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -283,8 +255,6 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -306,8 +276,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -323,8 +291,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -340,8 +306,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -357,8 +321,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -374,8 +336,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -392,8 +352,6 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -415,8 +373,6 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -437,8 +393,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
// bun test [*]
{
@@ -454,8 +408,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -470,8 +422,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -487,8 +437,6 @@
"BUN_INSPECT": "ws://localhost:0/",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -508,8 +456,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -524,8 +470,6 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
// Windows: bun test [file]
{
@@ -1238,8 +1182,6 @@
},
],
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
],
"inputs": [

View File

@@ -11,7 +11,7 @@ Bun currently requires `glibc >=2.32` in development which means if you're on Ub
Using your system's package manager, install Bun's dependencies:
{% codetabs group="os" %}
{% codetabs %}
```bash#macOS (Homebrew)
$ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby
@@ -60,7 +60,7 @@ $ brew install bun
Bun requires LLVM 16 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
{% codetabs group="os" %}
{% codetabs %}
```bash#macOS (Homebrew)
$ brew install llvm@18
@@ -97,7 +97,7 @@ $ which clang-16
If not, run this to manually add it:
{% codetabs group="os" %}
{% codetabs %}
```bash#macOS (Homebrew)
# use fish_add_path if you're using fish
@@ -285,17 +285,17 @@ If you see this error when compiling, run:
$ xcode-select --install
```
### Cannot find `libatomic.a`
## Cannot find `libatomic.a`
Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:
```bash
$ bun run build -DUSE_STATIC_LIBATOMIC=OFF
$ bun setup -DUSE_STATIC_LIBATOMIC=OFF
```
The built version of Bun may not work on other systems if compiled this way.
### ccache conflicts with building TinyCC on macOS
## ccache conflicts with building TinyCC on macOS
If you run into issues with `ccache` when building TinyCC, try reinstalling ccache
@@ -303,9 +303,3 @@ If you run into issues with `ccache` when building TinyCC, try reinstalling ccac
brew uninstall ccache
brew install ccache
```
## Using bun-debug
- Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging)
- Enable logging for a specific zig scope: `BUN_DEBUG_EventLoop=1 bun-debug ...` (to allow `std.log.scoped(.EventLoop)`)
- Bun transpiles every file it runs, to see the actual executed source in a debug build find it in `/tmp/bun-debug-src/...path/to/file`, for example the transpiled version of `/home/bun/index.ts` would be in `/tmp/bun-debug-src/home/bun/index.ts`

2
LATEST
View File

@@ -1 +1 @@
1.1.34
1.1.33

View File

@@ -1,5 +1,5 @@
<p align="center">
<a href="https://bun.sh"><img src="https://github.com/user-attachments/assets/50282090-adfd-4ddb-9e27-c30753c6b161" alt="Logo" height=170></a>
<a href="https://bun.sh"><img src="https://user-images.githubusercontent.com/709451/182802334-d9c42afe-f35d-4a7b-86ea-9985f73f20c3.png" alt="Logo" height=170></a>
</p>
<h1 align="center">Bun</h1>

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("sync", () => {});
bench("async", async () => {});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
bench("sync", () => {});
bench("async", async () => {});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("sync", () => {});
bench("async", async () => {});

Binary file not shown.

View File

@@ -1,5 +1,5 @@
import { bench, run } from "mitata";
import { copyFileSync, statSync, writeFileSync } from "node:fs";
import { bench, run } from "../runner.mjs";
function runner(ready) {
for (let size of [1, 10, 100, 1000, 10000, 100000, 1000000, 10000000]) {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const crypto = require("node:crypto");
const keyPair = crypto.generateKeyPairSync("rsa", {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const crypto = require("node:crypto");
const keyPair = crypto.generateKeyPairSync("rsa", {

View File

@@ -1,5 +1,5 @@
import { expect } from "bun:test";
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const MAP_SIZE = 10_000;

View File

@@ -1,5 +1,5 @@
import { expect } from "bun:test";
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const SET_SIZE = 10_000;

View File

@@ -1,5 +1,5 @@
import { group } from "mitata";
import EventEmitterNative from "node:events";
import { group } from "../runner.mjs";
export const implementations = [
{

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";
// Pseudo RNG is derived from https://stackoverflow.com/a/424445

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const count = 100;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const count = 100;

View File

@@ -1,5 +1,5 @@
import { CString, dlopen, ptr } from "bun:ffi";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "mitata";
const { napiNoop, napiHash, napiString } = require(import.meta.dir + "/src/ffi_napi_bench.node");

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "../node_modules/mitata/src/cli.mjs";
const extension = "darwin" !== Deno.build.os ? "so" : "dylib";
const path = new URL("src/target/release/libffi_napi_bench." + extension, import.meta.url).pathname;

View File

@@ -1,5 +1,5 @@
import { bench, group, run } from "mitata";
import { createRequire } from "node:module";
import { bench, group, run } from "../runner.mjs";
const require = createRequire(import.meta.url);
const { napiNoop, napiHash, napiString } = require("./src/ffi_napi_bench.node");

View File

@@ -1,5 +1,5 @@
import braces from "braces";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "mitata";
// const iterations = 1000;
const iterations = 100;
@@ -10,16 +10,15 @@ const veryComplexPattern = "{a,b,HI{c,e,LMAO{d,f}Q}}{1,2,{3,4},5}";
console.log(braces(complexPattern, { expand: true }));
function benchPattern(pattern, name) {
const _name = `${name} pattern: "${pattern}"`;
group({ name: _name, summary: true }, () => {
group({ name: `${name} pattern: "${pattern}"`, summary: true }, () => {
if (typeof Bun !== "undefined")
bench(`Bun (${_name})`, () => {
bench("Bun", () => {
for (let i = 0; i < iterations; i++) {
Bun.$.braces(pattern);
}
});
bench(`micromatch/braces ${_name}`, () => {
bench("micromatch/braces", () => {
for (let i = 0; i < iterations; i++) {
braces(pattern, { expand: true });
}

View File

@@ -1,5 +1,5 @@
import micromatch from "micromatch";
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);

View File

@@ -1,6 +1,6 @@
import fg from "fast-glob";
import { fdir } from "fdir";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "mitata";
const normalPattern = "*.ts";
const recursivePattern = "**/*.ts";

View File

@@ -1,5 +1,5 @@
import { gunzipSync, gzipSync } from "bun";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "mitata";
const data = await Bun.file(require.resolve("@babel/standalone/babel.min.js")).arrayBuffer();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const data = new TextEncoder().encode("Hello World!".repeat(9999));

View File

@@ -1,7 +1,7 @@
import { readFileSync } from "fs";
import { bench, run } from "mitata";
import { createRequire } from "module";
import { gunzipSync, gzipSync } from "zlib";
import { bench, run } from "../runner.mjs";
const require = createRequire(import.meta.url);
const data = readFileSync(require.resolve("@babel/standalone/babel.min.js"));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("JSON.stringify({hello: 'world'})", () => JSON.stringify({ hello: "world" }));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("console.log('hello')", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
bench("console.log", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("console.log", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../../runner.mjs";
import { bench, run } from "mitata";
import {
arch,
cpus,

View File

@@ -1,3 +1,4 @@
import { bench, run } from "mitata";
import {
arch,
cpus,
@@ -18,7 +19,6 @@ import {
userInfo,
version,
} from "node:os";
import { bench, run } from "../../runner.mjs";
bench("cpus()", () => cpus());
bench("networkInterfaces()", () => networkInterfaces());

View File

@@ -13,9 +13,7 @@
"execa": "^8.0.1",
"fast-glob": "3.3.1",
"fdir": "^6.1.0",
"mitata": "^1.0.10",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"mitata": "^0.1.6",
"string-width": "7.1.0",
"tinycolor2": "^1.6.0",
"zx": "^7.2.3"

View File

@@ -1,19 +0,0 @@
import * as Mitata from "mitata";
import process from "node:process";
const asJSON = !!process?.env?.BENCHMARK_RUNNER;
/** @param {Parameters<typeof Mitata["run"]>["0"]} opts */
export function run(opts = {}) {
if (asJSON) {
opts.format = "json";
}
return Mitata.run(opts);
}
export const bench = Mitata.bench;
export function group(_name, fn) {
return Mitata.group(fn);
}

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
function doIt(...args) {
// we use .at() to prevent constant folding optimizations

View File

@@ -1,5 +1,5 @@
// https://github.com/oven-sh/bun/issues/1096
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const identity = x => x;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var myArray = new Array(5);
bench("[1, 2, 3, 4, 5].shift()", () => {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var comparator = (a, b) => a - b;
const numbers = [

View File

@@ -1,6 +1,6 @@
// @runtime bun
import { ArrayBufferSink } from "bun";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var short = "Hello World!";
var shortUTF16 = "Hello World 💕💕💕";

View File

@@ -1,5 +1,5 @@
import * as assert from "assert";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("deepEqual", () => {
assert.deepEqual({ foo: "123", bar: "baz" }, { foo: "123", bar: "baz" });

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("noop", function () {});
bench("async function(){}", async function () {});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
function makeBenchmark(size) {
const latin1 = btoa("A".repeat(size));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
bench("new Blob(['hello world'])", function () {
return new Blob(["hello world"]);

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
function makeBenchmark(size, isToString) {
const base64Input = Buffer.alloc(size, "latin1").toString("base64");

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
const first = Buffer.allocUnsafe(size);

View File

@@ -1,7 +1,7 @@
// @runtime bun,node,deno
import { Buffer } from "node:buffer";
import process from "node:process";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const N = parseInt(process.env.RUN_COUNTER ?? "10000", 10);
var isBuffer = new Buffer(0);

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
for (let fillSize of [4, 8, 16, 11]) {

View File

@@ -1,6 +1,6 @@
import { Buffer } from "node:buffer";
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const bigBuffer = Buffer.from("hello world".repeat(10000));
const converted = bigBuffer.toString("base64");

View File

@@ -1,22 +1,22 @@
import Color from "color";
import tinycolor from "tinycolor2";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
const inputs = ["#f00", "rgb(255, 0, 0)", "rgba(255, 0, 0, 1)", "hsl(0, 100%, 50%)"];
for (const input of inputs) {
group(`${input}`, () => {
if (typeof Bun !== "undefined") {
bench(`Bun.color() (${input})`, () => {
bench("Bun.color()", () => {
Bun.color(input, "css");
});
}
bench(`color (${input})`, () => {
bench("color", () => {
Color(input).hex();
});
bench(`'tinycolor2' (${input})`, () => {
bench("'tinycolor2'", () => {
tinycolor(input).toHexString();
});
});

View File

@@ -1,6 +1,6 @@
import { allocUnsafe } from "bun";
import { readFileSync } from "fs";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
function polyfill(chunks) {
var size = 0;
@@ -41,16 +41,15 @@ const chunkGroups = [
];
for (const chunks of chunkGroups) {
const name = `${chunks.reduce((prev, curr, i, a) => prev + curr.byteLength, 0)} bytes for ${chunks.length} chunks`
group(name, () => {
bench(`Bun.concatArrayBuffers (${name})`, () => {
group(`${chunks.reduce((prev, curr, i, a) => prev + curr.byteLength, 0)} bytes for ${chunks.length} chunks`, () => {
bench("Bun.concatArrayBuffers", () => {
Bun.concatArrayBuffers(chunks);
});
bench(`Uint8Array.set (${name})`, () => {
bench("Uint8Array.set", () => {
polyfill(chunks);
});
bench(`Uint8Array.set (uninitialized memory) (${name})`, () => {
bench("Uint8Array.set (uninitialized memory)", () => {
polyfillUninitialized(chunks);
});
});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const json = {
login: "wongmjane",

View File

@@ -2,7 +2,7 @@ import { mkdirSync, rmSync, writeFileSync } from "fs";
import { cp } from "fs/promises";
import { tmpdir } from "os";
import { join, resolve } from "path";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
import { fileURLToPath } from "url";
const hugeDirectory = (() => {

View File

@@ -1,6 +1,6 @@
// https://github.com/oven-sh/bun/issues/2190
import { bench, run } from "mitata";
import { createHash } from "node:crypto";
import { bench, run } from "../runner.mjs";
const data =
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";

View File

@@ -1,5 +1,5 @@
// so it can run in environments without node module resolution
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
import crypto from "node:crypto";

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("crypto.randomUUID()", () => {
return crypto.randomUUID();

View File

@@ -1,6 +1,6 @@
// https://github.com/oven-sh/bun/issues/2190
import { bench, run } from "mitata";
import { createHash } from "node:crypto";
import { bench, run } from "../runner.mjs";
const data =
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";

View File

@@ -1,6 +1,6 @@
// so it can run in environments without node module resolution
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
var foo = new Uint8Array(65536);
bench("crypto.getRandomValues(65536)", () => {
crypto.getRandomValues(foo);

View File

@@ -1,5 +1,5 @@
import fastDeepEquals from "fast-deep-equal/es6/index";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
// const Date = globalThis.Date;
function func1() {}
@@ -490,7 +490,7 @@ for (let { tests, description } of fixture) {
var expected;
group(describe, () => {
for (let equalsFn of [Bun.deepEquals, fastDeepEquals]) {
bench(`${describe}: ${equalsFn.name}`, () => {
bench(equalsFn.name, () => {
expected = equalsFn(value1, value2);
if (expected !== equal) {
throw new Error(`Expected ${expected} to be ${equal} for ${description}`);

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const properties = {
closed: {

View File

@@ -1,5 +1,5 @@
import { lookup, resolve } from "node:dns/promises";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("(cached) dns.lookup remote x 50", async () => {
var tld = "example.com";

View File

@@ -1,10 +1,10 @@
import { dns } from "bun";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
async function forEachBackend(name, fn) {
group(name, () => {
for (let backend of ["libc", "c-ares", process.platform === "darwin" ? "system" : ""].filter(Boolean))
bench(`${backend} (${name})`, fn(backend));
bench(backend, fn(backend));
});
}

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const encoder = new TextEncoder();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var err = new Error();
bench("Error.captureStackTrace(err)", () => {

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
var bunEscapeHTML = globalThis.escapeHTML || Bun.escapeHTML;
@@ -92,21 +92,24 @@ function reactEscapeHtml(string) {
// }
for (let input of [
"long string, nothing to escape... ".repeat(9999999 * 3),
`long string, nothing to escape... `.repeat(9999999 * 3),
FIXTURE.repeat(8000),
// "[unicode]" + FIXTURE_WITH_UNICODE,
]) {
const name = `"${input.substring(0, Math.min(input.length, 32))}" (${new Intl.NumberFormat().format(input.length / 100_000_000_0)} GB)`
group(
{
summary: true,
name
name:
`"` +
input.substring(0, Math.min(input.length, 32)) +
`"` +
` (${new Intl.NumberFormat().format(input.length / 100_000_000_0)} GB)`,
},
() => {
// bench(`ReactDOM.escapeHTML`, () => reactEscapeHtml(input));
// bench(`html-entities.encode`, () => htmlEntityEncode(input));
// bench(`he.escape`, () => heEscape(input));
bench(`Bun.escapeHTML (${name})`, () => bunEscapeHTML(input));
bench(`Bun.escapeHTML`, () => bunEscapeHTML(input));
},
);
}

View File

@@ -1,5 +1,5 @@
import { dlopen } from "bun:ffi";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
const types = {
returns_true: {

View File

@@ -1,5 +1,5 @@
// so it can run in environments without node module resolution
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
const blob = new Blob(["foo", "bar", "baz"]);
bench("FormData.append", () => {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
// pure JS implementation will optimze this out
bench("new Headers", function () {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const input =
"Hello, World! foo bar baz qux quux corge grault garply waldo fred plugh xyzzy thud z a b c d e f g h i j k l m n o p q r s t u v w x y z".split(

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var obj = {
"restApiRoot": "/api",

View File

@@ -1,6 +1,6 @@
// This is a stress test of some internals in How Bun does the module.exports assignment.
// If it crashes or throws then this fails
import("../runner.mjs").then(({ bench, run }) => {
import("./runner.mjs").then(({ bench, run }) => {
bench("Object.defineProperty(module, 'exports', { get() { return 42; } })", () => {
Object.defineProperty(module, "exports", {
get() {
@@ -36,9 +36,7 @@ import("../runner.mjs").then(({ bench, run }) => {
a: 1,
};
const log = !process?.env?.BENCHMARK_RUNNER ? console.log : () => {};
log(
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id],
@@ -51,11 +49,10 @@ import("../runner.mjs").then(({ bench, run }) => {
return 42;
};
log(module.exports);
log(module.exports, module.exports());
console.log(module.exports, module.exports());
queueMicrotask(() => {
log(
console.log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id]?.exports,

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
// These are no-op C++ functions that are exported to JS.
const lazy = globalThis[Symbol.for("Bun.lazy")];

View File

@@ -1,5 +1,5 @@
import { IncomingMessage } from "node:http";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const headers = {
date: "Mon, 06 Nov 2023 05:12:49 GMT",

View File

@@ -1,6 +1,6 @@
// @runtime node, bun
import * as vm from "node:vm";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const context = {
animal: "cat",

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var noop = globalThis[Symbol.for("Bun.lazy")]("noop");
var { function: noopFn, callback } = noop;

View File

@@ -1,5 +1,5 @@
// so it can run in environments without node module resolution
import { bench, run } from "../runner.mjs";
import { bench, run } from "../../node_modules/mitata/src/cli.mjs";
const obj = {
a: 1,

View File

@@ -24,7 +24,7 @@ const obj = {
w: 23,
};
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var val = 0;
bench("Object.values(literal)", () => {

View File

@@ -1,5 +1,5 @@
import { bench, run } from "mitata";
import { posix } from "path";
import { bench, run } from "../runner.mjs";
const pathConfigurations = [
"",

View File

@@ -1,6 +1,6 @@
import { pbkdf2 } from "node:crypto";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const password = "password";
const salt = "salt";

View File

@@ -1,5 +1,5 @@
import { peek } from "bun";
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
let pending = Bun.sleep(1000);
let resolved = Promise.resolve(1);

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("performance.now x 1000", () => {
for (let i = 0; i < 1000; i++) {
performance.now();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
// This is a benchmark of the performance impact of using private properties.
bench("Polyfillprivate", () => {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
bench("process.cwd()", () => {
process.cwd();

View File

@@ -1,5 +1,5 @@
import { performance } from "perf_hooks";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("process.memoryUsage()", () => {
process.memoryUsage();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench("process.stderr.write('hey')", () => {
process.stderr.write("hey");

View File

@@ -1,6 +1,6 @@
import { renderToReadableStream as renderToReadableStreamBun } from "react-dom/server";
import { renderToReadableStream } from "react-dom/server.browser";
import { bench, group, run } from "../runner.mjs";
import { bench, group, run } from "./runner.mjs";
const App = () => (
<div>

View File

@@ -1,7 +1,7 @@
import { createReadStream, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { sep } from "node:path";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
if (!Promise.withResolvers) {
Promise.withResolvers = function () {

View File

@@ -1,5 +1,5 @@
import { readFileSync, writeFileSync } from "node:fs";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
var short = (function () {
const text = "Hello World!";

View File

@@ -4,7 +4,7 @@ import { readdir } from "fs/promises";
import { relative, resolve } from "path";
import { argv } from "process";
import { fileURLToPath } from "url";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
let dir = resolve(argv.length > 2 ? argv[2] : fileURLToPath(new URL("../../node_modules", import.meta.url)));
if (dir.includes(process.cwd())) {
@@ -43,11 +43,8 @@ bench(`await readdir("${dir}", {recursive: false})`, async () => {
});
await run();
console.log("\n", count, "files/dirs in", dir, "\n", "SHA256:", hash, "\n");
if (!process?.env?.BENCHMARK_RUNNER) {
console.log("\n", count, "files/dirs in", dir, "\n", "SHA256:", hash, "\n");
if (count !== syncCount) {
throw new Error(`Mismatched file counts: ${count} async !== ${syncCount} sync`);
}
if (count !== syncCount) {
throw new Error(`Mismatched file counts: ${count} async !== ${syncCount} sync`);
}

View File

@@ -1,6 +1,6 @@
import { readFileSync } from "node:fs";
import { readFile } from "node:fs/promises";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
bench(`readFileSync(/tmp/404-not-found)`, () => {
try {

View File

@@ -1,5 +1,5 @@
import { realpathSync } from "node:fs";
import { bench, run } from "../runner.mjs";
import { bench, run } from "./runner.mjs";
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
const arg = process.argv[process.argv.length - 1];

Some files were not shown because too many files have changed in this diff Show More