mirror of https://github.com/oven-sh/bun
Merge branch 'ben/fix-node-napi-tests' into kai/fix-node-napi-tests
@@ -7,113 +7,22 @@

import { writeFileSync } from "node:fs";
import { join } from "node:path";

function getEnv(name, required = true) {
  const value = process.env[name];

  if (!value && required) {
    throw new Error(`Missing environment variable: ${name}`);
  }

  return value;
}

function getRepository() {
  const url = getEnv("BUILDKITE_PULL_REQUEST_REPO", false) || getEnv("BUILDKITE_REPO");
  const match = url.match(/github.com\/([^/]+)\/([^/]+)\.git$/);
  if (!match) {
    throw new Error(`Unsupported repository: ${url}`);
  }
  const [, owner, repo] = match;
  return `${owner}/${repo}`;
}

function getCommit() {
  return getEnv("BUILDKITE_COMMIT");
}

function getCommitMessage() {
  return getEnv("BUILDKITE_MESSAGE", false) || "";
}

function getBranch() {
  return getEnv("BUILDKITE_BRANCH");
}

function getMainBranch() {
  return getEnv("BUILDKITE_PIPELINE_DEFAULT_BRANCH", false) || "main";
}

function isFork() {
  const repository = getEnv("BUILDKITE_PULL_REQUEST_REPO", false);
  return !!repository && repository !== getEnv("BUILDKITE_REPO");
}

function isMainBranch() {
  return getBranch() === getMainBranch() && !isFork();
}

function isMergeQueue() {
  return /^gh-readonly-queue/.test(getEnv("BUILDKITE_BRANCH"));
}

function isPullRequest() {
  return getEnv("BUILDKITE_PULL_REQUEST", false) === "true";
}

async function getChangedFiles() {
  const repository = getRepository();
  const head = getCommit();
  const base = `${head}^1`;

  try {
    const response = await fetch(`https://api.github.com/repos/${repository}/compare/${base}...${head}`);
    if (response.ok) {
      const { files } = await response.json();
      return files.filter(({ status }) => !/removed|unchanged/i.test(status)).map(({ filename }) => filename);
    }
  } catch (error) {
    console.error(error);
  }
}

function getBuildUrl() {
  return getEnv("BUILDKITE_BUILD_URL");
}

async function getBuildIdWithArtifacts() {
  let depth = 0;
  let url = getBuildUrl();

  while (url) {
    const response = await fetch(`${url}.json`, {
      headers: { "Accept": "application/json" },
    });

    if (!response.ok) {
      return;
    }

    const { id, state, prev_branch_build: lastBuild, steps } = await response.json();
    if (depth++) {
      if (state === "failed" || state === "passed") {
        const buildSteps = steps.filter(({ label }) => label.endsWith("build-bun"));
        if (buildSteps.length) {
          if (buildSteps.every(({ outcome }) => outcome === "passed")) {
            return id;
          }
          return;
        }
      }
    }

    if (!lastBuild) {
      return;
    }

    url = url.replace(/\/builds\/[0-9]+/, `/builds/${lastBuild["number"]}`);
  }
}
import {
  getCanaryRevision,
  getChangedFiles,
  getCommit,
  getCommitMessage,
  getLastSuccessfulBuild,
  getMainBranch,
  getTargetBranch,
  isBuildkite,
  isFork,
  isMainBranch,
  isMergeQueue,
  printEnvironment,
  spawnSafe,
  startGroup,
} from "../scripts/utils.mjs";

function toYaml(obj, indent = 0) {
  const spaces = " ".repeat(indent);
@@ -169,8 +78,14 @@ function getPipeline(buildId) {
*/

const getKey = platform => {
  const { os, arch, baseline } = platform;
  const { os, arch, abi, baseline } = platform;

  if (abi) {
    if (baseline) {
      return `${os}-${arch}-${abi}-baseline`;
    }
    return `${os}-${arch}-${abi}`;
  }
  if (baseline) {
    return `${os}-${arch}-baseline`;
  }
@@ -179,8 +94,11 @@ function getPipeline(buildId) {
};

const getLabel = platform => {
  const { os, arch, baseline, release } = platform;
  const { os, arch, abi, baseline, release } = platform;
  let label = release ? `:${os}: ${release} ${arch}` : `:${os}: ${arch}`;
  if (abi) {
    label += `-${abi}`;
  }
  if (baseline) {
    label += `-baseline`;
  }
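For quick reference (not part of the diff), a condensed sketch of the keys the updated `getKey` produces now that `abi` participates; the platform objects are illustrative, and the no-flag fallback is assumed to be `${os}-${arch}`:

```js
// Equivalent, condensed form of the updated getKey above (illustrative only).
const getKey = ({ os, arch, abi, baseline }) =>
  [os, arch, abi, baseline ? "baseline" : undefined].filter(Boolean).join("-");

getKey({ os: "linux", arch: "x64" });                              // "linux-x64"
getKey({ os: "linux", arch: "x64", baseline: true });              // "linux-x64-baseline"
getKey({ os: "linux", arch: "aarch64", abi: "musl" });             // "linux-aarch64-musl"
getKey({ os: "linux", arch: "x64", abi: "musl", baseline: true }); // "linux-x64-musl-baseline"
```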
@@ -218,15 +136,16 @@ function getPipeline(buildId) {
|
||||
*/
|
||||
|
||||
const getBuildVendorStep = platform => {
|
||||
const { os, arch, baseline } = platform;
|
||||
const { os, arch, abi, baseline } = platform;
|
||||
|
||||
return {
|
||||
key: `${getKey(platform)}-build-vendor`,
|
||||
label: `${getLabel(platform)} - build-vendor`,
|
||||
label: `build-vendor`,
|
||||
agents: {
|
||||
os,
|
||||
arch,
|
||||
queue: `build-${os}`,
|
||||
abi,
|
||||
queue: abi ? `build-${os}-${abi}` : `build-${os}`,
|
||||
},
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
@@ -238,15 +157,16 @@ function getPipeline(buildId) {
|
||||
};
|
||||
|
||||
const getBuildCppStep = platform => {
|
||||
const { os, arch, baseline } = platform;
|
||||
const { os, arch, abi, baseline } = platform;
|
||||
|
||||
return {
|
||||
key: `${getKey(platform)}-build-cpp`,
|
||||
label: `${getLabel(platform)} - build-cpp`,
|
||||
label: `build-cpp`,
|
||||
agents: {
|
||||
os,
|
||||
arch,
|
||||
queue: `build-${os}`,
|
||||
abi,
|
||||
queue: abi ? `build-${os}-${abi}` : `build-${os}`,
|
||||
},
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
@@ -259,12 +179,12 @@ function getPipeline(buildId) {
|
||||
};
|
||||
|
||||
const getBuildZigStep = platform => {
|
||||
const { os, arch, baseline } = platform;
|
||||
const toolchain = baseline ? `${os}-${arch}-baseline` : `${os}-${arch}`;
|
||||
const { os, arch, abi, baseline } = platform;
|
||||
const toolchain = getKey(platform);
|
||||
|
||||
return {
|
||||
key: `${getKey(platform)}-build-zig`,
|
||||
label: `${getLabel(platform)} - build-zig`,
|
||||
label: `build-zig`,
|
||||
agents: {
|
||||
queue: "build-zig",
|
||||
},
|
||||
@@ -278,11 +198,11 @@ function getPipeline(buildId) {
|
||||
};
|
||||
|
||||
const getBuildBunStep = platform => {
|
||||
const { os, arch, baseline } = platform;
|
||||
const { os, arch, abi, baseline } = platform;
|
||||
|
||||
return {
|
||||
key: `${getKey(platform)}-build-bun`,
|
||||
label: `${getLabel(platform)} - build-bun`,
|
||||
label: `build-bun`,
|
||||
depends_on: [
|
||||
`${getKey(platform)}-build-vendor`,
|
||||
`${getKey(platform)}-build-cpp`,
|
||||
@@ -291,6 +211,7 @@ function getPipeline(buildId) {
|
||||
agents: {
|
||||
os,
|
||||
arch,
|
||||
abi,
|
||||
queue: `build-${os}`,
|
||||
},
|
||||
retry: getRetry(),
|
||||
@@ -304,7 +225,7 @@ function getPipeline(buildId) {
|
||||
};
|
||||
|
||||
const getTestBunStep = platform => {
|
||||
const { os, arch, distro, release } = platform;
|
||||
const { os, arch, abi, distro, release } = platform;
|
||||
|
||||
let name;
|
||||
if (os === "darwin" || os === "windows") {
|
||||
@@ -315,11 +236,11 @@ function getPipeline(buildId) {
|
||||
|
||||
let agents;
|
||||
if (os === "darwin") {
|
||||
agents = { os, arch, queue: `test-darwin` };
|
||||
agents = { os, arch, abi, queue: `test-darwin` };
|
||||
} else if (os === "windows") {
|
||||
agents = { os, arch, robobun: true };
|
||||
agents = { os, arch, abi, robobun: true };
|
||||
} else {
|
||||
agents = { os, arch, distro, release, robobun: true };
|
||||
agents = { os, arch, abi, distro, release, robobun: true };
|
||||
}
|
||||
|
||||
let command;
|
||||
@@ -375,8 +296,10 @@ function getPipeline(buildId) {
|
||||
{ os: "darwin", arch: "aarch64" },
|
||||
{ os: "darwin", arch: "x64" },
|
||||
{ os: "linux", arch: "aarch64" },
|
||||
// { os: "linux", arch: "aarch64", abi: "musl" }, // TODO:
|
||||
{ os: "linux", arch: "x64" },
|
||||
{ os: "linux", arch: "x64", baseline: true },
|
||||
// { os: "linux", arch: "x64", abi: "musl" }, // TODO:
|
||||
{ os: "windows", arch: "x64" },
|
||||
{ os: "windows", arch: "x64", baseline: true },
|
||||
];
|
||||
@@ -389,12 +312,14 @@ function getPipeline(buildId) {
|
||||
{ os: "linux", arch: "aarch64", distro: "debian", release: "12" },
|
||||
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04" },
|
||||
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04" },
|
||||
// { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "edge" }, // TODO:
|
||||
{ os: "linux", arch: "x64", distro: "debian", release: "12" },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04" },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04" },
|
||||
{ os: "linux", arch: "x64", distro: "debian", release: "12", baseline: true },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04", baseline: true },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", baseline: true },
|
||||
// { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "edge" }, // TODO:
|
||||
{ os: "windows", arch: "x64", distro: "server", release: "2019" },
|
||||
{ os: "windows", arch: "x64", distro: "server", release: "2019", baseline: true },
|
||||
];
|
||||
@@ -431,59 +356,82 @@ function getPipeline(buildId) {
|
||||
}
|
||||
|
||||
async function main() {
|
||||
console.log("Checking environment...");
|
||||
console.log(" - Repository:", getRepository());
|
||||
console.log(" - Branch:", getBranch());
|
||||
console.log(" - Commit:", getCommit());
|
||||
console.log(" - Commit Message:", getCommitMessage());
|
||||
console.log(" - Is Main Branch:", isMainBranch());
|
||||
console.log(" - Is Merge Queue:", isMergeQueue());
|
||||
console.log(" - Is Pull Request:", isPullRequest());
|
||||
printEnvironment();
|
||||
|
||||
const changedFiles = await getChangedFiles();
|
||||
console.log("Checking last successful build...");
|
||||
const lastBuild = await getLastSuccessfulBuild();
|
||||
if (lastBuild) {
|
||||
const { id, path, commit_id: commit } = lastBuild;
|
||||
console.log(" - Build ID:", id);
|
||||
console.log(" - Build URL:", new URL(path, "https://buildkite.com/").toString());
|
||||
console.log(" - Commit:", commit);
|
||||
} else {
|
||||
console.log(" - No build found");
|
||||
}
|
||||
|
||||
console.log("Checking changed files...");
|
||||
const baseRef = getCommit();
|
||||
console.log(" - Base Ref:", baseRef);
|
||||
const headRef = lastBuild?.commit_id || getTargetBranch() || getMainBranch();
|
||||
console.log(" - Head Ref:", headRef);
|
||||
|
||||
const changedFiles = await getChangedFiles(undefined, baseRef, headRef);
|
||||
if (changedFiles) {
|
||||
console.log(
|
||||
`Found ${changedFiles.length} changed files: \n${changedFiles.map(filename => ` - ${filename}`).join("\n")}`,
|
||||
);
|
||||
if (changedFiles.length) {
|
||||
changedFiles.forEach(filename => console.log(` - ${filename}`));
|
||||
} else {
|
||||
console.log(" - No changed files");
|
||||
}
|
||||
}
|
||||
|
||||
const isDocumentationFile = filename => /^(\.vscode|\.github|bench|docs|examples)|\.(md)$/i.test(filename);
|
||||
const isTestFile = filename => /^test/i.test(filename) || /runner\.node\.mjs$/i.test(filename);
|
||||
|
||||
const isSkip = () => {
|
||||
console.log("Checking if CI should be skipped...");
|
||||
{
|
||||
const message = getCommitMessage();
|
||||
if (/\[(skip ci|no ci|ci skip|ci no)\]/i.test(message)) {
|
||||
return true;
|
||||
const match = /\[(skip ci|no ci|ci skip|ci no)\]/i.exec(message);
|
||||
if (match) {
|
||||
const [, reason] = match;
|
||||
console.log(" - Yes, because commit message contains:", reason);
|
||||
return;
|
||||
}
|
||||
return changedFiles && changedFiles.every(filename => isDocumentationFile(filename));
|
||||
};
|
||||
|
||||
if (isSkip()) {
|
||||
console.log("Skipping CI due to commit message or changed files...");
|
||||
}
|
||||
if (changedFiles && changedFiles.every(filename => isDocumentationFile(filename))) {
|
||||
console.log(" - Yes, because all changed files are documentation");
|
||||
return;
|
||||
}
|
||||
|
||||
const isTestFile = filename => /^test/i.test(filename) || /runner\.node\.mjs$/i.test(filename);
|
||||
|
||||
const isSkipBuild = () => {
|
||||
console.log("Checking if build should be skipped...");
|
||||
let skipBuild;
|
||||
{
|
||||
const message = getCommitMessage();
|
||||
if (/\[(only tests?|tests? only|skip build|no build|build skip|build no)\]/i.test(message)) {
|
||||
return true;
|
||||
const match = /\[(only tests?|tests? only|skip build|no build|build skip|build no)\]/i.exec(message);
|
||||
if (match) {
|
||||
const [, reason] = match;
|
||||
console.log(" - Yes, because commit message contains:", reason);
|
||||
skipBuild = true;
|
||||
}
|
||||
return changedFiles && changedFiles.every(filename => isTestFile(filename) || isDocumentationFile(filename));
|
||||
};
|
||||
}
|
||||
if (changedFiles && changedFiles.every(filename => isTestFile(filename) || isDocumentationFile(filename))) {
|
||||
console.log(" - Yes, because all changed files are tests or documentation");
|
||||
skipBuild = true;
|
||||
}
|
||||
|
||||
let buildId;
|
||||
if (isSkipBuild()) {
|
||||
buildId = await getBuildIdWithArtifacts();
|
||||
if (buildId) {
|
||||
console.log("Skipping build due to commit message or changed files...");
|
||||
console.log("Using build artifacts from previous build:", buildId);
|
||||
} else {
|
||||
console.log("Attempted to skip build, but could not find previous build");
|
||||
console.log("Checking if build is a named release...");
|
||||
let buildRelease;
|
||||
{
|
||||
const message = getCommitMessage();
|
||||
const match = /\[(release|release build|build release)\]/i.exec(message);
|
||||
if (match) {
|
||||
const [, reason] = match;
|
||||
console.log(" - Yes, because commit message contains:", reason);
|
||||
buildRelease = true;
|
||||
}
|
||||
}
|
||||
|
||||
const pipeline = getPipeline(buildId);
|
||||
console.log("Generating pipeline...");
|
||||
const pipeline = getPipeline(lastBuild && skipBuild ? lastBuild.id : undefined);
|
||||
const content = toYaml(pipeline);
|
||||
const contentPath = join(process.cwd(), ".buildkite", "ci.yml");
|
||||
writeFileSync(contentPath, content);
|
||||
@@ -491,6 +439,15 @@ async function main() {
|
||||
console.log("Generated pipeline:");
|
||||
console.log(" - Path:", contentPath);
|
||||
console.log(" - Size:", (content.length / 1024).toFixed(), "KB");
|
||||
|
||||
if (isBuildkite) {
|
||||
console.log("Setting canary revision...");
|
||||
const canaryRevision = buildRelease ? 0 : await getCanaryRevision();
|
||||
await spawnSafe(["buildkite-agent", "meta-data", "set", "canary", `${canaryRevision}`]);
|
||||
|
||||
console.log("Uploading pipeline...");
|
||||
await spawnSafe(["buildkite-agent", "pipeline", "upload", contentPath]);
|
||||
}
|
||||
}
|
||||
|
||||
await main();
|
||||
|
||||
@@ -2,106 +2,10 @@
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
function assert_build() {
|
||||
if [ -z "$BUILDKITE_REPO" ]; then
|
||||
echo "error: Cannot find repository for this build"
|
||||
exit 1
|
||||
fi
|
||||
if [ -z "$BUILDKITE_COMMIT" ]; then
|
||||
echo "error: Cannot find commit for this build"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_buildkite_agent() {
|
||||
if ! command -v buildkite-agent &> /dev/null; then
|
||||
echo "error: Cannot find buildkite-agent, please install it:"
|
||||
echo "https://buildkite.com/docs/agent/v3/install"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_jq() {
|
||||
assert_command "jq" "jq" "https://stedolan.github.io/jq/"
|
||||
}
|
||||
|
||||
function assert_curl() {
|
||||
assert_command "curl" "curl" "https://curl.se/download.html"
|
||||
}
|
||||
|
||||
function assert_node() {
|
||||
assert_command "node" "node" "https://nodejs.org/en/download/"
|
||||
}
|
||||
|
||||
function assert_command() {
|
||||
local command="$1"
|
||||
local package="$2"
|
||||
local help_url="$3"
|
||||
if ! command -v "$command" &> /dev/null; then
|
||||
echo "warning: $command is not installed, installing..."
|
||||
if command -v brew &> /dev/null; then
|
||||
HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package"
|
||||
else
|
||||
echo "error: Cannot install $command, please install it"
|
||||
if [ -n "$help_url" ]; then
|
||||
echo ""
|
||||
echo "hint: See $help_url for help"
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_release() {
|
||||
if [ "$RELEASE" == "1" ]; then
|
||||
run_command buildkite-agent meta-data set canary "0"
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_canary() {
|
||||
local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
|
||||
if [ -z "$canary" ]; then
|
||||
local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github.com/([^/]+)/([^/]+).git#\1/\2#g')
|
||||
local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")"
|
||||
if [ "$tag" == "null" ]; then
|
||||
canary="1"
|
||||
else
|
||||
local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by")
|
||||
if [ "$revision" == "null" ]; then
|
||||
canary="1"
|
||||
else
|
||||
canary="$revision"
|
||||
fi
|
||||
fi
|
||||
run_command buildkite-agent meta-data set canary "$canary"
|
||||
fi
|
||||
}
|
||||
|
||||
function upload_buildkite_pipeline() {
|
||||
local path="$1"
|
||||
if [ ! -f "$path" ]; then
|
||||
echo "error: Cannot find pipeline: $path"
|
||||
exit 1
|
||||
fi
|
||||
run_command buildkite-agent pipeline upload "$path"
|
||||
}
|
||||
|
||||
function run_command() {
|
||||
set -x
|
||||
"$@"
|
||||
{ set +x; } 2>/dev/null
|
||||
}
|
||||
|
||||
assert_build
|
||||
assert_buildkite_agent
|
||||
assert_jq
|
||||
assert_curl
|
||||
assert_node
|
||||
assert_release
|
||||
assert_canary
|
||||
|
||||
run_command node ".buildkite/ci.mjs"
|
||||
|
||||
if [ -f ".buildkite/ci.yml" ]; then
|
||||
upload_buildkite_pipeline ".buildkite/ci.yml"
|
||||
fi
|
||||
|
||||
@@ -162,6 +162,25 @@ function upload_s3_file() {
|
||||
run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
|
||||
}
|
||||
|
||||
function send_bench_webhook() {
|
||||
if [ -z "$BENCHMARK_URL" ]; then
|
||||
return 1
|
||||
fi
|
||||
|
||||
local tag="$1"
|
||||
local commit="$BUILDKITE_COMMIT"
|
||||
local artifact_path="${commit}"
|
||||
|
||||
if [ "$tag" == "canary" ]; then
|
||||
artifact_path="${commit}-canary"
|
||||
fi
|
||||
|
||||
local artifact_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/$artifact_path/bun-linux-x64.zip"
|
||||
local webhook_url="$BENCHMARK_URL?tag=$tag&commit=$commit&artifact_url=$artifact_url"
|
||||
|
||||
curl -X POST "$webhook_url"
|
||||
}
|
||||
|
||||
function create_release() {
|
||||
assert_main
|
||||
assert_buildkite_agent
|
||||
@@ -206,6 +225,7 @@ function create_release() {
|
||||
|
||||
update_github_release "$tag"
|
||||
create_sentry_release "$tag"
|
||||
send_bench_webhook "$tag"
|
||||
}
|
||||
|
||||
function assert_canary() {
|
||||
|
||||
@@ -11,5 +11,8 @@ packages/**/bun-profile
src/bun.js/WebKit
src/bun.js/WebKit/LayoutTests
zig-build
zig-cache
zig-out
.zig-cache
zig-out
build
vendor
node_modules
@@ -285,7 +285,7 @@ If you see this error when compiling, run:
$ xcode-select --install
```

## Cannot find `libatomic.a`
### Cannot find `libatomic.a`

Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:

@@ -295,7 +295,7 @@ $ bun run build -DUSE_STATIC_LIBATOMIC=OFF

The built version of Bun may not work on other systems if compiled this way.

## ccache conflicts with building TinyCC on macOS
### ccache conflicts with building TinyCC on macOS

If you run into issues with `ccache` when building TinyCC, try reinstalling ccache

@@ -303,3 +303,9 @@ If you run into issues with `ccache` when building TinyCC, try reinstalling ccac
brew uninstall ccache
brew install ccache
```

## Using bun-debug

- Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging)
- Enable logging for a specific zig scope: `BUN_DEBUG_EventLoop=1 bun-debug ...` (to allow `std.log.scoped(.EventLoop)`)
- Bun transpiles every file it runs, to see the actual executed source in a debug build find it in `/tmp/bun-debug-src/...path/to/file`, for example the transpiled version of `/home/bun/index.ts` would be in `/tmp/bun-debug-src/home/bun/index.ts`
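Putting these flags together, a typical debug session might look like this; the script path is illustrative, and the `/tmp/bun-debug-src/home/bun/index.ts` mapping is the doc's own example:

```sh
# Silence all debug logging:
BUN_DEBUG_QUIET_LOGS=1 bun-debug ./index.ts

# Or enable logging for a single Zig scope, e.g. std.log.scoped(.EventLoop):
BUN_DEBUG_EventLoop=1 bun-debug ./index.ts

# Inspect the transpiled source that actually ran:
cat /tmp/bun-debug-src/home/bun/index.ts
```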
@@ -1,5 +1,5 @@
<p align="center">
  <a href="https://bun.sh"><img src="https://user-images.githubusercontent.com/709451/182802334-d9c42afe-f35d-4a7b-86ea-9985f73f20c3.png" alt="Logo" height=170></a>
  <a href="https://bun.sh"><img src="https://github.com/user-attachments/assets/50282090-adfd-4ddb-9e27-c30753c6b161" alt="Logo" height=170></a>
</p>
<h1 align="center">Bun</h1>
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("sync", () => {});
|
||||
bench("async", async () => {});
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("sync", () => {});
|
||||
bench("async", async () => {});
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("sync", () => {});
|
||||
bench("async", async () => {});
|
||||
|
||||
BIN bench/bun.lockb (binary file not shown)
@@ -1,5 +1,5 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { copyFileSync, statSync, writeFileSync } from "node:fs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
function runner(ready) {
|
||||
for (let size of [1, 10, 100, 1000, 10000, 100000, 1000000, 10000000]) {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
const crypto = require("node:crypto");
|
||||
|
||||
const keyPair = crypto.generateKeyPairSync("rsa", {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
const crypto = require("node:crypto");
|
||||
|
||||
const keyPair = crypto.generateKeyPairSync("rsa", {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { expect } from "bun:test";
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const MAP_SIZE = 10_000;
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { expect } from "bun:test";
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const SET_SIZE = 10_000;
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { group } from "mitata";
|
||||
import EventEmitterNative from "node:events";
|
||||
import { group } from "../runner.mjs";
|
||||
|
||||
export const implementations = [
|
||||
{
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
import { groupForEmitter } from "./implementations.mjs";
|
||||
|
||||
var id = 0;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
import { groupForEmitter } from "./implementations.mjs";
|
||||
|
||||
var id = 0;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
import { groupForEmitter } from "./implementations.mjs";
|
||||
|
||||
// Pseudo RNG is derived from https://stackoverflow.com/a/424445
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const count = 100;
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const count = 100;
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { CString, dlopen, ptr } from "bun:ffi";
|
||||
import { bench, group, run } from "mitata";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
const { napiNoop, napiHash, napiString } = require(import.meta.dir + "/src/ffi_napi_bench.node");
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, group, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
const extension = "darwin" !== Deno.build.os ? "so" : "dylib";
|
||||
const path = new URL("src/target/release/libffi_napi_bench." + extension, import.meta.url).pathname;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { bench, group, run } from "mitata";
|
||||
import { createRequire } from "node:module";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
const { napiNoop, napiHash, napiString } = require("./src/ffi_napi_bench.node");
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import braces from "braces";
|
||||
import { bench, group, run } from "mitata";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
// const iterations = 1000;
|
||||
const iterations = 100;
|
||||
@@ -10,15 +10,16 @@ const veryComplexPattern = "{a,b,HI{c,e,LMAO{d,f}Q}}{1,2,{3,4},5}";
|
||||
|
||||
console.log(braces(complexPattern, { expand: true }));
|
||||
function benchPattern(pattern, name) {
|
||||
group({ name: `${name} pattern: "${pattern}"`, summary: true }, () => {
|
||||
const _name = `${name} pattern: "${pattern}"`;
|
||||
group({ name: _name, summary: true }, () => {
|
||||
if (typeof Bun !== "undefined")
|
||||
bench("Bun", () => {
|
||||
bench(`Bun (${_name})`, () => {
|
||||
for (let i = 0; i < iterations; i++) {
|
||||
Bun.$.braces(pattern);
|
||||
}
|
||||
});
|
||||
|
||||
bench("micromatch/braces", () => {
|
||||
bench(`micromatch/braces ${_name}`, () => {
|
||||
for (let i = 0; i < iterations; i++) {
|
||||
braces(pattern, { expand: true });
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import micromatch from "micromatch";
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
|
||||
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import fg from "fast-glob";
|
||||
import { fdir } from "fdir";
|
||||
import { bench, group, run } from "mitata";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
const normalPattern = "*.ts";
|
||||
const recursivePattern = "**/*.ts";
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { gunzipSync, gzipSync } from "bun";
|
||||
import { bench, group, run } from "mitata";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
const data = await Bun.file(require.resolve("@babel/standalone/babel.min.js")).arrayBuffer();
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const data = new TextEncoder().encode("Hello World!".repeat(9999));
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { readFileSync } from "fs";
|
||||
import { bench, run } from "mitata";
|
||||
import { createRequire } from "module";
|
||||
import { gunzipSync, gzipSync } from "zlib";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
const data = readFileSync(require.resolve("@babel/standalone/babel.min.js"));
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("console.log('hello')", () => console.log("hello"));
|
||||
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("console.log", () => console.log("hello"));
|
||||
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("console.log", () => console.log("hello"));
|
||||
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../../runner.mjs";
|
||||
import {
|
||||
arch,
|
||||
cpus,
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { bench, run } from "mitata";
|
||||
import {
|
||||
arch,
|
||||
cpus,
|
||||
@@ -19,6 +18,7 @@ import {
|
||||
userInfo,
|
||||
version,
|
||||
} from "node:os";
|
||||
import { bench, run } from "../../runner.mjs";
|
||||
|
||||
bench("cpus()", () => cpus());
|
||||
bench("networkInterfaces()", () => networkInterfaces());
|
||||
|
||||
@@ -13,7 +13,9 @@
|
||||
"execa": "^8.0.1",
|
||||
"fast-glob": "3.3.1",
|
||||
"fdir": "^6.1.0",
|
||||
"mitata": "^0.1.6",
|
||||
"mitata": "^1.0.10",
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"string-width": "7.1.0",
|
||||
"tinycolor2": "^1.6.0",
|
||||
"zx": "^7.2.3"
|
||||
|
||||
19 bench/runner.mjs Normal file
@@ -0,0 +1,19 @@
import * as Mitata from "mitata";
import process from "node:process";

const asJSON = !!process?.env?.BENCHMARK_RUNNER;

/** @param {Parameters<typeof Mitata["run"]>["0"]} opts */
export function run(opts = {}) {
  if (asJSON) {
    opts.format = "json";
  }

  return Mitata.run(opts);
}

export const bench = Mitata.bench;

export function group(_name, fn) {
  return Mitata.group(fn);
}
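For context, a minimal sketch of how a benchmark file is expected to consume this shim, assuming it sits one directory below `bench/` like the snippets updated in this diff; setting `BENCHMARK_RUNNER` flips `run()` into mitata's JSON output mode per the wrapper above:

```js
// bench/snippets/example.mjs (illustrative file name, not part of this diff).
import { bench, run } from "../runner.mjs";

bench("noop", () => {});

// `bun example.mjs` prints the usual table; `BENCHMARK_RUNNER=1 bun example.mjs` emits JSON.
await run();
```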
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
function doIt(...args) {
|
||||
// we use .at() to prevent constant folding optimizations
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// https://github.com/oven-sh/bun/issues/1096
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const identity = x => x;
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
var myArray = new Array(5);
|
||||
bench("[1, 2, 3, 4, 5].shift()", () => {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
var comparator = (a, b) => a - b;
|
||||
|
||||
const numbers = [
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// @runtime bun
|
||||
import { ArrayBufferSink } from "bun";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
var short = "Hello World!";
|
||||
var shortUTF16 = "Hello World 💕💕💕";
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import * as assert from "assert";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("deepEqual", () => {
|
||||
assert.deepEqual({ foo: "123", bar: "baz" }, { foo: "123", bar: "baz" });
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("noop", function () {});
|
||||
bench("async function(){}", async function () {});
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
function makeBenchmark(size) {
|
||||
const latin1 = btoa("A".repeat(size));
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("new Blob(['hello world'])", function () {
|
||||
return new Blob(["hello world"]);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
function makeBenchmark(size, isToString) {
|
||||
const base64Input = Buffer.alloc(size, "latin1").toString("base64");
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
|
||||
const first = Buffer.allocUnsafe(size);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
// @runtime bun,node,deno
|
||||
import { Buffer } from "node:buffer";
|
||||
import process from "node:process";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const N = parseInt(process.env.RUN_COUNTER ?? "10000", 10);
|
||||
var isBuffer = new Buffer(0);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
|
||||
for (let fillSize of [4, 8, 16, 11]) {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Buffer } from "node:buffer";
|
||||
import crypto from "node:crypto";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const bigBuffer = Buffer.from("hello world".repeat(10000));
|
||||
const converted = bigBuffer.toString("base64");
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
import Color from "color";
|
||||
import tinycolor from "tinycolor2";
|
||||
import { bench, group, run } from "./runner.mjs";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
const inputs = ["#f00", "rgb(255, 0, 0)", "rgba(255, 0, 0, 1)", "hsl(0, 100%, 50%)"];
|
||||
|
||||
for (const input of inputs) {
|
||||
group(`${input}`, () => {
|
||||
if (typeof Bun !== "undefined") {
|
||||
bench("Bun.color()", () => {
|
||||
bench(`Bun.color() (${input})`, () => {
|
||||
Bun.color(input, "css");
|
||||
});
|
||||
}
|
||||
|
||||
bench("color", () => {
|
||||
bench(`color (${input})`, () => {
|
||||
Color(input).hex();
|
||||
});
|
||||
|
||||
bench("'tinycolor2'", () => {
|
||||
bench(`'tinycolor2' (${input})`, () => {
|
||||
tinycolor(input).toHexString();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { allocUnsafe } from "bun";
|
||||
import { readFileSync } from "fs";
|
||||
import { bench, group, run } from "./runner.mjs";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
function polyfill(chunks) {
|
||||
var size = 0;
|
||||
@@ -41,15 +41,16 @@ const chunkGroups = [
|
||||
];
|
||||
|
||||
for (const chunks of chunkGroups) {
|
||||
group(`${chunks.reduce((prev, curr, i, a) => prev + curr.byteLength, 0)} bytes for ${chunks.length} chunks`, () => {
|
||||
bench("Bun.concatArrayBuffers", () => {
|
||||
const name = `${chunks.reduce((prev, curr, i, a) => prev + curr.byteLength, 0)} bytes for ${chunks.length} chunks`
|
||||
group(name, () => {
|
||||
bench(`Bun.concatArrayBuffers (${name})`, () => {
|
||||
Bun.concatArrayBuffers(chunks);
|
||||
});
|
||||
bench("Uint8Array.set", () => {
|
||||
bench(`Uint8Array.set (${name})`, () => {
|
||||
polyfill(chunks);
|
||||
});
|
||||
|
||||
bench("Uint8Array.set (uninitialized memory)", () => {
|
||||
bench(`Uint8Array.set (uninitialized memory) (${name})`, () => {
|
||||
polyfillUninitialized(chunks);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const json = {
|
||||
login: "wongmjane",
|
||||
|
||||
@@ -2,7 +2,7 @@ import { mkdirSync, rmSync, writeFileSync } from "fs";
|
||||
import { cp } from "fs/promises";
|
||||
import { tmpdir } from "os";
|
||||
import { join, resolve } from "path";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
import { fileURLToPath } from "url";
|
||||
const hugeDirectory = (() => {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// https://github.com/oven-sh/bun/issues/2190
|
||||
import { bench, run } from "mitata";
|
||||
import { createHash } from "node:crypto";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const data =
|
||||
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// so it can run in environments without node module resolution
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
import crypto from "node:crypto";
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("crypto.randomUUID()", () => {
|
||||
return crypto.randomUUID();
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// https://github.com/oven-sh/bun/issues/2190
|
||||
import { bench, run } from "mitata";
|
||||
import { createHash } from "node:crypto";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const data =
|
||||
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// so it can run in environments without node module resolution
|
||||
import crypto from "node:crypto";
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
var foo = new Uint8Array(65536);
|
||||
bench("crypto.getRandomValues(65536)", () => {
|
||||
crypto.getRandomValues(foo);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import fastDeepEquals from "fast-deep-equal/es6/index";
|
||||
import { bench, group, run } from "./runner.mjs";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
// const Date = globalThis.Date;
|
||||
|
||||
function func1() {}
|
||||
@@ -490,7 +490,7 @@ for (let { tests, description } of fixture) {
|
||||
var expected;
|
||||
group(describe, () => {
|
||||
for (let equalsFn of [Bun.deepEquals, fastDeepEquals]) {
|
||||
bench(equalsFn.name, () => {
|
||||
bench(`${describe}: ${equalsFn.name}`, () => {
|
||||
expected = equalsFn(value1, value2);
|
||||
if (expected !== equal) {
|
||||
throw new Error(`Expected ${expected} to be ${equal} for ${description}`);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const properties = {
|
||||
closed: {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { lookup, resolve } from "node:dns/promises";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("(cached) dns.lookup remote x 50", async () => {
|
||||
var tld = "example.com";
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { dns } from "bun";
|
||||
import { bench, group, run } from "./runner.mjs";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
async function forEachBackend(name, fn) {
|
||||
group(name, () => {
|
||||
for (let backend of ["libc", "c-ares", process.platform === "darwin" ? "system" : ""].filter(Boolean))
|
||||
bench(backend, fn(backend));
|
||||
bench(`${backend} (${name})`, fn(backend));
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const encoder = new TextEncoder();
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
var err = new Error();
|
||||
bench("Error.captureStackTrace(err)", () => {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, group, run } from "./runner.mjs";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
var bunEscapeHTML = globalThis.escapeHTML || Bun.escapeHTML;
|
||||
|
||||
@@ -92,24 +92,21 @@ function reactEscapeHtml(string) {
|
||||
// }
|
||||
|
||||
for (let input of [
|
||||
`long string, nothing to escape... `.repeat(9999999 * 3),
|
||||
"long string, nothing to escape... ".repeat(9999999 * 3),
|
||||
FIXTURE.repeat(8000),
|
||||
// "[unicode]" + FIXTURE_WITH_UNICODE,
|
||||
]) {
|
||||
const name = `"${input.substring(0, Math.min(input.length, 32))}" (${new Intl.NumberFormat().format(input.length / 100_000_000_0)} GB)`
|
||||
group(
|
||||
{
|
||||
summary: true,
|
||||
name:
|
||||
`"` +
|
||||
input.substring(0, Math.min(input.length, 32)) +
|
||||
`"` +
|
||||
` (${new Intl.NumberFormat().format(input.length / 100_000_000_0)} GB)`,
|
||||
name
|
||||
},
|
||||
() => {
|
||||
// bench(`ReactDOM.escapeHTML`, () => reactEscapeHtml(input));
|
||||
// bench(`html-entities.encode`, () => htmlEntityEncode(input));
|
||||
// bench(`he.escape`, () => heEscape(input));
|
||||
bench(`Bun.escapeHTML`, () => bunEscapeHTML(input));
|
||||
bench(`Bun.escapeHTML (${name})`, () => bunEscapeHTML(input));
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { dlopen } from "bun:ffi";
|
||||
import { bench, group, run } from "./runner.mjs";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
const types = {
|
||||
returns_true: {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// so it can run in environments without node module resolution
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const blob = new Blob(["foo", "bar", "baz"]);
|
||||
bench("FormData.append", () => {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
// pure JS implementation will optimze this out
|
||||
bench("new Headers", function () {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const input =
|
||||
"Hello, World! foo bar baz qux quux corge grault garply waldo fred plugh xyzzy thud z a b c d e f g h i j k l m n o p q r s t u v w x y z".split(
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
var obj = {
|
||||
"restApiRoot": "/api",
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("JSON.stringify({hello: 'world'})", () => JSON.stringify({ hello: "world" }));
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// This is a stress test of some internals in How Bun does the module.exports assignment.
|
||||
// If it crashes or throws then this fails
|
||||
import("./runner.mjs").then(({ bench, run }) => {
|
||||
import("../runner.mjs").then(({ bench, run }) => {
|
||||
bench("Object.defineProperty(module, 'exports', { get() { return 42; } })", () => {
|
||||
Object.defineProperty(module, "exports", {
|
||||
get() {
|
||||
@@ -36,7 +36,9 @@ import("./runner.mjs").then(({ bench, run }) => {
|
||||
a: 1,
|
||||
};
|
||||
|
||||
console.log(
|
||||
const log = !process?.env?.BENCHMARK_RUNNER ? console.log : () => {};
|
||||
|
||||
log(
|
||||
module?.exports,
|
||||
require.cache[module.id].exports,
|
||||
module?.exports === require.cache[module.id],
|
||||
@@ -49,10 +51,11 @@ import("./runner.mjs").then(({ bench, run }) => {
|
||||
return 42;
|
||||
};
|
||||
|
||||
console.log(module.exports, module.exports());
|
||||
log(module.exports);
|
||||
log(module.exports, module.exports());
|
||||
|
||||
queueMicrotask(() => {
|
||||
console.log(
|
||||
log(
|
||||
module?.exports,
|
||||
require.cache[module.id].exports,
|
||||
module?.exports === require.cache[module.id]?.exports,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
// These are no-op C++ functions that are exported to JS.
|
||||
const lazy = globalThis[Symbol.for("Bun.lazy")];
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { IncomingMessage } from "node:http";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const headers = {
|
||||
date: "Mon, 06 Nov 2023 05:12:49 GMT",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// @runtime node, bun
|
||||
import * as vm from "node:vm";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const context = {
|
||||
animal: "cat",
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
var noop = globalThis[Symbol.for("Bun.lazy")]("noop");
|
||||
var { function: noopFn, callback } = noop;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// so it can run in environments without node module resolution
|
||||
import { bench, run } from "../../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const obj = {
|
||||
a: 1,
|
||||
|
||||
@@ -24,7 +24,7 @@ const obj = {
|
||||
w: 23,
|
||||
};
|
||||
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
var val = 0;
|
||||
bench("Object.values(literal)", () => {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { posix } from "path";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const pathConfigurations = [
|
||||
"",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { pbkdf2 } from "node:crypto";
|
||||
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const password = "password";
|
||||
const salt = "salt";
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { peek } from "bun";
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
let pending = Bun.sleep(1000);
|
||||
let resolved = Promise.resolve(1);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
bench("performance.now x 1000", () => {
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
performance.now();
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
// This is a benchmark of the performance impact of using private properties.
|
||||
|
||||
bench("Polyfillprivate", () => {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("process.cwd()", () => {
|
||||
process.cwd();
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { performance } from "perf_hooks";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("process.memoryUsage()", () => {
|
||||
process.memoryUsage();
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("process.stderr.write('hey')", () => {
|
||||
process.stderr.write("hey");
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { renderToReadableStream as renderToReadableStreamBun } from "react-dom/server";
|
||||
import { renderToReadableStream } from "react-dom/server.browser";
|
||||
import { bench, group, run } from "./runner.mjs";
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
|
||||
const App = () => (
|
||||
<div>
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { createReadStream, writeFileSync } from "node:fs";
|
||||
import { tmpdir } from "node:os";
|
||||
import { sep } from "node:path";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
if (!Promise.withResolvers) {
|
||||
Promise.withResolvers = function () {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { readFileSync, writeFileSync } from "node:fs";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
var short = (function () {
|
||||
const text = "Hello World!";
|
||||
|
||||
@@ -4,7 +4,7 @@ import { readdir } from "fs/promises";
|
||||
import { relative, resolve } from "path";
|
||||
import { argv } from "process";
|
||||
import { fileURLToPath } from "url";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
let dir = resolve(argv.length > 2 ? argv[2] : fileURLToPath(new URL("../../node_modules", import.meta.url)));
|
||||
if (dir.includes(process.cwd())) {
|
||||
@@ -43,8 +43,11 @@ bench(`await readdir("${dir}", {recursive: false})`, async () => {
|
||||
});
|
||||
|
||||
await run();
|
||||
console.log("\n", count, "files/dirs in", dir, "\n", "SHA256:", hash, "\n");
|
||||
|
||||
if (count !== syncCount) {
|
||||
throw new Error(`Mismatched file counts: ${count} async !== ${syncCount} sync`);
|
||||
if (!process?.env?.BENCHMARK_RUNNER) {
|
||||
console.log("\n", count, "files/dirs in", dir, "\n", "SHA256:", hash, "\n");
|
||||
|
||||
if (count !== syncCount) {
|
||||
throw new Error(`Mismatched file counts: ${count} async !== ${syncCount} sync`);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { readFileSync } from "node:fs";
|
||||
import { readFile } from "node:fs/promises";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench(`readFileSync(/tmp/404-not-found)`, () => {
|
||||
try {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { realpathSync } from "node:fs";
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
|
||||
const arg = process.argv[process.argv.length - 1];
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// This mostly exists to check for a memory leak in response.clone()
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const req = new Request("http://localhost:3000/");
|
||||
const resp = await fetch("http://example.com");
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// This snippet mostly exists to reproduce a memory leak
|
||||
//
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const obj = {
|
||||
"id": 1296269,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
// This snippet mostly exists to reproduce a memory leak
|
||||
import { bench, run } from "mitata";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const obj = {
|
||||
"id": 1296269,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
bench("return await Promise.resolve(1)", async function () {
|
||||
return await Promise.resolve(1);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
const blob = new Blob(["<p id='foo'>Hello</p>"]);
|
||||
bench("prepend", async () => {
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.