Compare commits

...

124 Commits

Author SHA1 Message Date
Jarred Sumner
1d3a978dcc Try zero fill 2024-11-14 07:06:27 -08:00
Jarred Sumner
357581c61a Shrink Bun's binary by 3.5 MB (#15121) 2024-11-14 06:02:15 -08:00
pfg
d8987ccdb8 Remove assertion in js printer triggering for unicode comments (#15143) 2024-11-14 00:14:43 -08:00
Meghan Denny
fdd8d35845 allow zig js host functions to return JSError (#15120) 2024-11-13 21:11:56 -08:00
dave caruso
32ddf343ee bake: csr, streaming ssr, serve integration, safer jsvalue functions, &more (#14900)
Co-authored-by: paperdave <paperdave@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-13 18:19:12 -08:00
Meghan Denny
bceb0a2327 ci: fix release script (#15129) 2024-11-13 18:29:14 -07:00
Meghan Denny
9b0cdf01f9 cpp: Bun::toStringRef: return dead when exception has been thrown (#15127)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-11-13 17:03:59 -08:00
Meghan Denny
35513a9d6d zig: remove JSValue.isEmpty (#15128) 2024-11-13 16:04:13 -08:00
Meghan Denny
f8979b05b1 rid nearly all use of ExceptionRef in zig (#15100)
Co-authored-by: nektro <nektro@users.noreply.github.com>
2024-11-13 15:23:52 -08:00
ippsav
ec91e91fda Pass missing signal code for child_process.spawnSync (#15137) 2024-11-13 15:07:43 -08:00
Meghan Denny
956853f036 test: dont overwrite root package.json when running bun-ipc-inherit.test.ts (#15126) 2024-11-13 00:14:57 -08:00
Dylan Conway
c5df329772 #15059 follow-up (#15118) 2024-11-12 18:17:35 -08:00
Ciro Spaciari
e945146fde fix(bundler) fix pretty path resolution (#15119) 2024-11-12 18:16:13 -08:00
Ciro Spaciari
873b0a7540 fix(socket) Support named pipes on Windows using forward slashes (#15112) 2024-11-12 16:09:43 -08:00
Dennis Dudek
c785ab921b ci: Fix detection of changed files (#15114)
Co-authored-by: Ashcon Partovi <ashcon@partovi.net>
2024-11-12 15:27:27 -07:00
Meghan Denny
797958082c musl patches [v4] (#15066) 2024-11-11 19:23:58 -08:00
Jarred Sumner
2b9abc20da Use linux syscall interface more in I/O (#15067) 2024-11-11 14:47:04 -08:00
Jarred Sumner
d713001e35 Fixes #14982 2024-11-11 14:40:11 -08:00
Jarred Sumner
b49f6d143e Postgres client - more progress (#15086) 2024-11-11 14:40:02 -08:00
pfg
4cf9851747 Bump runtime transpiler cache version for #15009 (#15094) 2024-11-11 14:38:17 -08:00
pfg
56f7c8887b Fix unicode imports, unicode-escaped variable names, and printClauseAlias not working for utf-8 (#15009) 2024-11-11 13:27:42 -08:00
Ciro Spaciari
62cabe9003 fix(tests) new grpc certs (#15090) 2024-11-11 13:00:58 -08:00
Jarred Sumner
781a392baa Add micro-optimization to fs.readFile (#15076) 2024-11-11 10:35:17 -08:00
Jarred Sumner
d879f4370d Update hashing.md 2024-11-09 02:27:50 -08:00
Jarred Sumner
ae6e23ab28 Update hashing.md 2024-11-09 02:25:18 -08:00
Jarred Sumner
7a9165555d Update hashing.md 2024-11-09 02:19:24 -08:00
Michael H
b54137174b Bench updates (#15029)
Co-authored-by: RiskyMH <RiskyMH@users.noreply.github.com>
2024-11-08 23:15:24 -08:00
Dylan Conway
635789944b update 2024-11-08 21:14:42 -08:00
Jarred Sumner
a1c4f667d9 Support ${configDir} in tsconfig.json (#15063)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-11-08 17:08:54 -08:00
Zack Radisic
07dc1ae547 .defer(), .onStart(), and some small CSS changes (#15041) 2024-11-08 16:38:30 -08:00
Dylan Conway
8f5eab3c84 fix(install): package-lock.json migration and non-existent cafile fixes (#15059) 2024-11-08 14:34:44 -08:00
Jarred Sumner
6ec03b8b05 Add to documentation 2024-11-07 22:54:08 -08:00
Ciro Spaciari
183c661c40 net compatibility improvements (#14933)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-11-07 22:03:53 -08:00
Meghan Denny
c2e7643aa9 musl patches [v3] (#15031) 2024-11-07 21:31:32 -08:00
Jarred Sumner
376b1b4f97 Support preload option in Worker (#15045) 2024-11-07 21:30:51 -08:00
Jarred Sumner
27067d2a6d Allow using Proxy and module namespace objects in APIs (#15043) 2024-11-07 19:45:29 -08:00
Jarred Sumner
1e932ff38b [VSCode extension] Add [eval with bun] code lens on untitled, unsaved scratch JavaScript files (#14983) 2024-11-07 12:30:38 -08:00
Meghan Denny
a116b2281e musl patches [v2] (#15028) 2024-11-06 23:55:23 -08:00
Dylan Conway
66ba6ba061 fix 14957 (#15025) 2024-11-06 19:41:39 -08:00
Jarred Sumner
8d4bb080a3 Implement console.group (#15026) 2024-11-06 19:40:58 -08:00
Ashcon Partovi
bef50a9b9b ci: misc fixes and test runner changes (#15024) 2024-11-06 18:15:55 -07:00
Jarred Sumner
56b57012ed Bump! 2024-11-06 15:05:23 -08:00
Tim Ermilov
c39ae74d3e Fix importing of ES modules from data URIs (#14959) 2024-11-06 15:00:24 -08:00
Christophe Eymard
fc071d3362 remove checks for self closing tags (#14990) 2024-11-06 14:58:31 -08:00
Jarred Sumner
1a9d20e50a Support debugging builtin modules (#15018) 2024-11-06 14:57:35 -08:00
Jarred Sumner
8d8a6bc5c3 Fix prototypical inheritance issue in node:zlib and add a $toClass helper (#15015) 2024-11-06 14:56:10 -08:00
Jarred Sumner
076d1d3d36 Never try using epoll_pwait2 on old linux kernels (#14980) 2024-11-06 03:42:24 -08:00
Jarred Sumner
484c3de861 Show if napi_module_register or dlopen was called in crash reports (#15008) 2024-11-05 20:57:55 -08:00
Meghan Denny
27a1b2413b patches to allow linux-musl to bootstrap (#14994) 2024-11-05 17:22:05 -08:00
Amit Dhamu
3950873272 Fix typo for configuring specific registry (#15001) 2024-11-05 08:11:57 -08:00
pfg
6fb73f2011 Visually center logo in readme (#14969) 2024-11-04 15:28:16 -07:00
pfg
497fa59bf0 Fix #14865 (#14953) 2024-11-02 17:13:31 -07:00
Jarred Sumner
5e5e7c60f1 Fix release build 2024-11-01 20:41:20 -07:00
Ciro Spaciari
85fd471d9d fix(net) fix bytesWritten drain (#14949) 2024-11-01 19:43:42 -07:00
Jarred Sumner
6914c5e32c Fixes #13816 (#13906)
Co-authored-by: pfg <pfg@pfg.pw>
Co-authored-by: Ryan Gonzalez <git@refi64.dev>
Co-authored-by: Ben Grant <ben@bun.sh>
Co-authored-by: Dave Caruso <me@paperdave.net>
2024-11-01 18:38:01 -07:00
Jarred Sumner
ce2afac827 Align "encoding" option in node fs with node (#14937) 2024-11-01 18:16:04 -07:00
Ciro Spaciari
5236d974b5 revert 2024-11-01 17:25:59 -07:00
Ciro Spaciari
6e448619d0 fix drain event, drain must be called only after internal buffer is drained 2024-11-01 17:23:00 -07:00
Jarred Sumner
c89a958299 Fixes #11754 (#14948) 2024-11-01 15:35:28 -07:00
Jarred Sumner
d75488124d Inline process.versions.bun in bun build --compile (#14940) 2024-11-01 14:45:19 -07:00
Ashcon Partovi
4d269995ad Run tests from npm packages, elysia to start (#14932) 2024-11-01 11:57:47 -07:00
190n
71fdb59918 Fix napi property methods on non-objects (#14935) 2024-10-31 21:02:26 -07:00
Dylan Conway
62881ee36b Redact secrets in bunfig.toml and npmrc logs (#14919) 2024-10-31 18:44:24 -07:00
Dylan Conway
6933208790 fix(install): only globally link requested packages (#12506)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-31 18:21:04 -07:00
Jarred Sumner
b1e9e3b31b Add bytesWritten property to Bun.Socket, fix encoding issue in node:net (#14516)
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
2024-10-31 17:54:07 -07:00
Jarred Sumner
7035a1107e Fixes #14918 (#14921) 2024-10-31 14:26:19 -07:00
Ashcon Partovi
353d44f1ae ci: If only tests change, use artifacts from last successful build (#14927) 2024-10-31 12:50:09 -07:00
Jarred Sumner
4b8ca51b87 Clean up some code in node validators (#14897) 2024-10-31 12:28:07 -07:00
Ciro Spaciari
f8d5b2e6e2 Fix module resolution cache keys (#14901)
Co-authored-by: dave caruso <me@paperdave.net>
Co-authored-by: cirospaciari <cirospaciari@users.noreply.github.com>
2024-10-30 22:06:21 -07:00
190n
9647291d73 Implement NAPI type tagging (#14915) 2024-10-30 19:57:48 -07:00
Jarred Sumner
eaa088ba55 Fix missing symbol errors and add a test (#14907)
Co-authored-by: Jarred Sumner <jarred@bun.sh>
2024-10-30 19:55:42 -07:00
Gerd Jungbluth
955cc6265b fix(docs): add missing character in drizzle guide (#14911) 2024-10-30 08:42:38 -07:00
Dylan Conway
489890deb1 fix(install): check cached package.jsons (#14899) 2024-10-29 18:55:52 -07:00
pfg
d7710c6c67 Fix additional arguments when running a package.json script (#14895)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-29 18:55:32 -07:00
Jarred Sumner
9f70f68f00 EventEmitter.name should be "EventEmitter" instead of "EventEmitter2" (#14898) 2024-10-29 18:42:24 -07:00
Jarred Sumner
240b2a539f Introduce Bun.randomUUIDv7 (#14858)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-10-29 16:39:09 -07:00
Jarred Sumner
b9e5420571 Add https://github.com/uNetworking/uWebSockets/pull/1792 (#14864) 2024-10-29 12:56:25 -07:00
Jarred Sumner
b5a73130ad Reduce memory usage in long-running processes (#14885) 2024-10-29 12:56:10 -07:00
Jarred Sumner
d5f9978007 Fix missing symbol error on llvm 18 2024-10-29 00:08:29 -07:00
pfg
698e87aa67 Fix #14187 (#14884) 2024-10-28 18:11:03 -07:00
Zack Radisic
5502278f3e CSS: More stuff and tests (#14832)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-27 13:49:25 -07:00
Jarred Sumner
f005e8c057 Fix HTTP spec issues by upgrading uWS version (#14853) 2024-10-27 12:34:45 -07:00
dave caruso
e93c5ad993 feat(bake): css, production build, dev separateSSRGraph=false (#14622)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: paperdave <paperdave@users.noreply.github.com>
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-10-27 01:57:36 -07:00
Meghan Denny
5237869101 bun-install-registry.test.ts: remove ini format hint here (#14803) 2024-10-26 16:51:19 -07:00
Jarred Sumner
2456d70ac7 Fixes #14716 (#14834) 2024-10-26 15:15:13 -07:00
Meghan Denny
50d80a805d pm: fix weird package.json formatting after install (#14801) 2024-10-26 01:36:25 -07:00
Meghan Denny
2d9a73fc07 test: fix expected value of 'should perform bin-linking across multiple dependencies' (#14833) 2024-10-26 01:02:24 -07:00
Jarred Sumner
d0b3802a79 github actions 2024-10-25 23:50:12 -07:00
Jarred Sumner
7053212566 Update associate-issue-with-sentry.ts 2024-10-25 23:47:15 -07:00
Jarred Sumner
4f5660a6f7 Add sentry id to crash report comment 2024-10-25 23:40:27 -07:00
Dylan Conway
87279392cf fix 9395 (#14815) 2024-10-25 19:58:45 -07:00
Bjorn Beishline
7f5860331e Fixed compilation issues with no outdir (#14717)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2024-10-25 18:43:58 -07:00
Dylan Conway
b895738156 fix(install): migrate package-lock.json with dependency on root package (#14811)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-25 01:32:17 -07:00
Dylan Conway
61534c7efe Remove warning for unused registry options from npmrc (#14813) 2024-10-25 01:31:39 -07:00
Jarred Sumner
ec4c9f8f84 Update mimalloc (#14814) 2024-10-25 01:31:24 -07:00
Jarred Sumner
35a64d8585 Bump WebKit (#14812) 2024-10-25 01:31:12 -07:00
Minsoo Choo
eb6995e09b Update SvelteKit usage guide (#14777) 2024-10-25 00:04:32 -07:00
Meghan Denny
1391e5269b Revert "ci: merge clang-format and clang-tidy into single pipeline" (#14809) 2024-10-25 00:04:13 -07:00
Dylan Conway
9621b641a1 update test/bun.lockb (#14746) 2024-10-25 00:03:52 -07:00
Dylan Conway
5eaa7301eb fix(install): patches with bin in package.json (#14807) 2024-10-25 00:03:19 -07:00
Arthur
f21870a06c chore(console): updated jsdoc table (#14792) 2024-10-24 21:20:46 -07:00
Don Isaac
0e4006eefd ci: merge clang-format and clang-tidy into single pipeline (#14798) 2024-10-24 15:26:05 -07:00
Dylan Conway
9643a924e1 bump 2024-10-24 14:24:08 -07:00
Dylan Conway
247456b675 fix(install): continue install if optional postinstall fails (#14783) 2024-10-23 21:58:53 -07:00
Meghan Denny
6f60523e6c " -> ' (#14776)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-23 19:21:36 -07:00
Jarred Sumner
2de2e9f600 48 -> 64 2024-10-23 18:27:02 -07:00
Ciro Spaciari
29bf8a505d fix(tests) pq -> pg + populate before (#14748) 2024-10-23 18:01:06 -07:00
Jarred Sumner
93d115f9b7 Reduce default max network connection limit from 256 to 48 in bun install (#14755) 2024-10-23 15:34:16 -07:00
Ashcon Partovi
74e440d58a ci: Set prioritization based on fork, main branch, or queue 2024-10-23 09:16:48 -07:00
Ashcon Partovi
aa4dde976d ci: Fix pipeline script when on main branch 2024-10-23 09:03:06 -07:00
Ashcon Partovi
eb0e9b9bde ci: Skip builds when only docs are changed (#14751) 2024-10-23 08:54:53 -07:00
Liran Tal
a656cc1b70 docs: fix missing code highlight in spawn.md (#14761) 2024-10-23 01:01:21 -07:00
Ashcon Partovi
4044ff740d ci: add scripts for building macOS images (#14743) 2024-10-22 16:07:12 -07:00
Ashcon Partovi
b9240f6ec7 cmake: only enable LTO when release + linux + ci 2024-10-22 13:10:58 -07:00
Eckhardt (Kaizen) Dreyer
3db0191409 fix(install): Skip optional dependencies if false in bunfig.toml (#14629) 2024-10-22 11:55:10 -07:00
Oliver Medhurst
00b055566e contributing: fix fedora llvm install steps (#14726) 2024-10-22 11:40:46 -07:00
snwy
517cdc1392 fix jsx symbol collisions when importing own variables with same names (#14343)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-22 00:17:18 -07:00
Pham Minh Triet
8b4b55725e Fix(doc): update Next.js guide (#14730) 2024-10-22 00:16:15 -07:00
Jarred Sumner
38d39109b3 Fix assertion failure 2024-10-21 21:46:17 -07:00
Jarred Sumner
ec29311c7a Bump 2024-10-21 18:05:10 -07:00
Ciro Spaciari
fe8d0079ec tls(Server) fix connectionListener and make alpnProtocol more compatible with node.js (#14695)
Co-authored-by: cirospaciari <cirospaciari@users.noreply.github.com>
2024-10-21 01:58:14 +00:00
Jarred Sumner
8063e9d6b8 Fixes #14411 (#14691) 2024-10-20 22:02:44 +00:00
Vaggelis Papadogiannakis
ae8de1926e Update instructions to run a bun application via pm2 with the use… (#14704) 2024-10-20 15:06:45 -07:00
Minsoo Choo
b9b94de5ed icu required on openSUSE for local webkit build (#14690) 2024-10-20 01:08:42 -07:00
678 changed files with 100637 additions and 17722 deletions

.buildkite/ci.mjs Normal file (453 changed lines)
View File

@@ -0,0 +1,453 @@
#!/usr/bin/env node
/**
* Build and test Bun on macOS, Linux, and Windows.
* @link https://buildkite.com/docs/pipelines/defining-steps
*/
import { writeFileSync } from "node:fs";
import { join } from "node:path";
import {
getCanaryRevision,
getChangedFiles,
getCommit,
getCommitMessage,
getLastSuccessfulBuild,
getMainBranch,
getRepositoryOwner,
getTargetBranch,
isBuildkite,
isFork,
isMainBranch,
isMergeQueue,
printEnvironment,
spawnSafe,
} from "../scripts/utils.mjs";
function toYaml(obj, indent = 0) {
const spaces = " ".repeat(indent);
let result = "";
for (const [key, value] of Object.entries(obj)) {
if (value === undefined) {
continue;
}
if (value === null) {
result += `${spaces}${key}: null\n`;
continue;
}
if (Array.isArray(value)) {
result += `${spaces}${key}:\n`;
value.forEach(item => {
if (typeof item === "object" && item !== null) {
result += `${spaces}- \n${toYaml(item, indent + 2)
.split("\n")
.map(line => `${spaces} ${line}`)
.join("\n")}\n`;
} else {
result += `${spaces}- ${item}\n`;
}
});
continue;
}
if (typeof value === "object") {
result += `${spaces}${key}:\n${toYaml(value, indent + 2)}`;
continue;
}
if (
typeof value === "string" &&
(value.includes(":") || value.includes("#") || value.includes("'") || value.includes('"') || value.includes("\n"))
) {
result += `${spaces}${key}: "${value.replace(/"/g, '\\"')}"\n`;
continue;
}
result += `${spaces}${key}: ${value}\n`;
}
return result;
}
function getPipeline(buildId) {
/**
* Helpers
*/
const getKey = platform => {
const { os, arch, abi, baseline } = platform;
if (abi) {
if (baseline) {
return `${os}-${arch}-${abi}-baseline`;
}
return `${os}-${arch}-${abi}`;
}
if (baseline) {
return `${os}-${arch}-baseline`;
}
return `${os}-${arch}`;
};
const getLabel = platform => {
const { os, arch, abi, baseline, release } = platform;
let label = release ? `:${os}: ${release} ${arch}` : `:${os}: ${arch}`;
if (abi) {
label += `-${abi}`;
}
if (baseline) {
label += `-baseline`;
}
return label;
};
// https://buildkite.com/docs/pipelines/command-step#retry-attributes
const getRetry = (limit = 3) => {
return {
automatic: [
{ exit_status: 1, limit: 1 },
{ exit_status: -1, limit },
{ exit_status: 255, limit },
{ signal_reason: "agent_stop", limit },
],
};
};
// https://buildkite.com/docs/pipelines/managing-priorities
const getPriority = () => {
if (isFork()) {
return -1;
}
if (isMainBranch()) {
return 2;
}
if (isMergeQueue()) {
return 1;
}
return 0;
};
/**
* Steps
*/
const getBuildVendorStep = platform => {
const { os, arch, abi, baseline } = platform;
return {
key: `${getKey(platform)}-build-vendor`,
label: `build-vendor`,
agents: {
os,
arch,
abi,
queue: abi ? `build-${os}-${abi}` : `build-${os}`,
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
ENABLE_BASELINE: baseline ? "ON" : "OFF",
},
command: "bun run build:ci --target dependencies",
};
};
const getBuildCppStep = platform => {
const { os, arch, abi, baseline } = platform;
return {
key: `${getKey(platform)}-build-cpp`,
label: `build-cpp`,
agents: {
os,
arch,
abi,
queue: abi ? `build-${os}-${abi}` : `build-${os}`,
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
BUN_CPP_ONLY: "ON",
ENABLE_BASELINE: baseline ? "ON" : "OFF",
},
command: "bun run build:ci --target bun",
};
};
const getBuildZigStep = platform => {
const { os, arch, abi, baseline } = platform;
const toolchain = getKey(platform);
return {
key: `${getKey(platform)}-build-zig`,
label: `build-zig`,
agents: {
queue: "build-zig",
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
ENABLE_BASELINE: baseline ? "ON" : "OFF",
},
command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`,
};
};
const getBuildBunStep = platform => {
const { os, arch, abi, baseline } = platform;
return {
key: `${getKey(platform)}-build-bun`,
label: `build-bun`,
depends_on: [
`${getKey(platform)}-build-vendor`,
`${getKey(platform)}-build-cpp`,
`${getKey(platform)}-build-zig`,
],
agents: {
os,
arch,
abi,
queue: `build-${os}`,
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
BUN_LINK_ONLY: "ON",
ENABLE_BASELINE: baseline ? "ON" : "OFF",
},
command: "bun run build:ci --target bun",
};
};
const getTestBunStep = platform => {
const { os, arch, abi, distro, release } = platform;
let name;
if (os === "darwin" || os === "windows") {
name = getLabel({ ...platform, release });
} else {
name = getLabel({ ...platform, os: distro, release });
}
let agents;
if (os === "darwin") {
agents = { os, arch, abi, queue: `test-darwin` };
} else if (os === "windows") {
agents = { os, arch, abi, robobun: true };
} else {
agents = { os, arch, abi, distro, release, robobun: true };
}
let command;
if (os === "windows") {
command = `node .\\scripts\\runner.node.mjs --step ${getKey(platform)}-build-bun`;
} else {
command = `./scripts/runner.node.mjs --step ${getKey(platform)}-build-bun`;
}
let parallelism;
if (os === "darwin") {
parallelism = 2;
} else {
parallelism = 10;
}
let depends;
let env;
if (buildId) {
env = {
BUILDKITE_ARTIFACT_BUILD_ID: buildId,
};
} else {
depends = [`${getKey(platform)}-build-bun`];
}
let retry;
if (os !== "windows") {
// When the runner fails on Windows, Buildkite only detects an exit code of 1.
// Because of this, we don't know if the run was fatal, or soft-failed.
retry = getRetry();
}
return {
key: `${getKey(platform)}-${distro}-${release.replace(/\./g, "")}-test-bun`,
label: `${name} - test-bun`,
depends_on: depends,
agents,
retry,
cancel_on_build_failing: isMergeQueue(),
soft_fail: isMainBranch(),
parallelism,
command,
env,
};
};
/**
* Config
*/
const buildPlatforms = [
{ os: "darwin", arch: "aarch64" },
{ os: "darwin", arch: "x64" },
{ os: "linux", arch: "aarch64" },
// { os: "linux", arch: "aarch64", abi: "musl" }, // TODO:
{ os: "linux", arch: "x64" },
{ os: "linux", arch: "x64", baseline: true },
// { os: "linux", arch: "x64", abi: "musl" }, // TODO:
{ os: "windows", arch: "x64" },
{ os: "windows", arch: "x64", baseline: true },
];
const testPlatforms = [
{ os: "darwin", arch: "aarch64", distro: "sonoma", release: "14" },
{ os: "darwin", arch: "aarch64", distro: "ventura", release: "13" },
{ os: "darwin", arch: "x64", distro: "sonoma", release: "14" },
{ os: "darwin", arch: "x64", distro: "ventura", release: "13" },
{ os: "linux", arch: "aarch64", distro: "debian", release: "12" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04" },
// { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "edge" }, // TODO:
{ os: "linux", arch: "x64", distro: "debian", release: "12" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04" },
{ os: "linux", arch: "x64", distro: "debian", release: "12", baseline: true },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04", baseline: true },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", baseline: true },
// { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "edge" }, // TODO:
{ os: "windows", arch: "x64", distro: "server", release: "2019" },
{ os: "windows", arch: "x64", distro: "server", release: "2019", baseline: true },
];
return {
priority: getPriority(),
steps: [
...buildPlatforms.map(platform => {
const { os, arch, baseline } = platform;
let steps = [
...testPlatforms
.filter(platform => platform.os === os && platform.arch === arch && baseline === platform.baseline)
.map(platform => getTestBunStep(platform)),
];
if (!buildId) {
steps.unshift(
getBuildVendorStep(platform),
getBuildCppStep(platform),
getBuildZigStep(platform),
getBuildBunStep(platform),
);
}
return {
key: getKey(platform),
group: getLabel(platform),
steps,
};
}),
],
};
}
async function main() {
printEnvironment();
console.log("Checking last successful build...");
const lastBuild = await getLastSuccessfulBuild();
if (lastBuild) {
const { id, path, commit_id: commit } = lastBuild;
console.log(" - Build ID:", id);
console.log(" - Build URL:", new URL(path, "https://buildkite.com/").toString());
console.log(" - Commit:", commit);
} else {
console.log(" - No build found");
}
console.log("Checking changed files...");
const baseRef = isFork() ? `${getRepositoryOwner()}:${getCommit()}` : getCommit();
console.log(" - Base Ref:", baseRef);
const headRef = lastBuild?.commit_id || getTargetBranch() || getMainBranch();
console.log(" - Head Ref:", headRef);
const changedFiles = await getChangedFiles(undefined, baseRef, headRef);
if (changedFiles) {
if (changedFiles.length) {
changedFiles.forEach(filename => console.log(` - ${filename}`));
} else {
console.log(" - No changed files");
}
}
const isDocumentationFile = filename => /^(\.vscode|\.github|bench|docs|examples)|\.(md)$/i.test(filename);
const isTestFile = filename => /^test/i.test(filename) || /runner\.node\.mjs$/i.test(filename);
console.log("Checking if CI should be skipped...");
{
const message = getCommitMessage();
const match = /\[(skip ci|no ci|ci skip|ci no)\]/i.exec(message);
if (match) {
const [, reason] = match;
console.log(" - Yes, because commit message contains:", reason);
return;
}
}
if (changedFiles && changedFiles.every(filename => isDocumentationFile(filename))) {
console.log(" - Yes, because all changed files are documentation");
return;
}
console.log("Checking if build should be skipped...");
let skipBuild;
{
const message = getCommitMessage();
const match = /\[(only tests?|tests? only|skip build|no build|build skip|build no)\]/i.exec(message);
if (match) {
const [, reason] = match;
console.log(" - Yes, because commit message contains:", reason);
skipBuild = true;
}
}
if (changedFiles && changedFiles.every(filename => isTestFile(filename) || isDocumentationFile(filename))) {
console.log(" - Yes, because all changed files are tests or documentation");
skipBuild = true;
}
console.log("Checking if build is a named release...");
let buildRelease;
{
const message = getCommitMessage();
const match = /\[(release|release build|build release)\]/i.exec(message);
if (match) {
const [, reason] = match;
console.log(" - Yes, because commit message contains:", reason);
buildRelease = true;
}
}
console.log("Generating pipeline...");
const pipeline = getPipeline(lastBuild && skipBuild ? lastBuild.id : undefined);
const content = toYaml(pipeline);
const contentPath = join(process.cwd(), ".buildkite", "ci.yml");
writeFileSync(contentPath, content);
console.log("Generated pipeline:");
console.log(" - Path:", contentPath);
console.log(" - Size:", (content.length / 1024).toFixed(), "KB");
if (isBuildkite) {
console.log("Setting canary revision...");
const canaryRevision = buildRelease ? 0 : await getCanaryRevision();
await spawnSafe(["buildkite-agent", "meta-data", "set", "canary", `${canaryRevision}`]);
console.log("Uploading pipeline...");
await spawnSafe(["buildkite-agent", "pipeline", "upload", contentPath]);
}
}
await main();
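
For reference, the shell wrapper elsewhere in this changeset invokes this generator with `node .buildkite/ci.mjs`, which writes the pipeline to `.buildkite/ci.yml`. A rough local invocation (a sketch only; running it from the repository root and outside Buildkite is an assumption, and the Buildkite/GitHub API lookups may return nothing in that case) might look like:

```bash
# Generate the pipeline YAML from the repository root; the script writes
# .buildkite/ci.yml relative to the current working directory.
node .buildkite/ci.mjs

# Inspect the generated steps.
cat .buildkite/ci.yml
```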

View File

@@ -1,790 +0,0 @@
# Build and test Bun on macOS, Linux, and Windows.
# https://buildkite.com/docs/pipelines/defining-steps
#
# If a step has the `robobun: true` label, robobun will listen
# to webhooks from Buildkite and provision a VM to run the step.
#
# Changes to this file will be automatically uploaded on the next run
# for a particular commit.
steps:
# macOS aarch64
- key: "darwin-aarch64"
group: ":darwin: aarch64"
steps:
- key: "darwin-aarch64-build-deps"
label: "build-deps"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
command:
- "bun run build:ci --target dependencies"
- key: "darwin-aarch64-build-cpp"
label: "build-cpp"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-aarch64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain darwin-aarch64"
- key: "darwin-aarch64-build-bun"
label: "build-bun"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
depends_on:
- "darwin-aarch64-build-deps"
- "darwin-aarch64-build-cpp"
- "darwin-aarch64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-aarch64-test-macos-14"
label: ":darwin: 14 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-aarch64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "aarch64"
release: "14"
command:
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
- key: "darwin-aarch64-test-macos-13"
label: ":darwin: 13 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-aarch64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "aarch64"
release: "13"
command:
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
# macOS x64
- key: "darwin-x64"
group: ":darwin: x64"
steps:
- key: "darwin-x64-build-deps"
label: "build-deps"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
command:
- "bun run build:ci --target dependencies"
- key: "darwin-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain darwin-x64"
- key: "darwin-x64-build-bun"
label: "build-bun"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
depends_on:
- "darwin-x64-build-deps"
- "darwin-x64-build-cpp"
- "darwin-x64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-x64-test-macos-14"
label: ":darwin: 14 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-x64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "x64"
release: "14"
command:
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
- key: "darwin-x64-test-macos-13"
label: ":darwin: 13 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-x64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "x64"
release: "13"
command:
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
# Linux x64
- key: "linux-x64"
group: ":linux: x64"
steps:
- key: "linux-x64-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
command:
- "bun run build:ci --target dependencies"
- key: "linux-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain linux-x64"
- key: "linux-x64-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
depends_on:
- "linux-x64-build-deps"
- "linux-x64-build-cpp"
- "linux-x64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-test-debian-12"
label: ":debian: 12 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
- key: "linux-x64-test-ubuntu-2204"
label: ":ubuntu: 22.04 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
- key: "linux-x64-test-ubuntu-2004"
label: ":ubuntu: 20.04 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
# Linux x64-baseline
- key: "linux-x64-baseline"
group: ":linux: x64-baseline"
steps:
- key: "linux-x64-baseline-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target dependencies"
- key: "linux-x64-baseline-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
ENABLE_BASELINE: "ON"
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-baseline-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target bun-zig --toolchain linux-x64-baseline"
- key: "linux-x64-baseline-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
depends_on:
- "linux-x64-baseline-build-deps"
- "linux-x64-baseline-build-cpp"
- "linux-x64-baseline-build-zig"
env:
ENABLE_BASELINE: "ON"
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-baseline-test-debian-12"
label: ":debian: 12 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
- key: "linux-x64-baseline-test-ubuntu-2204"
label: ":ubuntu: 22.04 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
- key: "linux-x64-baseline-test-ubuntu-2004"
label: ":ubuntu: 20.04 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
# Linux aarch64
- key: "linux-aarch64"
group: ":linux: aarch64"
steps:
- key: "linux-aarch64-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
command:
- "bun run build:ci --target dependencies"
- key: "linux-aarch64-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-aarch64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain linux-aarch64"
- key: "linux-aarch64-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
depends_on:
- "linux-aarch64-build-deps"
- "linux-aarch64-build-cpp"
- "linux-aarch64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-aarch64-test-debian-12"
label: ":debian: 12 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
- key: "linux-aarch64-test-ubuntu-2204"
label: ":ubuntu: 22.04 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
- key: "linux-aarch64-test-ubuntu-2004"
label: ":ubuntu: 20.04 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
# Windows x64
- key: "windows-x64"
group: ":windows: x64"
steps:
- key: "windows-x64-build-deps"
label: "build-deps"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
command:
- "bun run build:ci --target dependencies"
- key: "windows-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain windows-x64"
- key: "windows-x64-build-bun"
label: "build-bun"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
depends_on:
- "windows-x64-build-deps"
- "windows-x64-build-cpp"
- "windows-x64-build-zig"
retry:
automatic:
- exit_status: 255
limit: 5
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-test-bun"
label: ":windows: x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 1
retry:
automatic:
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "windows-x64-build-bun"
agents:
robobun: "true"
os: "windows"
arch: "x64"
command:
- "node .\\scripts\\runner.node.mjs --step windows-x64-build-bun"
# Windows x64-baseline
- key: "windows-x64-baseline"
group: ":windows: x64-baseline"
steps:
- key: "windows-x64-baseline-build-deps"
label: "build-deps"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target dependencies"
- key: "windows-x64-baseline-build-cpp"
label: "build-cpp"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-baseline-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target bun-zig --toolchain windows-x64-baseline"
- key: "windows-x64-baseline-build-bun"
label: "build-bun"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
depends_on:
- "windows-x64-baseline-build-deps"
- "windows-x64-baseline-build-cpp"
- "windows-x64-baseline-build-zig"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-baseline-test-bun"
label: ":windows: x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 1
retry:
automatic:
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "windows-x64-baseline-build-bun"
agents:
robobun: "true"
os: "windows"
arch: "x64"
command:
- "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun"

View File

@@ -2,96 +2,10 @@
set -eo pipefail
function assert_build() {
if [ -z "$BUILDKITE_REPO" ]; then
echo "error: Cannot find repository for this build"
exit 1
fi
if [ -z "$BUILDKITE_COMMIT" ]; then
echo "error: Cannot find commit for this build"
exit 1
fi
}
function assert_buildkite_agent() {
if ! command -v buildkite-agent &> /dev/null; then
echo "error: Cannot find buildkite-agent, please install it:"
echo "https://buildkite.com/docs/agent/v3/install"
exit 1
fi
}
function assert_jq() {
assert_command "jq" "jq" "https://stedolan.github.io/jq/"
}
function assert_curl() {
assert_command "curl" "curl" "https://curl.se/download.html"
}
function assert_command() {
local command="$1"
local package="$2"
local help_url="$3"
if ! command -v "$command" &> /dev/null; then
echo "warning: $command is not installed, installing..."
if command -v brew &> /dev/null; then
HOMEBREW_NO_AUTO_UPDATE=1 brew install "$package"
else
echo "error: Cannot install $command, please install it"
if [ -n "$help_url" ]; then
echo ""
echo "hint: See $help_url for help"
fi
exit 1
fi
fi
}
function assert_release() {
if [ "$RELEASE" == "1" ]; then
run_command buildkite-agent meta-data set canary "0"
fi
}
function assert_canary() {
local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
if [ -z "$canary" ]; then
local repo=$(echo "$BUILDKITE_REPO" | sed -E 's#https://github.com/([^/]+)/([^/]+).git#\1/\2#g')
local tag="$(curl -sL "https://api.github.com/repos/$repo/releases/latest" | jq -r ".tag_name")"
if [ "$tag" == "null" ]; then
canary="1"
else
local revision=$(curl -sL "https://api.github.com/repos/$repo/compare/$tag...$BUILDKITE_COMMIT" | jq -r ".ahead_by")
if [ "$revision" == "null" ]; then
canary="1"
else
canary="$revision"
fi
fi
run_command buildkite-agent meta-data set canary "$canary"
fi
}
function upload_buildkite_pipeline() {
local path="$1"
if [ ! -f "$path" ]; then
echo "error: Cannot find pipeline: $path"
exit 1
fi
run_command buildkite-agent pipeline upload "$path"
}
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
assert_build
assert_buildkite_agent
assert_jq
assert_curl
assert_release
assert_canary
upload_buildkite_pipeline ".buildkite/ci.yml"
run_command node ".buildkite/ci.mjs"

View File

@@ -162,6 +162,27 @@ function upload_s3_file() {
run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
}
function send_bench_webhook() {
if [ -z "$BENCHMARK_URL" ]; then
echo "error: \$BENCHMARK_URL is not set"
# exit 1 # TODO: this isn't live yet
return
fi
local tag="$1"
local commit="$BUILDKITE_COMMIT"
local artifact_path="${commit}"
if [ "$tag" == "canary" ]; then
artifact_path="${commit}-canary"
fi
local artifact_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/$artifact_path/bun-linux-x64.zip"
local webhook_url="$BENCHMARK_URL?tag=$tag&commit=$commit&artifact_url=$artifact_url"
curl -X POST "$webhook_url"
}
function create_release() {
assert_main
assert_buildkite_agent
@@ -206,6 +227,7 @@ function create_release() {
update_github_release "$tag"
create_sentry_release "$tag"
send_bench_webhook "$tag"
}
function assert_canary() {

View File

@@ -11,5 +11,8 @@ packages/**/bun-profile
src/bun.js/WebKit
src/bun.js/WebKit/LayoutTests
zig-build
zig-cache
zig-out
.zig-cache
zig-out
build
vendor
node_modules

.gitattributes vendored (2 changed lines)
View File

@@ -49,3 +49,5 @@ vendor/brotli/** linguist-vendored
test/js/node/test/fixtures linguist-vendored
test/js/node/test/common linguist-vendored
test/js/bun/css/files linguist-vendored

View File

@@ -83,6 +83,26 @@ jobs:
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
rm -rf is-outdated.txt outdated.txt latest.txt
- name: Generate comment text with Sentry Link
if: github.event.label.name == 'crash'
# ignore if fail
continue-on-error: true
id: generate-comment-text
env:
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_EVENTS_SECRET }}
shell: bash
run: |
bun scripts/associate-issue-with-sentry.ts
if [[ -f "sentry-link.txt" ]]; then
echo "sentry-link=$(cat sentry-link.txt)" >> $GITHUB_OUTPUT
fi
if [[ -f "sentry-id.txt" ]]; then
echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT
fi
- name: Add labels
uses: actions-cool/issues-helper@v3
if: github.event.label.name == 'crash'
@@ -92,7 +112,7 @@ jobs:
issue-number: ${{ github.event.issue.number }}
labels: ${{ steps.add-labels.outputs.labels }}
- name: Comment outdated
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash'
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
@@ -106,6 +126,40 @@ jobs:
```sh
bun upgrade
```
- name: Comment with Sentry Link and outdated version
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
Are you able to reproduce this crash on the latest version of Bun?
```sh
bun upgrade
```
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment with Sentry Link
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated != 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Thank you for reporting this crash.
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment needs repro
if: github.event.label.name == 'needs repro'
uses: actions-cool/issues-helper@v3

.gitignore vendored (5 changed lines)
View File

@@ -141,6 +141,7 @@ test/node.js/upstream
.zig-cache
scripts/env.local
*.generated.ts
src/bake/generated.ts
# Dependencies
/vendor
@@ -163,3 +164,7 @@ scripts/env.local
/src/deps/zstd
/src/deps/zlib
/src/deps/zig
# Generated files
.buildkite/ci.yml

.vscode/launch.json generated vendored (58 changed lines)
View File

@@ -22,6 +22,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -37,6 +39,8 @@
"BUN_DEBUG_jest": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -58,6 +62,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -73,6 +79,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -88,6 +96,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -103,6 +113,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -119,6 +131,8 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -140,6 +154,8 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -160,6 +176,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -178,6 +196,8 @@
"GOMAXPROCS": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -192,6 +212,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -206,6 +228,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -220,6 +244,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -235,6 +261,8 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -255,6 +283,8 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -276,6 +306,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -291,6 +323,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -306,6 +340,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -321,6 +357,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -336,6 +374,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -352,6 +392,8 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -373,6 +415,8 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -393,6 +437,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
// bun test [*]
{
@@ -408,6 +454,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -422,6 +470,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -437,6 +487,8 @@
"BUN_INSPECT": "ws://localhost:0/",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -456,6 +508,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -470,6 +524,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
// Windows: bun test [file]
{
@@ -1182,6 +1238,8 @@
},
],
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
],
"inputs": [

View File

@@ -30,7 +30,7 @@ $ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config
```
```bash#openSUSE Tumbleweed
$ sudo zypper install go cmake ninja automake git rustup && rustup toolchain install stable
$ sudo zypper install go cmake ninja automake git icu rustup && rustup toolchain install stable
```
{% /codetabs %}
@@ -77,8 +77,8 @@ $ sudo pacman -S llvm clang lld
```bash#Fedora
$ sudo dnf install 'dnf-command(copr)'
$ sudo dnf copr enable -y @fedora-llvm-team/llvm-snapshots
$ sudo dnf install llvm clang lld
$ sudo dnf copr enable -y @fedora-llvm-team/llvm17
$ sudo dnf install llvm16 clang16 lld16-devel
```
```bash#openSUSE Tumbleweed
@@ -285,17 +285,17 @@ If you see this error when compiling, run:
$ xcode-select --install
```
## Cannot find `libatomic.a`
### Cannot find `libatomic.a`
Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:
```bash
$ bun setup -DUSE_STATIC_LIBATOMIC=OFF
$ bun run build -DUSE_STATIC_LIBATOMIC=OFF
```
The built version of Bun may not work on other systems if compiled this way.
## ccache conflicts with building TinyCC on macOS
### ccache conflicts with building TinyCC on macOS
If you run into issues with `ccache` when building TinyCC, try reinstalling ccache
@@ -303,3 +303,9 @@ If you run into issues with `ccache` when building TinyCC, try reinstalling ccac
brew uninstall ccache
brew install ccache
```
## Using bun-debug
- Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging)
- Enable logging for a specific zig scope: `BUN_DEBUG_EventLoop=1 bun-debug ...` (to allow `std.log.scoped(.EventLoop)`)
- Bun transpiles every file it runs. To see the actual executed source in a debug build, look under `/tmp/bun-debug-src/...path/to/file`; for example, the transpiled version of `/home/bun/index.ts` would be at `/tmp/bun-debug-src/home/bun/index.ts`
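For example, a minimal sketch using the flags above (the script name `index.ts` and the working directory are assumed):
```bash
# Run a script under a debug build with only the EventLoop scope logged.
BUN_DEBUG_EventLoop=1 bun-debug index.ts

# Or silence all debug logging for a quieter run.
BUN_DEBUG_QUIET_LOGS=1 bun-debug index.ts

# Inspect the transpiled source that was actually executed
# (mirrored under /tmp/bun-debug-src/ using the file's absolute path).
cat "/tmp/bun-debug-src$PWD/index.ts"
```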

LATEST (2 changed lines)
View File

@@ -1 +1 @@
1.1.31
1.1.34

View File

@@ -1,5 +1,5 @@
<p align="center">
<a href="https://bun.sh"><img src="https://user-images.githubusercontent.com/709451/182802334-d9c42afe-f35d-4a7b-86ea-9985f73f20c3.png" alt="Logo" height=170></a>
<a href="https://bun.sh"><img src="https://github.com/user-attachments/assets/50282090-adfd-4ddb-9e27-c30753c6b161" alt="Logo" height=170></a>
</p>
<h1 align="center">Bun</h1>

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
bench("sync", () => {});
bench("async", async () => {});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../runner.mjs";
bench("sync", () => {});
bench("async", async () => {});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
bench("sync", () => {});
bench("async", async () => {});

Binary file not shown.

View File

@@ -1,5 +1,5 @@
import { bench, run } from "mitata";
import { copyFileSync, statSync, writeFileSync } from "node:fs";
import { bench, run } from "../runner.mjs";
function runner(ready) {
for (let size of [1, 10, 100, 1000, 10000, 100000, 1000000, 10000000]) {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
const crypto = require("node:crypto");
const keyPair = crypto.generateKeyPairSync("rsa", {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
const crypto = require("node:crypto");
const keyPair = crypto.generateKeyPairSync("rsa", {

View File

@@ -1,5 +1,5 @@
import { expect } from "bun:test";
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
const MAP_SIZE = 10_000;

View File

@@ -1,5 +1,5 @@
import { expect } from "bun:test";
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
const SET_SIZE = 10_000;

View File

@@ -1,5 +1,5 @@
import { group } from "mitata";
import EventEmitterNative from "node:events";
import { group } from "../runner.mjs";
export const implementations = [
{

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
import { groupForEmitter } from "./implementations.mjs";
var id = 0;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
import { groupForEmitter } from "./implementations.mjs";
// Pseudo RNG is derived from https://stackoverflow.com/a/424445

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
const count = 100;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
const count = 100;

View File

@@ -1,5 +1,5 @@
import { CString, dlopen, ptr } from "bun:ffi";
import { bench, group, run } from "mitata";
import { bench, group, run } from "../runner.mjs";
const { napiNoop, napiHash, napiString } = require(import.meta.dir + "/src/ffi_napi_bench.node");

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "../node_modules/mitata/src/cli.mjs";
import { bench, group, run } from "../runner.mjs";
const extension = "darwin" !== Deno.build.os ? "so" : "dylib";
const path = new URL("src/target/release/libffi_napi_bench." + extension, import.meta.url).pathname;

View File

@@ -1,5 +1,5 @@
import { bench, group, run } from "mitata";
import { createRequire } from "node:module";
import { bench, group, run } from "../runner.mjs";
const require = createRequire(import.meta.url);
const { napiNoop, napiHash, napiString } = require("./src/ffi_napi_bench.node");

View File

@@ -1,5 +1,5 @@
import braces from "braces";
import { bench, group, run } from "mitata";
import { bench, group, run } from "../runner.mjs";
// const iterations = 1000;
const iterations = 100;
@@ -10,15 +10,16 @@ const veryComplexPattern = "{a,b,HI{c,e,LMAO{d,f}Q}}{1,2,{3,4},5}";
console.log(braces(complexPattern, { expand: true }));
function benchPattern(pattern, name) {
group({ name: `${name} pattern: "${pattern}"`, summary: true }, () => {
const _name = `${name} pattern: "${pattern}"`;
group({ name: _name, summary: true }, () => {
if (typeof Bun !== "undefined")
bench("Bun", () => {
bench(`Bun (${_name})`, () => {
for (let i = 0; i < iterations; i++) {
Bun.$.braces(pattern);
}
});
bench("micromatch/braces", () => {
bench(`micromatch/braces ${_name}`, () => {
for (let i = 0; i < iterations; i++) {
braces(pattern, { expand: true });
}

View File

@@ -1,5 +1,5 @@
import micromatch from "micromatch";
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);

View File

@@ -1,6 +1,6 @@
import fg from "fast-glob";
import { fdir } from "fdir";
import { bench, group, run } from "mitata";
import { bench, group, run } from "../runner.mjs";
const normalPattern = "*.ts";
const recursivePattern = "**/*.ts";

View File

@@ -1,5 +1,5 @@
import { gunzipSync, gzipSync } from "bun";
import { bench, group, run } from "mitata";
import { bench, group, run } from "../runner.mjs";
const data = await Bun.file(require.resolve("@babel/standalone/babel.min.js")).arrayBuffer();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../runner.mjs";
const data = new TextEncoder().encode("Hello World!".repeat(9999));

View File

@@ -1,7 +1,7 @@
import { readFileSync } from "fs";
import { bench, run } from "mitata";
import { createRequire } from "module";
import { gunzipSync, gzipSync } from "zlib";
import { bench, run } from "../runner.mjs";
const require = createRequire(import.meta.url);
const data = readFileSync(require.resolve("@babel/standalone/babel.min.js"));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
bench("console.log('hello')", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../runner.mjs";
bench("console.log", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
bench("console.log", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../../runner.mjs";
import {
arch,
cpus,

View File

@@ -1,4 +1,3 @@
import { bench, run } from "mitata";
import {
arch,
cpus,
@@ -19,6 +18,7 @@ import {
userInfo,
version,
} from "node:os";
import { bench, run } from "../../runner.mjs";
bench("cpus()", () => cpus());
bench("networkInterfaces()", () => networkInterfaces());

View File

@@ -13,7 +13,9 @@
"execa": "^8.0.1",
"fast-glob": "3.3.1",
"fdir": "^6.1.0",
"mitata": "^0.1.6",
"mitata": "^1.0.10",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"string-width": "7.1.0",
"tinycolor2": "^1.6.0",
"zx": "^7.2.3"

bench/runner.mjs Normal file
View File

@@ -0,0 +1,19 @@
import * as Mitata from "mitata";
import process from "node:process";
const asJSON = !!process?.env?.BENCHMARK_RUNNER;
/** @param {Parameters<typeof Mitata["run"]>["0"]} opts */
export function run(opts = {}) {
if (asJSON) {
opts.format = "json";
}
return Mitata.run(opts);
}
export const bench = Mitata.bench;
export function group(_name, fn) {
return Mitata.group(fn);
}
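For orientation, a hedged sketch of how this new shared runner is exercised from the command line (the benchmark filename is illustrative; any file under `bench/` that imports `../runner.mjs` behaves the same way):

```bash
$ bun bench/noop.mjs                      # prints the usual human-readable mitata table
$ BENCHMARK_RUNNER=1 bun bench/noop.mjs   # run() forwards { format: "json" } to mitata for machine consumption
```

Note that the wrapper's `group()` deliberately ignores its name argument, which is presumably why the hunks above fold the group name into each individual bench label, and why ad-hoc `console.log` output is silenced behind the same `BENCHMARK_RUNNER` variable.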

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
function doIt(...args) {
// we use .at() to prevent constant folding optimizations

View File

@@ -1,5 +1,5 @@
// https://github.com/oven-sh/bun/issues/1096
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
const identity = x => x;

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
var myArray = new Array(5);
bench("[1, 2, 3, 4, 5].shift()", () => {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
var comparator = (a, b) => a - b;
const numbers = [

View File

@@ -1,6 +1,6 @@
// @runtime bun
import { ArrayBufferSink } from "bun";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
var short = "Hello World!";
var shortUTF16 = "Hello World 💕💕💕";

View File

@@ -1,5 +1,5 @@
import * as assert from "assert";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
bench("deepEqual", () => {
assert.deepEqual({ foo: "123", bar: "baz" }, { foo: "123", bar: "baz" });

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
bench("noop", function () {});
bench("async function(){}", async function () {});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
function makeBenchmark(size) {
const latin1 = btoa("A".repeat(size));

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../runner.mjs";
bench("new Blob(['hello world'])", function () {
return new Blob(["hello world"]);

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
function makeBenchmark(size, isToString) {
const base64Input = Buffer.alloc(size, "latin1").toString("base64");

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
const first = Buffer.allocUnsafe(size);

View File

@@ -1,7 +1,7 @@
// @runtime bun,node,deno
import { Buffer } from "node:buffer";
import process from "node:process";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
const N = parseInt(process.env.RUN_COUNTER ?? "10000", 10);
var isBuffer = new Buffer(0);

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
for (let fillSize of [4, 8, 16, 11]) {

View File

@@ -1,6 +1,6 @@
import { Buffer } from "node:buffer";
import crypto from "node:crypto";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
const bigBuffer = Buffer.from("hello world".repeat(10000));
const converted = bigBuffer.toString("base64");

View File

@@ -1,22 +1,22 @@
import Color from "color";
import tinycolor from "tinycolor2";
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "../runner.mjs";
const inputs = ["#f00", "rgb(255, 0, 0)", "rgba(255, 0, 0, 1)", "hsl(0, 100%, 50%)"];
for (const input of inputs) {
group(`${input}`, () => {
if (typeof Bun !== "undefined") {
bench("Bun.color()", () => {
bench(`Bun.color() (${input})`, () => {
Bun.color(input, "css");
});
}
bench("color", () => {
bench(`color (${input})`, () => {
Color(input).hex();
});
bench("'tinycolor2'", () => {
bench(`'tinycolor2' (${input})`, () => {
tinycolor(input).toHexString();
});
});

View File

@@ -1,6 +1,6 @@
import { allocUnsafe } from "bun";
import { readFileSync } from "fs";
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "../runner.mjs";
function polyfill(chunks) {
var size = 0;
@@ -41,15 +41,16 @@ const chunkGroups = [
];
for (const chunks of chunkGroups) {
group(`${chunks.reduce((prev, curr, i, a) => prev + curr.byteLength, 0)} bytes for ${chunks.length} chunks`, () => {
bench("Bun.concatArrayBuffers", () => {
const name = `${chunks.reduce((prev, curr, i, a) => prev + curr.byteLength, 0)} bytes for ${chunks.length} chunks`
group(name, () => {
bench(`Bun.concatArrayBuffers (${name})`, () => {
Bun.concatArrayBuffers(chunks);
});
bench("Uint8Array.set", () => {
bench(`Uint8Array.set (${name})`, () => {
polyfill(chunks);
});
bench("Uint8Array.set (uninitialized memory)", () => {
bench(`Uint8Array.set (uninitialized memory) (${name})`, () => {
polyfillUninitialized(chunks);
});
});

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../runner.mjs";
const json = {
login: "wongmjane",

View File

@@ -2,7 +2,7 @@ import { mkdirSync, rmSync, writeFileSync } from "fs";
import { cp } from "fs/promises";
import { tmpdir } from "os";
import { join, resolve } from "path";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
import { fileURLToPath } from "url";
const hugeDirectory = (() => {

View File

@@ -1,6 +1,6 @@
// https://github.com/oven-sh/bun/issues/2190
import { bench, run } from "mitata";
import { createHash } from "node:crypto";
import { bench, run } from "../runner.mjs";
const data =
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";

View File

@@ -1,5 +1,5 @@
// so it can run in environments without node module resolution
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
import crypto from "node:crypto";

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
bench("crypto.randomUUID()", () => {
return crypto.randomUUID();

View File

@@ -1,6 +1,6 @@
// https://github.com/oven-sh/bun/issues/2190
import { bench, run } from "mitata";
import { createHash } from "node:crypto";
import { bench, run } from "../runner.mjs";
const data =
"Delightful remarkably mr on announcing themselves entreaties favourable. About to in so terms voice at. Equal an would is found seems of. The particular friendship one sufficient terminated frequently themselves. It more shed went up is roof if loud case. Delay music in lived noise an. Beyond genius really enough passed is up.";

View File

@@ -1,6 +1,6 @@
// so it can run in environments without node module resolution
import crypto from "node:crypto";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../runner.mjs";
var foo = new Uint8Array(65536);
bench("crypto.getRandomValues(65536)", () => {
crypto.getRandomValues(foo);

View File

@@ -1,5 +1,5 @@
import fastDeepEquals from "fast-deep-equal/es6/index";
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "../runner.mjs";
// const Date = globalThis.Date;
function func1() {}
@@ -490,7 +490,7 @@ for (let { tests, description } of fixture) {
var expected;
group(describe, () => {
for (let equalsFn of [Bun.deepEquals, fastDeepEquals]) {
bench(equalsFn.name, () => {
bench(`${describe}: ${equalsFn.name}`, () => {
expected = equalsFn(value1, value2);
if (expected !== equal) {
throw new Error(`Expected ${expected} to be ${equal} for ${description}`);

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../runner.mjs";
const properties = {
closed: {

View File

@@ -1,5 +1,5 @@
import { lookup, resolve } from "node:dns/promises";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
bench("(cached) dns.lookup remote x 50", async () => {
var tld = "example.com";

View File

@@ -1,10 +1,10 @@
import { dns } from "bun";
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "../runner.mjs";
async function forEachBackend(name, fn) {
group(name, () => {
for (let backend of ["libc", "c-ares", process.platform === "darwin" ? "system" : ""].filter(Boolean))
bench(backend, fn(backend));
bench(`${backend} (${name})`, fn(backend));
});
}

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../runner.mjs";
const encoder = new TextEncoder();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
var err = new Error();
bench("Error.captureStackTrace(err)", () => {

View File

@@ -1,4 +1,4 @@
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "../runner.mjs";
var bunEscapeHTML = globalThis.escapeHTML || Bun.escapeHTML;
@@ -92,24 +92,21 @@ function reactEscapeHtml(string) {
// }
for (let input of [
`long string, nothing to escape... `.repeat(9999999 * 3),
"long string, nothing to escape... ".repeat(9999999 * 3),
FIXTURE.repeat(8000),
// "[unicode]" + FIXTURE_WITH_UNICODE,
]) {
const name = `"${input.substring(0, Math.min(input.length, 32))}" (${new Intl.NumberFormat().format(input.length / 100_000_000_0)} GB)`
group(
{
summary: true,
name:
`"` +
input.substring(0, Math.min(input.length, 32)) +
`"` +
` (${new Intl.NumberFormat().format(input.length / 100_000_000_0)} GB)`,
name
},
() => {
// bench(`ReactDOM.escapeHTML`, () => reactEscapeHtml(input));
// bench(`html-entities.encode`, () => htmlEntityEncode(input));
// bench(`he.escape`, () => heEscape(input));
bench(`Bun.escapeHTML`, () => bunEscapeHTML(input));
bench(`Bun.escapeHTML (${name})`, () => bunEscapeHTML(input));
},
);
}

View File

@@ -1,5 +1,5 @@
import { dlopen } from "bun:ffi";
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "../runner.mjs";
const types = {
returns_true: {

View File

@@ -1,5 +1,5 @@
// so it can run in environments without node module resolution
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../runner.mjs";
const blob = new Blob(["foo", "bar", "baz"]);
bench("FormData.append", () => {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../runner.mjs";
// pure JS implementation will optimize this out
bench("new Headers", function () {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
const input =
"Hello, World! foo bar baz qux quux corge grault garply waldo fred plugh xyzzy thud z a b c d e f g h i j k l m n o p q r s t u v w x y z".split(

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
var obj = {
"restApiRoot": "/api",

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
bench("JSON.stringify({hello: 'world'})", () => JSON.stringify({ hello: "world" }));

View File

@@ -1,6 +1,6 @@
// This is a stress test of some internals in how Bun does the module.exports assignment.
// If it crashes or throws then this fails
import("./runner.mjs").then(({ bench, run }) => {
import("../runner.mjs").then(({ bench, run }) => {
bench("Object.defineProperty(module, 'exports', { get() { return 42; } })", () => {
Object.defineProperty(module, "exports", {
get() {
@@ -36,7 +36,9 @@ import("./runner.mjs").then(({ bench, run }) => {
a: 1,
};
console.log(
const log = !process?.env?.BENCHMARK_RUNNER ? console.log : () => {};
log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id],
@@ -49,10 +51,11 @@ import("./runner.mjs").then(({ bench, run }) => {
return 42;
};
console.log(module.exports, module.exports());
log(module.exports);
log(module.exports, module.exports());
queueMicrotask(() => {
console.log(
log(
module?.exports,
require.cache[module.id].exports,
module?.exports === require.cache[module.id]?.exports,

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
// These are no-op C++ functions that are exported to JS.
const lazy = globalThis[Symbol.for("Bun.lazy")];

View File

@@ -1,5 +1,5 @@
import { IncomingMessage } from "node:http";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
const headers = {
date: "Mon, 06 Nov 2023 05:12:49 GMT",

View File

@@ -1,6 +1,6 @@
// @runtime node, bun
import * as vm from "node:vm";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
const context = {
animal: "cat",

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
var noop = globalThis[Symbol.for("Bun.lazy")]("noop");
var { function: noopFn, callback } = noop;

View File

@@ -1,5 +1,5 @@
// so it can run in environments without node module resolution
import { bench, run } from "../../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../runner.mjs";
const obj = {
a: 1,

View File

@@ -24,7 +24,7 @@ const obj = {
w: 23,
};
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
var val = 0;
bench("Object.values(literal)", () => {

View File

@@ -1,5 +1,5 @@
import { bench, run } from "mitata";
import { posix } from "path";
import { bench, run } from "../runner.mjs";
const pathConfigurations = [
"",

View File

@@ -1,6 +1,6 @@
import { pbkdf2 } from "node:crypto";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
const password = "password";
const salt = "salt";

View File

@@ -1,5 +1,5 @@
import { peek } from "bun";
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
let pending = Bun.sleep(1000);
let resolved = Promise.resolve(1);

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
bench("performance.now x 1000", () => {
for (let i = 0; i < 1000; i++) {
performance.now();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "../node_modules/mitata/src/cli.mjs";
import { bench, run } from "../runner.mjs";
// This is a benchmark of the performance impact of using private properties.
bench("Polyfillprivate", () => {

View File

@@ -1,4 +1,4 @@
import { bench, run } from "mitata";
import { bench, run } from "../runner.mjs";
bench("process.cwd()", () => {
process.cwd();

View File

@@ -1,5 +1,5 @@
import { performance } from "perf_hooks";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
bench("process.memoryUsage()", () => {
process.memoryUsage();

View File

@@ -1,4 +1,4 @@
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
bench("process.stderr.write('hey')", () => {
process.stderr.write("hey");

View File

@@ -1,6 +1,6 @@
import { renderToReadableStream as renderToReadableStreamBun } from "react-dom/server";
import { renderToReadableStream } from "react-dom/server.browser";
import { bench, group, run } from "./runner.mjs";
import { bench, group, run } from "../runner.mjs";
const App = () => (
<div>

View File

@@ -1,7 +1,7 @@
import { createReadStream, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { sep } from "node:path";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
if (!Promise.withResolvers) {
Promise.withResolvers = function () {

View File

@@ -1,5 +1,5 @@
import { readFileSync, writeFileSync } from "node:fs";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
var short = (function () {
const text = "Hello World!";

View File

@@ -4,7 +4,7 @@ import { readdir } from "fs/promises";
import { relative, resolve } from "path";
import { argv } from "process";
import { fileURLToPath } from "url";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
let dir = resolve(argv.length > 2 ? argv[2] : fileURLToPath(new URL("../../node_modules", import.meta.url)));
if (dir.includes(process.cwd())) {
@@ -43,8 +43,11 @@ bench(`await readdir("${dir}", {recursive: false})`, async () => {
});
await run();
console.log("\n", count, "files/dirs in", dir, "\n", "SHA256:", hash, "\n");
if (count !== syncCount) {
throw new Error(`Mismatched file counts: ${count} async !== ${syncCount} sync`);
if (!process?.env?.BENCHMARK_RUNNER) {
console.log("\n", count, "files/dirs in", dir, "\n", "SHA256:", hash, "\n");
if (count !== syncCount) {
throw new Error(`Mismatched file counts: ${count} async !== ${syncCount} sync`);
}
}

View File

@@ -1,6 +1,6 @@
import { readFileSync } from "node:fs";
import { readFile } from "node:fs/promises";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
bench(`readFileSync(/tmp/404-not-found)`, () => {
try {

View File

@@ -1,5 +1,5 @@
import { realpathSync } from "node:fs";
import { bench, run } from "./runner.mjs";
import { bench, run } from "../runner.mjs";
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;
const arg = process.argv[process.argv.length - 1];

Some files were not shown because too many files have changed in this diff.