Compare commits


68 Commits

Author SHA1 Message Date
Jarred Sumner
139734a0a5 Merge branch 'main' into dylan/remove-always-decode 2024-10-26 01:36:36 -07:00
Meghan Denny
50d80a805d pm: fix weird package.json formatting after install (#14801) 2024-10-26 01:36:25 -07:00
Meghan Denny
2d9a73fc07 test: fix expected value of 'should perform bin-linking across multiple dependencies' (#14833) 2024-10-26 01:02:24 -07:00
Jarred Sumner
d0b3802a79 github actions 2024-10-25 23:50:12 -07:00
Jarred Sumner
7053212566 Update associate-issue-with-sentry.ts 2024-10-25 23:47:15 -07:00
Jarred Sumner
4f5660a6f7 Add sentry id to crash report comment 2024-10-25 23:40:27 -07:00
Dylan Conway
87279392cf fix 9395 (#14815) 2024-10-25 19:58:45 -07:00
Bjorn Beishline
7f5860331e Fixed compilation issues with no outdir (#14717)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2024-10-25 18:43:58 -07:00
Dylan Conway
b895738156 fix(install): migrate package-lock.json with dependency on root package (#14811)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-25 01:32:17 -07:00
Dylan Conway
61534c7efe Remove warning for unused registry options from npmrc (#14813) 2024-10-25 01:31:39 -07:00
Jarred Sumner
ec4c9f8f84 Update mimalloc (#14814) 2024-10-25 01:31:24 -07:00
Jarred Sumner
35a64d8585 Bump WebKit (#14812) 2024-10-25 01:31:12 -07:00
Minsoo Choo
eb6995e09b Update SvelteKit usage guide (#14777) 2024-10-25 00:04:32 -07:00
Meghan Denny
1391e5269b Revert "ci: merge clang-format and clang-tidy into single pipeline" (#14809) 2024-10-25 00:04:13 -07:00
Dylan Conway
9621b641a1 update test/bun.lockb (#14746) 2024-10-25 00:03:52 -07:00
Dylan Conway
5eaa7301eb fix(install): patches with bin in package.json (#14807) 2024-10-25 00:03:19 -07:00
Arthur
f21870a06c chore(console): updated jsdoc table (#14792) 2024-10-24 21:20:46 -07:00
Don Isaac
0e4006eefd ci: merge clang-format and clang-tidy into single pipeline (#14798) 2024-10-24 15:26:05 -07:00
Dylan Conway
9643a924e1 bump 2024-10-24 14:24:08 -07:00
Dylan Conway
247456b675 fix(install): continue install if optional postinstall fails (#14783) 2024-10-23 21:58:53 -07:00
Meghan Denny
6f60523e6c " -> ' (#14776)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-23 19:21:36 -07:00
Jarred Sumner
2de2e9f600 48 -> 64 2024-10-23 18:27:02 -07:00
Ciro Spaciari
29bf8a505d fix(tests) pq -> pg + populate before (#14748) 2024-10-23 18:01:06 -07:00
Dylan Conway
0edd2ef1fe remove always_decode_escape_sequences 2024-10-23 17:07:35 -07:00
Jarred Sumner
93d115f9b7 Reduce default max network connection limit from 256 to 48 in bun install (#14755) 2024-10-23 15:34:16 -07:00
Ashcon Partovi
74e440d58a ci: Set prioritization based on fork, main branch, or queue 2024-10-23 09:16:48 -07:00
Ashcon Partovi
aa4dde976d ci: Fix pipeline script when on main branch 2024-10-23 09:03:06 -07:00
Ashcon Partovi
eb0e9b9bde ci: Skip builds when only docs are changed (#14751) 2024-10-23 08:54:53 -07:00
Liran Tal
a656cc1b70 docs: fix missing code highlight in spawn.md (#14761) 2024-10-23 01:01:21 -07:00
Ashcon Partovi
4044ff740d ci: add scripts for building macOS images (#14743) 2024-10-22 16:07:12 -07:00
Ashcon Partovi
b9240f6ec7 cmake: only enable LTO when release + linux + ci 2024-10-22 13:10:58 -07:00
Eckhardt (Kaizen) Dreyer
3db0191409 fix(install): Skip optional dependencies if false in bunfig.toml (#14629) 2024-10-22 11:55:10 -07:00
Oliver Medhurst
00b055566e contributing: fix fedora llvm install steps (#14726) 2024-10-22 11:40:46 -07:00
snwy
517cdc1392 fix jsx symbol collisions when importing own variables with same names (#14343)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-22 00:17:18 -07:00
Pham Minh Triet
8b4b55725e Fix(doc): update Next.js guide (#14730) 2024-10-22 00:16:15 -07:00
Jarred Sumner
38d39109b3 Fix assertion failure 2024-10-21 21:46:17 -07:00
Jarred Sumner
ec29311c7a Bump 2024-10-21 18:05:10 -07:00
Ciro Spaciari
fe8d0079ec tls(Server) fix connectionListener and make alpnProtocol more compatible with node.js (#14695)
Co-authored-by: cirospaciari <cirospaciari@users.noreply.github.com>
2024-10-21 01:58:14 +00:00
Jarred Sumner
8063e9d6b8 Fixes #14411 (#14691) 2024-10-20 22:02:44 +00:00
Vaggelis Papadogiannakis
ae8de1926e Update instructions to run a bun application via pm2 with the use… (#14704) 2024-10-20 15:06:45 -07:00
Minsoo Choo
b9b94de5ed icu required on openSUSE for local webkit build (#14690) 2024-10-20 01:08:42 -07:00
Jarred Sumner
070e5804ad Implement crypto.hash() (#14683) 2024-10-19 12:14:23 -07:00
Jarred Sumner
67b4478137 Fixes #14333 (#14679) 2024-10-19 01:14:13 -07:00
Jarred Sumner
522c9fa22d Clarify some of this 2024-10-19 00:26:30 -07:00
Jarred Sumner
4b63ffeceb Clarify node-fallbacks 2024-10-19 00:23:57 -07:00
Pham Minh Triet
fe45b1e9b9 Fix(doc): SNI typo (#14508) 2024-10-18 22:37:57 -07:00
Jarred Sumner
d41ca824dd Bump 2024-10-18 22:32:42 -07:00
Meghan Denny
663331c56f fix regression in BunJSGlobalObjectDebuggable from most recent webkit upgrade (#14675) 2024-10-18 22:31:39 -07:00
Meghan Denny
64d0b626b9 Bun.color: fill out missing options and examples for outputFormat (#14656)
Co-authored-by: Zack Radisic <56137411+zackradisic@users.noreply.github.com>
2024-10-18 22:29:53 -07:00
Dylan Conway
e5c00ab4b4 fix(CryptoHasher): throw error if update or digest are called after digest (#14677) 2024-10-19 02:21:41 +00:00
Meghan Denny
4f2d924db3 Bun.color: match accepted outputFormat options to error (#14657) 2024-10-19 00:34:56 +00:00
Ashcon Partovi
bf8a75a63f Revert "Remove soft_fail from Buildkite since merge queue is enabled"
This reverts commit 253cc15a9f.
2024-10-18 16:04:58 -07:00
Ashcon Partovi
253cc15a9f Remove soft_fail from Buildkite since merge queue is enabled 2024-10-18 13:28:24 -07:00
Meghan Denny
fbf4b30e70 bun-types: add missing options to DigestEncoding (#14654) 2024-10-18 19:17:10 +00:00
Dylan Conway
f3b658d9f7 fix double free with invalid TLSOptions (#14648) 2024-10-18 05:16:21 +00:00
Ciro Spaciari
b652136cf7 update docs (#14620) 2024-10-18 01:26:50 +00:00
Ashcon Partovi
8376b82371 Fix merge queue (#14646) 2024-10-18 01:22:35 +00:00
Ashcon Partovi
7bb39023b8 Merge queue (#14639) 2024-10-18 01:14:42 +00:00
Meghan Denny
850cdb0587 vscode: set the launch configs' cwd to the root (#14643) 2024-10-17 16:24:10 -07:00
Ciro Spaciari
2f2a24f625 bench: fix grpc and scripts (#14638) 2024-10-17 13:30:47 -07:00
Dylan Conway
e448c4cc3b fs.mkdir empty string bugfix (#14510) 2024-10-16 18:55:49 -07:00
Ciro Spaciari
2d0b557ff7 add grpc-js bench (#14601) 2024-10-16 11:11:53 -07:00
Meghan Denny
15f5ba3e26 jest: print received value when expect().toThrow() doesnt throw (#14608) 2024-10-16 11:11:26 -07:00
refi64
1385f9f686 cmake: force the c-ares libdir to always be 'lib' (#14602) 2024-10-16 10:13:20 -07:00
Ciro Spaciari
07ccec0fd8 H2 fixes (#14606) 2024-10-16 09:06:56 -07:00
Dylan Conway
7283453eed use memset_patternN in Buffer.fill (#14599) 2024-10-15 21:16:57 -07:00
Ciro Spaciari
1a08cfcd6b fix h2 tests failures (#14598) 2024-10-15 18:36:23 -07:00
Meghan Denny
06e733cc64 ci: run clang-format on .h files too (#14597)
Co-authored-by: nektro <nektro@users.noreply.github.com>
2024-10-15 16:54:49 -07:00
227 changed files with 6665 additions and 3970 deletions

.buildkite/ci.mjs (new file, 406 changed lines)

@@ -0,0 +1,406 @@
#!/usr/bin/env node
/**
* Build and test Bun on macOS, Linux, and Windows.
* @link https://buildkite.com/docs/pipelines/defining-steps
*/
import { writeFileSync } from "node:fs";
import { join } from "node:path";
function getEnv(name, required = true) {
const value = process.env[name];
if (!value && required) {
throw new Error(`Missing environment variable: ${name}`);
}
return value;
}
function getRepository() {
const url = getEnv("BUILDKITE_PULL_REQUEST_REPO", false) || getEnv("BUILDKITE_REPO");
const match = url.match(/github.com\/([^/]+)\/([^/]+)\.git$/);
if (!match) {
throw new Error(`Unsupported repository: ${url}`);
}
const [, owner, repo] = match;
return `${owner}/${repo}`;
}
function getCommit() {
return getEnv("BUILDKITE_COMMIT");
}
function getBranch() {
return getEnv("BUILDKITE_BRANCH");
}
function getMainBranch() {
return getEnv("BUILDKITE_PIPELINE_DEFAULT_BRANCH", false) || "main";
}
function isFork() {
const repository = getEnv("BUILDKITE_PULL_REQUEST_REPO", false);
return !!repository && repository !== getEnv("BUILDKITE_REPO");
}
function isMainBranch() {
return getBranch() === getMainBranch() && !isFork();
}
function isMergeQueue() {
return /^gh-readonly-queue/.test(getEnv("BUILDKITE_BRANCH"));
}
function isPullRequest() {
return getEnv("BUILDKITE_PULL_REQUEST", false) === "true";
}
async function getChangedFiles() {
const repository = getRepository();
const head = getCommit();
const base = isMainBranch() ? `${head}^1` : getMainBranch();
try {
const response = await fetch(`https://api.github.com/repos/${repository}/compare/${base}...${head}`);
if (response.ok) {
const { files } = await response.json();
return files.filter(({ status }) => !/removed|unchanged/i.test(status)).map(({ filename }) => filename);
}
} catch (error) {
console.error(error);
}
}
function isDocumentation(filename) {
return /^(\.vscode|\.github|bench|docs|examples)|\.(md)$/.test(filename);
}
function isTest(filename) {
return /^test/.test(filename);
}
function toYaml(obj, indent = 0) {
const spaces = " ".repeat(indent);
let result = "";
for (const [key, value] of Object.entries(obj)) {
if (value === null) {
result += `${spaces}${key}: null\n`;
continue;
}
if (Array.isArray(value)) {
result += `${spaces}${key}:\n`;
value.forEach(item => {
if (typeof item === "object" && item !== null) {
result += `${spaces}- \n${toYaml(item, indent + 2)
.split("\n")
.map(line => `${spaces} ${line}`)
.join("\n")}\n`;
} else {
result += `${spaces}- ${item}\n`;
}
});
continue;
}
if (typeof value === "object") {
result += `${spaces}${key}:\n${toYaml(value, indent + 2)}`;
continue;
}
if (
typeof value === "string" &&
(value.includes(":") || value.includes("#") || value.includes("'") || value.includes('"') || value.includes("\n"))
) {
result += `${spaces}${key}: "${value.replace(/"/g, '\\"')}"\n`;
continue;
}
result += `${spaces}${key}: ${value}\n`;
}
return result;
}
function getPipeline() {
/**
* Helpers
*/
const getKey = platform => {
const { os, arch, baseline } = platform;
if (baseline) {
return `${os}-${arch}-baseline`;
}
return `${os}-${arch}`;
};
const getLabel = platform => {
const { os, arch, baseline } = platform;
if (baseline) {
return `:${os}: ${arch}-baseline`;
}
return `:${os}: ${arch}`;
};
// https://buildkite.com/docs/pipelines/command-step#retry-attributes
const getRetry = (limit = 3) => {
return {
automatic: [
{ exit_status: 1, limit: 1 },
{ exit_status: -1, limit },
{ exit_status: 255, limit },
{ signal_reason: "agent_stop", limit },
],
};
};
// https://buildkite.com/docs/pipelines/managing-priorities
const getPriority = () => {
if (isFork()) {
return -1;
}
if (isMainBranch()) {
return 2;
}
if (isMergeQueue()) {
return 1;
}
return 0;
};
/**
* Steps
*/
const getBuildVendorStep = platform => {
const { os, arch, baseline } = platform;
return {
key: `${getKey(platform)}-build-vendor`,
label: `${getLabel(platform)} - build-vendor`,
agents: {
os,
arch,
queue: `build-${os}`,
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
ENABLE_BASELINE: baseline ? "ON" : "OFF",
},
command: "bun run build:ci --target dependencies",
};
};
const getBuildCppStep = platform => {
const { os, arch, baseline } = platform;
return {
key: `${getKey(platform)}-build-cpp`,
label: `${getLabel(platform)} - build-cpp`,
agents: {
os,
arch,
queue: `build-${os}`,
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
BUN_CPP_ONLY: "ON",
ENABLE_BASELINE: baseline ? "ON" : "OFF",
},
command: "bun run build:ci --target bun",
};
};
const getBuildZigStep = platform => {
const { os, arch, baseline } = platform;
const toolchain = baseline ? `${os}-${arch}-baseline` : `${os}-${arch}`;
return {
key: `${getKey(platform)}-build-zig`,
label: `${getLabel(platform)} - build-zig`,
agents: {
queue: "build-zig",
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
ENABLE_BASELINE: baseline ? "ON" : "OFF",
},
command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`,
};
};
const getBuildBunStep = platform => {
const { os, arch, baseline } = platform;
return {
key: `${getKey(platform)}-build-bun`,
label: `${getLabel(platform)} - build-bun`,
depends_on: [
`${getKey(platform)}-build-vendor`,
`${getKey(platform)}-build-cpp`,
`${getKey(platform)}-build-zig`,
],
agents: {
os,
arch,
queue: `build-${os}`,
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
BUN_LINK_ONLY: "ON",
ENABLE_BASELINE: baseline ? "ON" : "OFF",
},
command: "bun run build:ci --target bun",
};
};
const getTestBunStep = platform => {
const { os, arch, distro, release } = platform;
let name;
if (os === "darwin" || os === "windows") {
name = getLabel(platform);
} else {
name = getLabel({ ...platform, os: distro });
}
let agents;
if (os === "darwin") {
agents = { os, arch, queue: `test-darwin` };
} else if (os === "windows") {
agents = { os, arch, robobun: true };
} else {
agents = { os, arch, distro, release, robobun: true };
}
let command;
if (os === "windows") {
command = `node .\\scripts\\runner.node.mjs --step ${getKey(platform)}-build-bun`;
} else {
command = `./scripts/runner.node.mjs --step ${getKey(platform)}-build-bun`;
}
let parallelism;
if (os === "darwin") {
parallelism = 2;
} else {
parallelism = 10;
}
return {
key: `${getKey(platform)}-${distro}-${release.replace(/\./g, "")}-test-bun`,
label: `${name} - test-bun`,
depends_on: [`${getKey(platform)}-build-bun`],
agents,
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
soft_fail: isMainBranch(),
parallelism,
command,
};
};
/**
* Config
*/
const buildPlatforms = [
{ os: "darwin", arch: "aarch64" },
{ os: "darwin", arch: "x64" },
{ os: "linux", arch: "aarch64" },
{ os: "linux", arch: "x64" },
{ os: "linux", arch: "x64", baseline: true },
{ os: "windows", arch: "x64" },
{ os: "windows", arch: "x64", baseline: true },
];
const testPlatforms = [
{ os: "darwin", arch: "aarch64", distro: "sonoma", release: "14" },
{ os: "darwin", arch: "aarch64", distro: "ventura", release: "13" },
{ os: "darwin", arch: "x64", distro: "sonoma", release: "14" },
{ os: "darwin", arch: "x64", distro: "ventura", release: "13" },
{ os: "linux", arch: "aarch64", distro: "debian", release: "12" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04" },
{ os: "linux", arch: "x64", distro: "debian", release: "12" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04" },
{ os: "linux", arch: "x64", distro: "debian", release: "12", baseline: true },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04", baseline: true },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", baseline: true },
{ os: "windows", arch: "x64", distro: "server", release: "2019" },
{ os: "windows", arch: "x64", distro: "server", release: "2019", baseline: true },
];
return {
priority: getPriority(),
steps: [
...buildPlatforms.map(platform => {
const { os, arch, baseline } = platform;
return {
key: getKey(platform),
group: getLabel(platform),
steps: [
getBuildVendorStep(platform),
getBuildCppStep(platform),
getBuildZigStep(platform),
getBuildBunStep(platform),
...testPlatforms
.filter(platform => platform.os === os && platform.arch === arch && baseline === platform.baseline)
.map(platform => getTestBunStep(platform)),
],
};
}),
],
};
}
async function main() {
console.log("Checking environment...");
console.log(" - Repository:", getRepository());
console.log(" - Branch:", getBranch());
console.log(" - Commit:", getCommit());
console.log(" - Is Main Branch:", isMainBranch());
console.log(" - Is Merge Queue:", isMergeQueue());
console.log(" - Is Pull Request:", isPullRequest());
const changedFiles = await getChangedFiles();
if (changedFiles) {
console.log(
`Found ${changedFiles.length} changed files: \n${changedFiles.map(filename => ` - ${filename}`).join("\n")}`,
);
if (changedFiles.every(filename => isDocumentation(filename))) {
console.log("Since changed files are only documentation, skipping...");
return;
}
if (changedFiles.every(filename => isTest(filename) || isDocumentation(filename))) {
// TODO: console.log("Since changed files contain tests, skipping build...");
}
}
const pipeline = getPipeline();
const content = toYaml(pipeline);
const contentPath = join(process.cwd(), ".buildkite", "ci.yml");
writeFileSync(contentPath, content);
console.log("Generated pipeline:");
console.log(" - Path:", contentPath);
console.log(" - Size:", (content.length / 1024).toFixed(), "KB");
}
await main();
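The script above is what the "ci: Set prioritization based on fork, main branch, or queue" commit refers to. As a standalone illustration of how its predicates resolve to a Buildkite priority, here is a minimal sketch; the environment values (including the repository URL) are invented for the example and mirror, rather than replace, the helpers in ci.mjs.

```js
// Editorial sketch (not part of the diff): how isFork(), isMainBranch(),
// isMergeQueue(), and getPriority() combine. The env values are made up.
const env = {
  BUILDKITE_REPO: "https://github.com/oven-sh/bun.git", // assumption for illustration
  BUILDKITE_PULL_REQUEST_REPO: "", // empty => not a fork
  BUILDKITE_BRANCH: "gh-readonly-queue/main/pr-12345", // hypothetical merge-queue branch
  BUILDKITE_PIPELINE_DEFAULT_BRANCH: "main",
};

const isFork = !!env.BUILDKITE_PULL_REQUEST_REPO && env.BUILDKITE_PULL_REQUEST_REPO !== env.BUILDKITE_REPO;
const isMainBranch = env.BUILDKITE_BRANCH === env.BUILDKITE_PIPELINE_DEFAULT_BRANCH && !isFork;
const isMergeQueue = /^gh-readonly-queue/.test(env.BUILDKITE_BRANCH);

// Forks run at the lowest priority, main at the highest, the merge queue in between.
const priority = isFork ? -1 : isMainBranch ? 2 : isMergeQueue ? 1 : 0;
console.log({ isFork, isMainBranch, isMergeQueue, priority });
// => { isFork: false, isMainBranch: false, isMergeQueue: true, priority: 1 }
```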


@@ -1,790 +0,0 @@
# Build and test Bun on macOS, Linux, and Windows.
# https://buildkite.com/docs/pipelines/defining-steps
#
# If a step has the `robobun: true` label, robobun will listen
# to webhooks from Buildkite and provision a VM to run the step.
#
# Changes to this file will be automatically uploaded on the next run
# for a particular commit.
steps:
# macOS aarch64
- key: "darwin-aarch64"
group: ":darwin: aarch64"
steps:
- key: "darwin-aarch64-build-deps"
label: "build-deps"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
command:
- "bun run build:ci --target dependencies"
- key: "darwin-aarch64-build-cpp"
label: "build-cpp"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-aarch64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain darwin-aarch64"
- key: "darwin-aarch64-build-bun"
label: "build-bun"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
depends_on:
- "darwin-aarch64-build-deps"
- "darwin-aarch64-build-cpp"
- "darwin-aarch64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-aarch64-test-macos-14"
label: ":darwin: 14 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-aarch64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "aarch64"
release: "14"
command:
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
- key: "darwin-aarch64-test-macos-13"
label: ":darwin: 13 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-aarch64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "aarch64"
release: "13"
command:
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
# macOS x64
- key: "darwin-x64"
group: ":darwin: x64"
steps:
- key: "darwin-x64-build-deps"
label: "build-deps"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
command:
- "bun run build:ci --target dependencies"
- key: "darwin-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain darwin-x64"
- key: "darwin-x64-build-bun"
label: "build-bun"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
depends_on:
- "darwin-x64-build-deps"
- "darwin-x64-build-cpp"
- "darwin-x64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-x64-test-macos-14"
label: ":darwin: 14 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-x64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "x64"
release: "14"
command:
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
- key: "darwin-x64-test-macos-13"
label: ":darwin: 13 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-x64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "x64"
release: "13"
command:
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
# Linux x64
- key: "linux-x64"
group: ":linux: x64"
steps:
- key: "linux-x64-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
command:
- "bun run build:ci --target dependencies"
- key: "linux-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain linux-x64"
- key: "linux-x64-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
depends_on:
- "linux-x64-build-deps"
- "linux-x64-build-cpp"
- "linux-x64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-test-debian-12"
label: ":debian: 12 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
- key: "linux-x64-test-ubuntu-2204"
label: ":ubuntu: 22.04 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
- key: "linux-x64-test-ubuntu-2004"
label: ":ubuntu: 20.04 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
# Linux x64-baseline
- key: "linux-x64-baseline"
group: ":linux: x64-baseline"
steps:
- key: "linux-x64-baseline-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target dependencies"
- key: "linux-x64-baseline-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
ENABLE_BASELINE: "ON"
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-baseline-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target bun-zig --toolchain linux-x64-baseline"
- key: "linux-x64-baseline-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
depends_on:
- "linux-x64-baseline-build-deps"
- "linux-x64-baseline-build-cpp"
- "linux-x64-baseline-build-zig"
env:
ENABLE_BASELINE: "ON"
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-baseline-test-debian-12"
label: ":debian: 12 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
- key: "linux-x64-baseline-test-ubuntu-2204"
label: ":ubuntu: 22.04 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
- key: "linux-x64-baseline-test-ubuntu-2004"
label: ":ubuntu: 20.04 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
# Linux aarch64
- key: "linux-aarch64"
group: ":linux: aarch64"
steps:
- key: "linux-aarch64-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
command:
- "bun run build:ci --target dependencies"
- key: "linux-aarch64-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-aarch64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain linux-aarch64"
- key: "linux-aarch64-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
depends_on:
- "linux-aarch64-build-deps"
- "linux-aarch64-build-cpp"
- "linux-aarch64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-aarch64-test-debian-12"
label: ":debian: 12 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
- key: "linux-aarch64-test-ubuntu-2204"
label: ":ubuntu: 22.04 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
- key: "linux-aarch64-test-ubuntu-2004"
label: ":ubuntu: 20.04 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
# Windows x64
- key: "windows-x64"
group: ":windows: x64"
steps:
- key: "windows-x64-build-deps"
label: "build-deps"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
command:
- "bun run build:ci --target dependencies"
- key: "windows-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain windows-x64"
- key: "windows-x64-build-bun"
label: "build-bun"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
depends_on:
- "windows-x64-build-deps"
- "windows-x64-build-cpp"
- "windows-x64-build-zig"
retry:
automatic:
- exit_status: 255
limit: 5
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-test-bun"
label: ":windows: x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 1
retry:
automatic:
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "windows-x64-build-bun"
agents:
robobun: "true"
os: "windows"
arch: "x64"
command:
- "node .\\scripts\\runner.node.mjs --step windows-x64-build-bun"
# Windows x64-baseline
- key: "windows-x64-baseline"
group: ":windows: x64-baseline"
steps:
- key: "windows-x64-baseline-build-deps"
label: "build-deps"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target dependencies"
- key: "windows-x64-baseline-build-cpp"
label: "build-cpp"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-baseline-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target bun-zig --toolchain windows-x64-baseline"
- key: "windows-x64-baseline-build-bun"
label: "build-bun"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
depends_on:
- "windows-x64-baseline-build-deps"
- "windows-x64-baseline-build-cpp"
- "windows-x64-baseline-build-zig"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-baseline-test-bun"
label: ":windows: x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 1
retry:
automatic:
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "windows-x64-baseline-build-bun"
agents:
robobun: "true"
os: "windows"
arch: "x64"
command:
- "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun"


@@ -29,6 +29,10 @@ function assert_curl() {
assert_command "curl" "curl" "https://curl.se/download.html"
}
function assert_node() {
assert_command "node" "node" "https://nodejs.org/en/download/"
}
function assert_command() {
local command="$1"
local package="$2"
@@ -92,6 +96,12 @@ assert_build
assert_buildkite_agent
assert_jq
assert_curl
assert_node
assert_release
assert_canary
upload_buildkite_pipeline ".buildkite/ci.yml"
run_command node ".buildkite/ci.mjs"
if [ -f ".buildkite/ci.yml" ]; then
upload_buildkite_pipeline ".buildkite/ci.yml"
fi


@@ -7,6 +7,7 @@ on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.27"


@@ -7,6 +7,7 @@ on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.27"


@@ -83,6 +83,26 @@ jobs:
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
rm -rf is-outdated.txt outdated.txt latest.txt
- name: Generate comment text with Sentry Link
if: github.event.label.name == 'crash'
# ignore if fail
continue-on-error: true
id: generate-comment-text
env:
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_EVENTS_SECRET }}
shell: bash
run: |
bun scripts/associate-issue-with-sentry.ts
if [[ -f "sentry-link.txt" ]]; then
echo "sentry-link=$(cat sentry-link.txt)" >> $GITHUB_OUTPUT
fi
if [[ -f "sentry-id.txt" ]]; then
echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT
fi
- name: Add labels
uses: actions-cool/issues-helper@v3
if: github.event.label.name == 'crash'
@@ -92,7 +112,7 @@ jobs:
issue-number: ${{ github.event.issue.number }}
labels: ${{ steps.add-labels.outputs.labels }}
- name: Comment outdated
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash'
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
@@ -106,6 +126,40 @@ jobs:
```sh
bun upgrade
```
- name: Comment with Sentry Link and outdated version
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.oudated }}.
Are you able to reproduce this crash on the latest version of Bun?
```sh
bun upgrade
```
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment with Sentry Link
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated != 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Thank you for reporting this crash.
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment needs repro
if: github.event.label.name == 'needs repro'
uses: actions-cool/issues-helper@v3


@@ -7,6 +7,7 @@ on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.27"


@@ -7,6 +7,7 @@ on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.27"

.gitignore (vendored, 4 changed lines)

@@ -163,3 +163,7 @@ scripts/env.local
/src/deps/zstd
/src/deps/zlib
/src/deps/zig
# Generated files
.buildkite/ci.yml

.vscode/launch.json (generated, vendored, 70 changed lines)

@@ -14,7 +14,7 @@
"name": "bun test [file]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -29,7 +29,7 @@
"name": "bun test [file] --only",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -50,7 +50,7 @@
"name": "bun test [file] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -65,7 +65,7 @@
"name": "bun test [file] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
@@ -80,7 +80,7 @@
"name": "bun test [file] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--watch", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -95,7 +95,7 @@
"name": "bun test [file] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--hot", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -110,7 +110,7 @@
"name": "bun test [file] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -131,7 +131,7 @@
"name": "bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -268,7 +268,7 @@
"name": "bun test [...]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -283,7 +283,7 @@
"name": "bun test [...] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -298,7 +298,7 @@
"name": "bun test [...] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -313,7 +313,7 @@
"name": "bun test [...] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -328,7 +328,7 @@
"name": "bun test [...] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -343,7 +343,7 @@
"name": "bun test [...] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -364,7 +364,7 @@
"name": "bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -401,7 +401,7 @@
"name": "bun test [*]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -415,7 +415,7 @@
"name": "bun test [*] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -429,7 +429,7 @@
"name": "bun test [*] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -481,7 +481,7 @@
"name": "Windows: bun test [file]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -510,7 +510,7 @@
"name": "Windows: bun test --only [file]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -539,7 +539,7 @@
"name": "Windows: bun test [file] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -568,7 +568,7 @@
"name": "Windows: bun test [file] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -597,7 +597,7 @@
"name": "Windows: bun test [file] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -635,7 +635,7 @@
"name": "Windows: bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -822,7 +822,7 @@
"name": "Windows: bun test [...]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -851,7 +851,7 @@
"name": "Windows: bun test [...] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -880,7 +880,7 @@
"name": "Windows: bun test [...] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -909,7 +909,7 @@
"name": "Windows: bun test [...] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -938,7 +938,7 @@
"name": "Windows: bun test [...] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -967,7 +967,7 @@
"name": "Windows: bun test [...] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1005,7 +1005,7 @@
"name": "Windows: bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1070,7 +1070,7 @@
"name": "Windows: bun test [*]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1095,7 +1095,7 @@
"name": "Windows: bun test [*] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1124,7 +1124,7 @@
"name": "Windows: bun test [*] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1196,4 +1196,4 @@
"description": "Usage: bun test [...]",
},
],
}
}


@@ -30,7 +30,7 @@ $ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config
```
```bash#openSUSE Tumbleweed
$ sudo zypper install go cmake ninja automake git rustup && rustup toolchain install stable
$ sudo zypper install go cmake ninja automake git icu rustup && rustup toolchain install stable
```
{% /codetabs %}
@@ -77,8 +77,8 @@ $ sudo pacman -S llvm clang lld
```bash#Fedora
$ sudo dnf install 'dnf-command(copr)'
$ sudo dnf copr enable -y @fedora-llvm-team/llvm-snapshots
$ sudo dnf install llvm clang lld
$ sudo dnf copr enable -y @fedora-llvm-team/llvm17
$ sudo dnf install llvm16 clang16 lld16-devel
```
```bash#openSUSE Tumbleweed

LATEST (2 changed lines)

@@ -1 +1 @@
1.1.30
1.1.33


@@ -3,9 +3,9 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench:deno": "deno run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}


@@ -1,11 +1,11 @@
{
"name": "bench",
"scripts": {
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"deps": "cd src && bun run deps",
"build": "cd src && bun run build",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench:deno": "deno run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}


@@ -0,0 +1,14 @@
syntax = "proto3";
package benchmark;
service BenchmarkService {
rpc Ping(Request) returns (Response);
}
message Request {
string message = 1;
}
message Response {
string message = 1;
}


@@ -0,0 +1,33 @@
-----BEGIN CERTIFICATE-----
MIIFxjCCA66gAwIBAgIUUaQCzOcxcFBP0KwoQfNqD/FoI44wDQYJKoZIhvcNAQEL
BQAwYjELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJh
bmNpc2NvMQwwCgYDVQQKDANCdW4xDDAKBgNVBAsMA0J1bjESMBAGA1UEAwwJbG9j
YWxob3N0MB4XDTI0MTAxNjAwMDExNloXDTM0MTAxNDAwMDExNlowYjELMAkGA1UE
BhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJhbmNpc2NvMQwwCgYD
VQQKDANCdW4xDDAKBgNVBAsMA0J1bjESMBAGA1UEAwwJbG9jYWxob3N0MIICIjAN
BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp2s1CWRRV3bkjUxyBefcRCiZj8v6
LIIWOb/kFJOo1PQsmQtOOWfY/kNEATPhLtEVolMzsQtaKV+u/Jnp6vU6cCU0qfQ/
cha/s0XaSn9zkJSXjmNOPDOXoeJ5wmSUvWETRvDgeYXCg84zTwRnD1pXIsKxHtia
SYkTC29skSn0+63GW2Ebzkbn3jcYbk3gfkRO/qw8EDh/4/TcS2SjoHl96E1QcfBX
InXrPGoHQhuqJV60rmmkVws0lTIZIq0g2p7iFDCg5TG1asakX7+CrEM/q+oyo3e8
RwMfc+9pqFEqyvXGIQSulS+CVKKbpAFMg07UGYe1t0s5iCwfLQ9apaKL31t/3Vkr
uVKgy5FrPLnRXkFXDZ1v+43AZBmdLrKODzsqHEbt2JmV0V6JVUkE4kbeJr/nlkhQ
x6yXloYY3VKbnCb1L3HmMInrK1QSpxlOb8RllTd33oBwd1FKEvH2gza0j9hqq8uQ
hWVN7tlamkgtBteZ8Y9fd3MdxD9iZOx4dVtCX1+sgJFdaL2ZgE0asojn46yT8Uqw
5d0M9vqmWc5AqG7c4UWWRrfB1MfOq/X8GtImmKyhEgizIPdWFeF1cNjhPffJv4yR
Y4Rj33OBTCM+9h8ZSw/fKo55yRXyz3bjrW2Mg8Dtq+6TcRd5gSLCaTN6jX8E9y7G
TobnA9MnKHhSIhsCAwEAAaN0MHIwHQYDVR0OBBYEFEJU6/9ELCp1CAxYJ5FJJxpV
FSRmMB8GA1UdIwQYMBaAFEJU6/9ELCp1CAxYJ5FJJxpVFSRmMA8GA1UdEwEB/wQF
MAMBAf8wHwYDVR0RBBgwFoIJbG9jYWxob3N0ggkxMjcuMC4wLjEwDQYJKoZIhvcN
AQELBQADggIBACyOPdVwfJg1aUNANy78+cm6eoInM9NDdXGWHMqCJwYF6qJTQV11
jYwYrl+OWOi3CEC+ogXl+uJX4tSS5d+rBTXEb73cLpogxP+xuxr4cBHhtgpGRpY0
GqWCFUTexHxXMrYhHQxf3uv79PNauw/dd1Baby1OjF3zSKRzFsv4KId97cAgT/9H
HfUo2ym5jmhNFj5rhUavO3Pw1++1eeDeDAkS6T59buzx0h9760WD20oBdgjt42cb
P6xg9OwV7ALQSwJ8YPEXpkl7u+6jy0j5ceYmXh76tAyA+hDYOJrY0opBjSPmXH99
p3W63gvk/AdfeAdbFHp6en0b04x4EIogOGZxBP35rzBvsQpqavBE3PBpUIyrQs5p
OBUncRrcjEDL6WKh6RJIjZnvpHPrEqOqyxaeWRc4+85ZrVArJHGMc8I+zs9uCFjo
Cjfde3d317kCszUTxo0l3azyBpr007PMIUoBF2VJEAyQp2Tz/yu0CbEscNJO/wCn
Sb1A6ojaQcgQe2hsaJz/mS+OOjHHaDbCp9iltP2CS63PYleEx4q1Bn8KVRy2zYTB
n74y4YaD8Q+hSA6zU741pzqK2SFCpBQnSz757ocr6WspQ47iOonX2giGZS/3KVeK
qNzU14+h0b8HaBqZmOvjF+S4G0HDpRwxPzDWgc7dEIWlzHH+ZCqjBFwL
-----END CERTIFICATE-----


@@ -0,0 +1,31 @@
const grpc = require("@grpc/grpc-js");
const protoLoader = require("@grpc/proto-loader");
const packageDefinition = protoLoader.loadSync("benchmark.proto", {});
const proto = grpc.loadPackageDefinition(packageDefinition).benchmark;
const fs = require("fs");
function ping(call, callback) {
callback(null, { message: "Hello, World" });
}
function main() {
const server = new grpc.Server();
server.addService(proto.BenchmarkService.service, { ping: ping });
const tls = !!process.env.TLS && (process.env.TLS === "1" || process.env.TLS === "true");
const port = process.env.PORT || 50051;
const host = process.env.HOST || "localhost";
let credentials;
if (tls) {
const ca = fs.readFileSync("./cert.pem");
const key = fs.readFileSync("./key.pem");
const cert = fs.readFileSync("./cert.pem");
credentials = grpc.ServerCredentials.createSsl(ca, [{ private_key: key, cert_chain: cert }]);
} else {
credentials = grpc.ServerCredentials.createInsecure();
}
server.bindAsync(`${host}:${port}`, credentials, () => {
console.log(`Server running at ${tls ? "https" : "http"}://${host}:${port}`);
});
}
main();
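To show how this bench server is exercised outside of ghz, a hypothetical companion client might look like the sketch below. It is not part of the diff; it assumes the benchmark.proto shown above and an insecure server listening on localhost:50051.

```js
// Hypothetical client for the bench server above (illustration only, not in the diff).
// Assumes benchmark.proto from this directory and TLS disabled on the server.
const grpc = require("@grpc/grpc-js");
const protoLoader = require("@grpc/proto-loader");

const packageDefinition = protoLoader.loadSync("benchmark.proto", {});
const proto = grpc.loadPackageDefinition(packageDefinition).benchmark;

const client = new proto.BenchmarkService("localhost:50051", grpc.credentials.createInsecure());
client.ping({ message: "Hello" }, (err, response) => {
  if (err) throw err;
  console.log(response.message); // "Hello, World" from the server's ping handler
  client.close();
});
```

The ghz commands in the grpc-server package.json further down drive the same BenchmarkService.Ping method, just at benchmark volume.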

bench/grpc-server/key.pem (new file, 52 changed lines)

@@ -0,0 +1,52 @@
-----BEGIN PRIVATE KEY-----
MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCnazUJZFFXduSN
THIF59xEKJmPy/osghY5v+QUk6jU9CyZC045Z9j+Q0QBM+Eu0RWiUzOxC1opX678
menq9TpwJTSp9D9yFr+zRdpKf3OQlJeOY048M5eh4nnCZJS9YRNG8OB5hcKDzjNP
BGcPWlciwrEe2JpJiRMLb2yRKfT7rcZbYRvORufeNxhuTeB+RE7+rDwQOH/j9NxL
ZKOgeX3oTVBx8Fcides8agdCG6olXrSuaaRXCzSVMhkirSDanuIUMKDlMbVqxqRf
v4KsQz+r6jKjd7xHAx9z72moUSrK9cYhBK6VL4JUopukAUyDTtQZh7W3SzmILB8t
D1qloovfW3/dWSu5UqDLkWs8udFeQVcNnW/7jcBkGZ0uso4POyocRu3YmZXRXolV
SQTiRt4mv+eWSFDHrJeWhhjdUpucJvUvceYwiesrVBKnGU5vxGWVN3fegHB3UUoS
8faDNrSP2Gqry5CFZU3u2VqaSC0G15nxj193cx3EP2Jk7Hh1W0JfX6yAkV1ovZmA
TRqyiOfjrJPxSrDl3Qz2+qZZzkCobtzhRZZGt8HUx86r9fwa0iaYrKESCLMg91YV
4XVw2OE998m/jJFjhGPfc4FMIz72HxlLD98qjnnJFfLPduOtbYyDwO2r7pNxF3mB
IsJpM3qNfwT3LsZOhucD0ycoeFIiGwIDAQABAoICAE+YYrDCZwHEXsjmzVcNcuVc
wBVjjt9WQabXGmLGCQClzgY9H8WfH8VSyaQgvDB762MvV2YW1ZjSCunBazrvuAbV
SYJ7wyZEtoNO9IdyrMjSPHPPtsRcavzmJalMFIMtAfM6Vh6wf1gW0sIAf9cGxmKa
WYcmx8OqTcmkAePKJNT7O1D6jDO39kjpvM3EbLTbWQsva6bylasVIR8fC8QhvsCQ
8WwaLfMOSPaCGk1Nxcjai+BYDW/sveUo2lZoJTSLUUT0EaqlxXCsXD3BWSj5F+5t
/AFHzdWdIHkIHB2P6V5xFu9fwHjhC3+dh42jqHLNKX2xza0FMKcTAwdzQ094RjL3
cOGIsa0Vdt7Mks5eLCRxz0xI3kyrbF0/CopxT0pVWZwUzPk1G+Z3HesWkVtQpg7u
RYzsoNKKc5mhc/V+vG290WAcNB4E3m85DgKQr4ib+J/rCy5/SnJYgg4QXsEyNlQ5
ESBtRmuPfnrPIxqrDKZ7ZsJv8XFWydXTOfJxeKR1T1S02iYna+z1FnNu+t0ELTr9
uhmkuqmV8RJVTub1P2EJPdiku/61UwNLyyZMgFjATDxB0hHIj1FP1HbfhEYbkYNc
Dl7a7egJ4KFYWpQ+7MzOmc0OKq1HuJ9H4FhoYpbVq1OQosZ6G3d9afKSZa6dFdK0
8ujvdQBR0NlAhc/LAr6BAoIBAQDfD3h9P4i5L8NCdocovCi3Eo0kcNQ3QuvnWrrs
B/9CLoWhJrcLV85d0dEX6lSYl9BWW02ilVB+Qvom2wS2td1CBUgDxovX4tCZCuXt
otYL/yWWOA7IG0Fjt6YEERQD/tRfKnn8hVBlk5cDTXXxHRGVMku4CHsN3ILtITQS
VnVsTrGoWd6mFFA9X9Qu4zR9wKtjGEuL7BT8ixxtXLa2tMjdc4UL140yAgmMemJS
TzC6EURe2OnhIzVe9yyLKcqw0prkGHg/Lau5lA1CAh67ZMY4EjO3cuda8R+O7vyO
z2afeaTORzzdEbSZPG+8oqIN1/RjRCbl3RXYN8ibSwOzp6X7AoIBAQDAJEVta98J
P2/36rXrkl6WrRfYqUPy6vgo/lPuRpp+BQ7ldgmH4+ZrJW5Mxa5hktVujk/C2kAO
auzhzNlsxR+c/KwtsL1JXwBn8CT1bR0qvi+URmvGQn9GOKrLLy+6cfphuZWuc4/r
hAgXzEjzPcJJJfxA1i2soKPbiFiCGHxot68P4uJSM2sU6QjNIxEjPbTJjEg894pD
GJoiRRVHgnzzxL3cqrK90Zn6MAl9f2tYihfddsENeZb5t84LBppxBSGouE3ZH8uD
Sufs4DSj1ptocbDbX+0kRNqfjTI5ivDxlS+ZKBe05PVTUmGBAWLamfCe89IW3/z+
Rfkh4ZBPtlphAoIBADwjSqPR7kWnN+iCVjxIRl3dNYpelQh1FW7hikW6fjpUmphw
/KalPLEUsV/WQIqHW5b8tLihsvrnidPR9rpf29BB5kGGVQuWThEE3CquXTEM0BBo
+qs+lemRiMPN6uyM1qr1o7/OHXfVS8CLMMIZyTTFQ57RQoPhMLdH3WcYQj46FTHD
UQDLtzpkzKr7fJpuyIZF9ZA6zQmtY7OkbGpj4Ue7LmKb8ahK3lIuaLWyPfvcTeeY
aa3WNTxuPWcjlE8J6NKYOksmQAcfgFeMhMaXC83wMltCMlfVbGG30wWZqxxRynoG
wMUFUgCCR8m+uxwqXewpYqdUbOBHYeFkXxIfn+MCggEAR5p8wQ1NHd4lNOekCfkP
BOnWlChoKRPFjUlSL97h3gq2hW6amKimitF1LGkS1kvo+/1O3heFfZn9UxyK/kzr
vg4vgAt4Tup3dUR6EXgrQW2Ev6YKreTEF4Awre2UxM+K9nY5wLxSKvuWJIA9w2AF
kkr0mZj3hniK99n02e6UFlY1iB8OJoIA6tb5L7FcxpxNTjrYBNhfDygQ8Kp8Bp0r
QZDVDHIUkEaXMjRKpRkiAOndgOurgAEK8V69C0DXtzypUX31jO+bYP8+NPlMxK3K
Vn7f4LD75+M88e6lg+oyZmUpStM1GnWksvtlWLUSiNKLaEEGzv2EA6JB+I1dwUb8
oQKCAQEAlmisUyn1/lpNnEzKsfUnRs53WxS2e1br5vJ5+pet3cjXT2btfp6J5/mf
Tfqv5mZfTjYxydG0Kl3afI/SnhTcRS2/s4svrktZYLOLM2PAGYdCV6j1stXl4ObO
eIfjzB3y1Zc2dEcWTylJ/lABoNGMPWFJQ67q8WS37pUHQPseJ++LmZFvlRyBgZBl
VLqiHHiZ2ax+yC1ZxY4RECtEiYFplspNldNe+bP/lzTJftsUDe1FqRT/SvEam+1f
kb//sbHkJ+l4BEv0Us3SIGwJ0BblhxLYO34IFVpheY4UQBy/nRaeUUdVR9r8JtYD
z/cCLOrUJfealezimyd8SKPWPeHhrA==
-----END PRIVATE KEY-----


@@ -0,0 +1,15 @@
{
"name": "bench",
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bun:server": "TLS=1 PORT=50051 bun ./index.js",
"node:server": "TLS=1 PORT=50051 node ./index.js",
"bench": "ghz --cacert ./cert.pem --proto ./benchmark.proto --call benchmark.BenchmarkService.Ping -d '{\"message\": \"Hello\"}' --total=100000 localhost:50051",
"bench:insecure": "ghz --insecure --proto ./benchmark.proto --call benchmark.BenchmarkService.Ping -d '{\"message\": \"Hello\"}' --total=100000 localhost:50051"
},
"dependencies": {
"@grpc/grpc-js": "1.12.0",
"@grpc/proto-loader": "0.7.10"
}
}


@@ -3,9 +3,9 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench:deno": "deno run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
},
"dependencies": {


@@ -3,9 +3,9 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "$BUN bun.js | grep iter",
"bench:node": "$NODE node.mjs | grep iter",
"bench:deno": "$DENO run -A --unstable deno.mjs | grep iter",
"bench:bun": "bun bun.js | grep iter",
"bench:node": "node node.mjs | grep iter",
"bench:deno": "deno run -A --unstable deno.mjs | grep iter",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}


@@ -3,8 +3,8 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench": "bun run bench:bun && bun run bench:node"
}
}


@@ -0,0 +1,15 @@
import { bench, run } from "./runner.mjs";
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
for (let fillSize of [4, 8, 16, 11]) {
const buffer = Buffer.allocUnsafe(size);
const pattern = "x".repeat(fillSize);
bench(`Buffer.fill ${size} bytes with ${fillSize} byte value`, () => {
buffer.fill(pattern);
});
}
}
await run();


@@ -5,10 +5,10 @@
},
"scripts": {
"build": "exit 0",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"deps": "npm install && bash src/download.sh",
"bench:deno": "$DENO run -A --unstable-ffi deno.js",
"bench:deno": "deno run -A --unstable-ffi deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}

ci/README.md (new file, 84 changed lines)

@@ -0,0 +1,84 @@
# CI
This directory contains scripts for building CI images for Bun.
## Building
### `macOS`
On macOS, images are built using [`tart`](https://tart.run/), a tool that abstracts over the [`Virtualization.Framework`](https://developer.apple.com/documentation/virtualization) APIs, to run macOS VMs.
To install the dependencies required, run:
```sh
$ cd ci
$ bun run bootstrap
```
To build a vanilla macOS VM, run:
```sh
$ bun run build:darwin-aarch64-vanilla
```
This builds a vanilla macOS VM with the current macOS release on your machine. It runs scripts to disable things like spotlight and siri, but it does not install any software.
> Note: The image size is 50GB, so make sure you have enough disk space.
If you want to build a specific macOS release, you can run:
```sh
$ bun run build:darwin-aarch64-vanilla-15
```
> Note: You cannot build a newer release of macOS on an older macOS machine.
To build a macOS VM with software installed to build and test Bun, run:
```sh
$ bun run build:darwin-aarch64
```
## Running
### `macOS`
## How To
### Support a new macOS release
1. Visit [`ipsw.me`](https://ipsw.me/VirtualMac2,1) and find the IPSW of the macOS release you want to build.
2. Add an entry to [`ci/darwin/variables.pkr.hcl`](/ci/darwin/variables.pkr.hcl) with the following format:
```hcl
sonoma = {
distro = "sonoma"
release = "15"
ipsw = "https://updates.cdn-apple.com/..."
}
```
3. Add matching scripts to [`ci/package.json`](/ci/package.json) to build the image, then test it:
```sh
$ bun run build:darwin-aarch64-vanilla-15
```
> Note: If you need to troubleshoot the build, you can remove the `headless = true` property from [`ci/darwin/image-vanilla.pkr.hcl`](/ci/darwin/image-vanilla.pkr.hcl) and the VM's screen will be displayed.
4. Test and build the non-vanilla image:
```sh
$ bun run build:darwin-aarch64-15
```
This will use the vanilla image and run the [`scripts/bootstrap.sh`](/scripts/bootstrap.sh) script to install the required software to build and test Bun.
5. Publish the images:
```sh
$ bun run login
$ bun run publish:darwin-aarch64-vanilla-15
$ bun run publish:darwin-aarch64-15
```

View File

@@ -0,0 +1,46 @@
# Generates a vanilla macOS VM with optimized settings for virtualized environments.
# See login.sh and optimize.sh for details.
data "external-raw" "boot-script" {
program = ["sh", "-c", templatefile("scripts/boot-image.sh", var)]
}
source "tart-cli" "bun-darwin-aarch64-vanilla" {
vm_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
from_ipsw = local.release.ipsw
cpu_count = local.cpu_count
memory_gb = local.memory_gb
disk_size_gb = local.disk_size_gb
ssh_username = local.username
ssh_password = local.password
ssh_timeout = "120s"
create_grace_time = "30s"
boot_command = split("\n", data.external-raw.boot-script.result)
headless = true # Disable if you need to debug why the boot_command is not working
}
build {
sources = ["source.tart-cli.bun-darwin-aarch64-vanilla"]
provisioner "file" {
content = file("scripts/setup-login.sh")
destination = "/tmp/setup-login.sh"
}
provisioner "shell" {
inline = ["echo \"${local.password}\" | sudo -S sh -c 'sh /tmp/setup-login.sh \"${local.username}\" \"${local.password}\"'"]
}
provisioner "file" {
content = file("scripts/optimize-machine.sh")
destination = "/tmp/optimize-machine.sh"
}
provisioner "shell" {
inline = ["sudo sh /tmp/optimize-machine.sh"]
}
provisioner "shell" {
inline = ["sudo rm -rf /tmp/*"]
}
}

44
ci/darwin/image.pkr.hcl Normal file
View File

@@ -0,0 +1,44 @@
# Generates a macOS VM with software installed to build and test Bun.
source "tart-cli" "bun-darwin-aarch64" {
vm_name = "bun-darwin-aarch64-${local.release.distro}-${local.release.release}"
vm_base_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
cpu_count = local.cpu_count
memory_gb = local.memory_gb
disk_size_gb = local.disk_size_gb
ssh_username = local.username
ssh_password = local.password
ssh_timeout = "120s"
headless = true
}
build {
sources = ["source.tart-cli.bun-darwin-aarch64"]
provisioner "file" {
content = file("../../scripts/bootstrap.sh")
destination = "/tmp/bootstrap.sh"
}
provisioner "shell" {
inline = ["CI=true sh /tmp/bootstrap.sh"]
}
provisioner "file" {
source = "darwin/plists/"
destination = "/tmp/"
}
provisioner "shell" {
inline = [
"sudo ls /tmp/",
"sudo mv /tmp/*.plist /Library/LaunchDaemons/",
"sudo chown root:wheel /Library/LaunchDaemons/*.plist",
"sudo chmod 644 /Library/LaunchDaemons/*.plist",
]
}
provisioner "shell" {
inline = ["sudo rm -rf /tmp/*"]
}
}

View File

@@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.buildkite.buildkite-agent</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/buildkite-agent</string>
<string>start</string>
</array>
<key>KeepAlive</key>
<dict>
<key>SuccessfulExit</key>
<false />
</dict>
<key>RunAtLoad</key>
<true />
<key>StandardOutPath</key>
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
<key>StandardErrorPath</key>
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
<key>EnvironmentVariables</key>
<dict>
<key>BUILDKITE_AGENT_CONFIG</key>
<string>/etc/buildkite-agent/buildkite-agent.cfg</string>
</dict>
<key>LimitLoadToSessionType</key>
<array>
<string>Aqua</string>
<string>LoginWindow</string>
<string>Background</string>
<string>StandardIO</string>
<string>System</string>
</array>
</dict>
</plist>

View File

@@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.tailscale.tailscaled</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/tailscale</string>
<string>up</string>
<string>--ssh</string>
<string>--authkey</string>
<string>${TAILSCALE_AUTHKEY}</string>
</array>
<key>RunAtLoad</key>
<true />
</dict>
</plist>

View File

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.tailscale.tailscaled</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/tailscaled</string>
</array>
<key>RunAtLoad</key>
<true />
</dict>
</plist>

124
ci/darwin/scripts/boot-image.sh Executable file
View File

@@ -0,0 +1,124 @@
#!/bin/sh
# This script generates the boot commands for the macOS installer GUI.
# It is run on your local machine, not inside the VM.
# Sources:
# - https://github.com/cirruslabs/macos-image-templates/blob/master/templates/vanilla-sequoia.pkr.hcl
if ! [ "${release}" ] || ! [ "${username}" ] || ! [ "${password}" ]; then
echo "Script must be run with variables: release, username, and password" >&2
exit 1
fi
# Hello, hola, bonjour, etc.
echo "<wait120s><spacebar>"
# Select Your Country and Region
echo "<wait30s>italiano<esc>english<enter>"
echo "<wait30s>united states<leftShiftOn><tab><leftShiftOff><spacebar>"
# Written and Spoken Languages
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Accessibility
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Data & Privacy
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Migration Assistant
echo "<wait30s><tab><tab><tab><spacebar>"
# Sign In with Your Apple ID
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
# Are you sure you want to skip signing in with an Apple ID?
echo "<wait30s><tab><spacebar>"
# Terms and Conditions
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# I have read and agree to the macOS Software License Agreement
echo "<wait30s><tab><spacebar>"
# Create a Computer Account
echo "<wait30s>${username}<tab><tab>${password}<tab>${password}<tab><tab><tab><spacebar>"
# Enable Location Services
echo "<wait60s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Are you sure you don't want to use Location Services?
echo "<wait30s><tab><spacebar>"
# Select Your Time Zone
echo "<wait30s><tab>UTC<enter><leftShiftOn><tab><leftShiftOff><spacebar>"
# Analytics
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Screen Time
echo "<wait30s><tab><spacebar>"
# Siri
echo "<wait30s><tab><spacebar><leftShiftOn><tab><leftShiftOff><spacebar>"
# Choose Your Look
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
if [ "${release}" = "13" ] || [ "${release}" = "14" ]; then
# Enable Voice Over
echo "<wait30s><leftAltOn><f5><leftAltOff><wait5s>v"
else
# Welcome to Mac
echo "<wait30s><spacebar>"
# Enable Keyboard navigation
echo "<wait30s><leftAltOn><spacebar><leftAltOff>Terminal<enter>"
echo "<wait30s>defaults write NSGlobalDomain AppleKeyboardUIMode -int 3<enter>"
echo "<wait30s><leftAltOn>q<leftAltOff>"
fi
# Now that the installation is done, open "System Settings"
echo "<wait30s><leftAltOn><spacebar><leftAltOff>System Settings<enter>"
# Navigate to "Sharing"
echo "<wait30s><leftAltOn>f<leftAltOff>sharing<enter>"
if [ "${release}" = "13" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><down><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><spacebar>"
# Open "Remote Login" details
echo "<wait30s><tab><spacebar>"
# Enable "Full Disk Access"
echo "<wait30s><tab><spacebar>"
# Click "Done"
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
# Disable Voice Over
echo "<leftAltOn><f5><leftAltOff>"
elif [ "${release}" = "14" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
# Disable Voice Over
echo "<wait30s><leftAltOn><f5><leftAltOff>"
elif [ "${release}" = "15" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
fi
# Quit System Settings
echo "<wait30s><leftAltOn>q<leftAltOff>"

View File

@@ -0,0 +1,122 @@
#!/bin/sh
# This script optimizes macOS for virtualized environments.
# It disables things like spotlight, screen saver, and sleep.
# Sources:
# - https://github.com/sickcodes/osx-optimizer
# - https://github.com/koding88/MacBook-Optimization-Script
# - https://www.macstadium.com/blog/simple-optimizations-for-macos-and-ios-build-agents
if [ "$(id -u)" != "0" ]; then
echo "This script must be run using sudo." >&2
exit 1
fi
execute() {
echo "$ $@" >&2
if ! "$@"; then
echo "Command failed: $@" >&2
exit 1
fi
}
disable_software_update() {
execute softwareupdate --schedule off
execute defaults write com.apple.SoftwareUpdate AutomaticDownload -bool false
execute defaults write com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false
execute defaults write com.apple.SoftwareUpdate ConfigDataInstall -int 0
execute defaults write com.apple.SoftwareUpdate CriticalUpdateInstall -int 0
execute defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 0
execute defaults write com.apple.SoftwareUpdate AutomaticDownload -int 0
execute defaults write com.apple.commerce AutoUpdate -bool false
execute defaults write com.apple.commerce AutoUpdateRestartRequired -bool false
}
disable_spotlight() {
execute mdutil -i off -a
execute mdutil -E /
}
disable_siri() {
execute launchctl unload -w /System/Library/LaunchAgents/com.apple.Siri.agent.plist
execute defaults write com.apple.Siri StatusMenuVisible -bool false
execute defaults write com.apple.Siri UserHasDeclinedEnable -bool true
execute defaults write com.apple.assistant.support "Assistant Enabled" 0
}
disable_sleep() {
execute systemsetup -setsleep Never
execute systemsetup -setcomputersleep Never
execute systemsetup -setdisplaysleep Never
execute systemsetup -setharddisksleep Never
}
disable_screen_saver() {
execute defaults write com.apple.screensaver loginWindowIdleTime 0
execute defaults write com.apple.screensaver idleTime 0
}
disable_screen_lock() {
execute defaults write com.apple.loginwindow DisableScreenLock -bool true
}
disable_wallpaper() {
execute defaults write com.apple.loginwindow DesktopPicture ""
}
disable_application_state() {
execute defaults write com.apple.loginwindow TALLogoutSavesState -bool false
}
disable_accessibility() {
execute defaults write com.apple.Accessibility DifferentiateWithoutColor -int 1
execute defaults write com.apple.Accessibility ReduceMotionEnabled -int 1
execute defaults write com.apple.universalaccess reduceMotion -int 1
execute defaults write com.apple.universalaccess reduceTransparency -int 1
}
disable_dashboard() {
execute defaults write com.apple.dashboard mcx-disabled -boolean YES
execute killall Dock
}
disable_animations() {
execute defaults write NSGlobalDomain NSAutomaticWindowAnimationsEnabled -bool false
execute defaults write -g QLPanelAnimationDuration -float 0
execute defaults write com.apple.finder DisableAllAnimations -bool true
}
disable_time_machine() {
execute tmutil disable
}
enable_performance_mode() {
# https://support.apple.com/en-us/101992
if ! nvram boot-args 2>/dev/null | grep -q serverperfmode; then
execute nvram boot-args="serverperfmode=1 $(nvram boot-args 2>/dev/null | cut -f 2-)"
fi
}
add_terminal_to_desktop() {
execute ln -sf /System/Applications/Utilities/Terminal.app ~/Desktop/Terminal
}
main() {
disable_software_update
disable_spotlight
disable_siri
disable_sleep
disable_screen_saver
disable_screen_lock
disable_wallpaper
disable_application_state
disable_accessibility
disable_dashboard
disable_animations
disable_time_machine
enable_performance_mode
add_terminal_to_desktop
}
main

View File

@@ -0,0 +1,78 @@
#!/bin/sh
# This script generates a /etc/kcpassword file to enable auto-login on macOS.
# Yes, this stores your password in plain text. Do NOT do this on your local machine.
# Sources:
# - https://github.com/xfreebird/kcpassword/blob/master/kcpassword
if [ "$(id -u)" != "0" ]; then
echo "This script must be run using sudo." >&2
exit 1
fi
execute() {
echo "$ $@" >&2
if ! "$@"; then
echo "Command failed: $@" >&2
exit 1
fi
}
kcpassword() {
passwd="$1"
key="7d 89 52 23 d2 bc dd ea a3 b9 1f"
passwd_hex=$(printf "%s" "$passwd" | xxd -p | tr -d '\n')
key_len=33
passwd_len=${#passwd_hex}
remainder=$((passwd_len % key_len))
if [ $remainder -ne 0 ]; then
padding=$((key_len - remainder))
passwd_hex="${passwd_hex}$(printf '%0*x' $((padding / 2)) 0)"
fi
result=""
i=0
while [ $i -lt ${#passwd_hex} ]; do
for byte in $key; do
[ $i -ge ${#passwd_hex} ] && break
p="${passwd_hex:$i:2}"
r=$(printf '%02x' $((0x$p ^ 0x$byte)))
result="${result}${r}"
i=$((i + 2))
done
done
echo "$result"
}
login() {
username="$1"
password="$2"
enable_passwordless_sudo() {
execute mkdir -p /etc/sudoers.d/
echo "${username} ALL=(ALL) NOPASSWD: ALL" | EDITOR=tee execute visudo "/etc/sudoers.d/${username}-nopasswd"
}
enable_auto_login() {
echo "00000000: 1ced 3f4a bcbc ba2c caca 4e82" | execute xxd -r - /etc/kcpassword
execute defaults write /Library/Preferences/com.apple.loginwindow autoLoginUser "${username}"
}
disable_screen_lock() {
execute sysadminctl -screenLock off -password "${password}"
}
enable_passwordless_sudo
enable_auto_login
disable_screen_lock
}
if [ $# -ne 2 ]; then
echo "Usage: $0 <username> <password>" >&2
exit 1
fi
login "$@"

View File

@@ -0,0 +1,78 @@
packer {
required_plugins {
tart = {
version = ">= 1.12.0"
source = "github.com/cirruslabs/tart"
}
external = {
version = ">= 0.0.2"
source = "github.com/joomcode/external"
}
}
}
variable "release" {
type = number
default = 13
}
variable "username" {
type = string
default = "admin"
}
variable "password" {
type = string
default = "admin"
}
variable "cpu_count" {
type = number
default = 2
}
variable "memory_gb" {
type = number
default = 4
}
variable "disk_size_gb" {
type = number
default = 50
}
locals {
sequoia = {
tier = 1
distro = "sequoia"
release = "15"
ipsw = "https://updates.cdn-apple.com/2024FallFCS/fullrestores/062-78489/BDA44327-C79E-4608-A7E0-455A7E91911F/UniversalMac_15.0_24A335_Restore.ipsw"
}
sonoma = {
tier = 2
distro = "sonoma"
release = "14"
ipsw = "https://updates.cdn-apple.com/2023FallFCS/fullrestores/042-54934/0E101AD6-3117-4B63-9BF1-143B6DB9270A/UniversalMac_14.0_23A344_Restore.ipsw"
}
ventura = {
tier = 2
distro = "ventura"
release = "13"
ipsw = "https://updates.cdn-apple.com/2022FallFCS/fullrestores/012-92188/2C38BCD1-2BFF-4A10-B358-94E8E28BE805/UniversalMac_13.0_22A380_Restore.ipsw"
}
releases = {
15 = local.sequoia
14 = local.sonoma
13 = local.ventura
}
release = local.releases[var.release]
username = var.username
password = var.password
cpu_count = var.cpu_count
memory_gb = var.memory_gb
disk_size_gb = var.disk_size_gb
}

27
ci/package.json Normal file
View File

@@ -0,0 +1,27 @@
{
"private": true,
"scripts": {
"bootstrap": "brew install gh jq cirruslabs/cli/tart cirruslabs/cli/sshpass hashicorp/tap/packer && packer init darwin",
"login": "gh auth token | tart login ghcr.io --username $(gh api user --jq .login) --password-stdin",
"fetch:image-name": "echo ghcr.io/oven-sh/bun-vm",
"fetch:darwin-version": "echo 1",
"fetch:macos-version": "sw_vers -productVersion | cut -d. -f1",
"fetch:script-version": "cat ../scripts/bootstrap.sh | grep 'v=' | sed 's/v=\"//;s/\"//' | head -n 1",
"build:darwin-aarch64-vanilla": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=$(bun fetch:macos-version) darwin/",
"build:darwin-aarch64-vanilla-15": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=15 darwin/",
"build:darwin-aarch64-vanilla-14": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=14 darwin/",
"build:darwin-aarch64-vanilla-13": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=13 darwin/",
"build:darwin-aarch64": "packer build '-only=*.bun-darwin-aarch64' -var release=$(bun fetch:macos-version) darwin/",
"build:darwin-aarch64-15": "packer build '-only=*.bun-darwin-aarch64' -var release=15 darwin/",
"build:darwin-aarch64-14": "packer build '-only=*.bun-darwin-aarch64' -var release=14 darwin/",
"build:darwin-aarch64-13": "packer build '-only=*.bun-darwin-aarch64' -var release=13 darwin/",
"publish:darwin-aarch64-vanilla": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-vanilla-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-15": "tart push bun-darwin-aarch64-vanilla-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sequoia-15-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-14": "tart push bun-darwin-aarch64-vanilla-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sonoma-14-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-13": "tart push bun-darwin-aarch64-vanilla-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-vanilla-ventura-13-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-15": "tart push bun-darwin-aarch64-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-sequoia-15-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-14": "tart push bun-darwin-aarch64-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-sonoma-14-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-13": "tart push bun-darwin-aarch64-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-ventura-13-v$(bun fetch:script-version)\""
}
}

View File

@@ -79,7 +79,7 @@ endif()
optionx(CANARY_REVISION STRING "The canary revision of the build" DEFAULT ${DEFAULT_CANARY_REVISION})
if(RELEASE AND LINUX)
if(RELEASE AND LINUX AND CI)
set(DEFAULT_LTO ON)
else()
set(DEFAULT_LTO OFF)

View File

@@ -1,6 +1,11 @@
# https://clang.llvm.org/docs/ClangFormat.html
set(CLANG_FORMAT_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES})
file(GLOB BUN_H_SOURCES LIST_DIRECTORIES false ${CONFIGURE_DEPENDS}
${CWD}/src/bun.js/bindings/*.h
${CWD}/src/bun.js/modules/*.h
)
set(CLANG_FORMAT_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES} ${BUN_H_SOURCES})
register_command(
TARGET

View File

@@ -18,6 +18,7 @@ register_cmake_command(
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DCARES_SHARED=OFF
-DCARES_BUILD_TOOLS=OFF # this was set to ON?
-DCMAKE_INSTALL_LIBDIR=lib
LIB_PATH
lib
LIBRARIES

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/mimalloc
COMMIT
4c283af60cdae205df5a872530c77e2a6a307d43
82b2c2277a4d570187c07b376557dc5bde81d848
)
set(MIMALLOC_CMAKE_ARGS

View File

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION 12e2f46fb01f7c5cf5a992b9414ddfaab32b7110)
set(WEBKIT_VERSION 9b84f43643eff64ab46daec9b860de262c80f5e2)
endif()
if(WEBKIT_LOCAL)

View File

@@ -402,7 +402,7 @@ Bun.serve({
});
```
### Sever name indication (SNI)
### Server name indication (SNI)
To configure the server name indication (SNI) for the server, set the `serverName` field in the `tls` object.

View File

@@ -179,7 +179,7 @@ proc.kill(); // specify an exit code
The parent `bun` process will not terminate until all child processes have exited. Use `proc.unref()` to detach the child process from the parent.
```
```ts
const proc = Bun.spawn(["bun", "--version"]);
proc.unref();
```

View File

@@ -2,7 +2,7 @@
name: Build an app with Next.js and Bun
---
Initialize a Next.js app with `create-next-app`. This automatically installs dependencies using `npm`.
Initialize a Next.js app with `create-next-app`. This will scaffold a new Next.js project and automatically install dependencies.
```sh
$ bun create next-app

View File

@@ -37,7 +37,10 @@ Alternatively, you can create a PM2 configuration file. Create a file named `pm2
module.exports = {
name: "app", // Name of your application
script: "index.ts", // Entry point of your application
interpreter: "~/.bun/bin/bun", // Path to the Bun interpreter
interpreter: "bun", // Bun interpreter
env: {
PATH: `${process.env.HOME}/.bun/bin:${process.env.PATH}`, // Add "~/.bun/bin/bun" to PATH
}
};
```

View File

@@ -2,56 +2,62 @@
name: Build an app with SvelteKit and Bun
---
Use `bun create` to scaffold your app with the `svelte` package. Answer the prompts to select a template and set up your development environment.
Use `sv create my-app` to create a SvelteKit project with the Svelte CLI. Answer the prompts to select a template and set up your development environment.
```sh
$ bun create svelte@latest my-app
┌ Welcome to SvelteKit!
$ bunx sv create my-app
┌ Welcome to the Svelte CLI! (v0.5.7)
◇ Which Svelte app template?
│ SvelteKit demo app
◇ Which template would you like?
│ SvelteKit demo
◇ Add type checking with TypeScript?
│ Yes, using TypeScript syntax
◇ Add type checking with Typescript?
│ Yes, using Typescript syntax
Select additional options (use arrow keys/space bar)
│ None
Project created
└ Your project is ready!
✔ Typescript
Inside Svelte components, use <script lang="ts">
Install community-maintained integrations:
https://github.com/svelte-add/svelte-add
◇ What would you like to add to your project?
│ none
Which package manager do you want to install dependencies with?
│ bun
Successfully installed dependencies
◇ Project next steps ─────────────────────────────────────────────────────╮
│ │
│ 1: cd my-app │
│ 2: git init && git add -A && git commit -m "Initial commit" (optional)
│ 3: bun run dev -- --open │
│ │
│ To close the dev server, hit Ctrl-C │
│ │
│ Stuck? Visit us at https://svelte.dev/chat │
│ │
├──────────────────────────────────────────────────────────────────────────╯
└ You're all set!
```
---
Once the project is initialized, `cd` into the new project and install dependencies.
```sh
$ cd my-app
$ bun install
```
---
Once the project is initialized, `cd` into the new project. You don't need to run `bun install` since the dependencies are already installed.
Then start the development server with `bun --bun run dev`.
To run the dev server with Node.js instead of Bun, you can omit the `--bun` flag.
```sh
$ cd my-app
$ bun --bun run dev
$ vite dev
Forced re-optimization of dependencies
VITE v4.4.9 ready in 895 ms
VITE v5.4.10 ready in 424 ms
➜ Local: http://localhost:5173/
➜ Network: use --host to expose
➜ press h to show help
➜ press h + enter to show help
```
---
@@ -75,16 +81,22 @@ Now, make the following changes to your `svelte.config.js`.
```ts-diff
- import adapter from "@sveltejs/adapter-auto";
+ import adapter from "svelte-adapter-bun";
import { vitePreprocess } from "@sveltejs/kit/vite";
import { vitePreprocess } from '@sveltejs/vite-plugin-svelte';
/** @type {import('@sveltejs/kit').Config} */
const config = {
kit: {
adapter: adapter(),
},
preprocess: vitePreprocess(),
// Consult https://svelte.dev/docs/kit/integrations#preprocessors
// for more information about preprocessors
preprocess: vitePreprocess(),
kit: {
// adapter-auto only supports some environments, see https://svelte.dev/docs/kit/adapter-auto for a list.
// If your environment is not supported, or you settled on a specific environment, switch out the adapter.
// See https://svelte.dev/docs/kit/adapters for more information about adapters.
adapter: adapter()
}
};
export default config;
```
@@ -93,28 +105,21 @@ Now, make the following changes to your `svelte.config.js`.
To build a production bundle:
```sh
$ bun run build
$ vite build
vite v4.4.9 building SSR bundle for production...
transforming (60) node_modules/@sveltejs/kit/src/utils/escape.js
✓ 98 modules transformed.
Generated an empty chunk: "entries/endpoints/waitlist/_server.ts".
vite v4.4.9 building for production...
✓ 92 modules transformed.
Generated an empty chunk: "7".
.svelte-kit/output/client/_app/version.json 0.03 kB │ gzip: 0.05 kB
...
.svelte-kit/output/server/index.js 86.47 kB
Run npm run preview to preview your production build locally.
> Using svelte-adapter-bun
✔ Start server with: bun ./build/index.js
✔ done
✓ built in 7.81s
$ bun --bun run build
$ vite build
vite v5.4.10 building SSR bundle for production...
"confetti" is imported from external module "@neoconfetti/svelte" but never used in "src/routes/sverdle/+page.svelte".
✓ 130 modules transformed.
vite v5.4.10 building for production...
✓ 148 modules transformed.
...
✓ built in 231ms
...
✓ built in 899ms
Run npm run preview to preview your production build locally.
> Using svelte-adapter-bun
✔ Start server with: bun ./build/index.js
✔ done
```

View File

@@ -65,7 +65,7 @@ Some methods are not optimized yet.
### [`node:http2`](https://nodejs.org/api/http2.html)
🟡 Client is supported, but server isn't yet.
🟡 Client & server are implemented (95.25% of gRPC's test suite passes). Missing `options.allowHTTP1`, `options.enableConnectProtocol`, ALTSVC extension, and `http2stream.pushStream`.
### [`node:https`](https://nodejs.org/api/https.html)

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.1.31",
"version": "1.1.34",
"workspaces": [
"./packages/bun-types"
],

View File

@@ -3113,32 +3113,50 @@ declare module "bun" {
* @example \x1b[38;2;100;200;200m
*/
| "ansi"
| "ansi-16"
| "ansi-16m"
/**
* 256 color ANSI color string, for use in terminals which don't support true color
*
* Tries to match closest 24-bit color to 256 color palette
*/
| "ansi256"
| "ansi-256"
/**
* Picks the format that produces the shortest output
*/
| "css"
/**
* Lowercase hex color string without alpha
* @example #aabb11
* @example #ff9800
*/
| "hex"
/**
* Uppercase hex color string without alpha
* @example #FF9800
*/
| "HEX"
/**
* @example hsl(35.764706, 1, 0.5)
*/
| "hsl"
/**
* @example lab(0.72732764, 33.938198, -25.311619)
*/
| "lab"
/**
* @example 16750592
*/
| "number"
/**
* RGB color string without alpha
* rgb(100, 200, 200)
* @example rgb(255, 152, 0)
*/
| "rgb"
/**
* RGB color string with alpha
* rgba(100, 200, 200, 0.5)
* @example rgba(255, 152, 0, 1)
*/
| "rgba"
| "hsl"
| "lab"
| "css"
| "lab"
| "HEX",
| "rgba",
): string | null;
function color(
@@ -3231,7 +3249,7 @@ declare module "bun" {
}
const unsafe: Unsafe;
type DigestEncoding = "hex" | "base64";
type DigestEncoding = "utf8" | "ucs2" | "utf16le" | "latin1" | "ascii" | "base64" | "base64url" | "hex";
/**
* Are ANSI colors enabled for stdin and stdout?

View File

@@ -1673,7 +1673,36 @@ declare global {
groupEnd(): void;
info(...data: any[]): void;
log(...data: any[]): void;
/** Does nothing currently */
/**
* Try to construct a table with the columns of the properties of `tabularData` (or use `properties`) and rows of `tabularData` and log it. Falls back to just
* logging the argument if it can't be parsed as tabular.
*
* ```js
* // These can't be parsed as tabular data
* console.table(Symbol());
* // Symbol()
*
* console.table(undefined);
* // undefined
*
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]);
* // ┌────┬─────┬─────┐
* // │ │ a │ b │
* // ├────┼─────┼─────┤
* // │ 0 │ 1 │ 'Y' │
* // │ 1 │ 'Z' │ 2 │
* // └────┴─────┴─────┘
*
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']);
* // ┌────┬─────┐
* // │ │ a │
* // ├────┼─────┤
* // │ 0 │ 1 │
* // │ 1 │ 'Z' │
* // └────┴─────┘
* ```
* @param properties Alternate properties for constructing the table.
*/
table(tabularData?: any, properties?: string[]): void;
/**
* Begin a timer to log with {@link console.timeEnd}

View File

@@ -212,12 +212,13 @@ void us_socket_context_add_server_name(int ssl, struct us_socket_context_t *cont
}
#endif
}
void us_bun_socket_context_add_server_name(int ssl, struct us_socket_context_t *context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user) {
int us_bun_socket_context_add_server_name(int ssl, struct us_socket_context_t *context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user) {
#ifndef LIBUS_NO_SSL
if (ssl) {
us_bun_internal_ssl_socket_context_add_server_name((struct us_internal_ssl_socket_context_t *) context, hostname_pattern, options, user);
return us_bun_internal_ssl_socket_context_add_server_name((struct us_internal_ssl_socket_context_t *) context, hostname_pattern, options, user);
}
#endif
return 0;
}
/* Remove SNI context */

View File

@@ -855,6 +855,11 @@ create_ssl_context_from_options(struct us_socket_context_options_t options) {
}
}
if (ERR_peek_error() != 0) {
free_ssl_context(ssl_context);
return NULL;
}
/* This must be free'd with free_ssl_context, not SSL_CTX_free */
return ssl_context;
}
@@ -1106,6 +1111,8 @@ int us_verify_callback(int preverify_ok, X509_STORE_CTX *ctx) {
SSL_CTX *create_ssl_context_from_bun_options(
struct us_bun_socket_context_options_t options,
enum create_bun_socket_error_t *err) {
ERR_clear_error();
/* Create the context */
SSL_CTX *ssl_context = SSL_CTX_new(TLS_method());
@@ -1211,6 +1218,9 @@ SSL_CTX *create_ssl_context_from_bun_options(
return NULL;
}
// It may return spurious errors here.
ERR_clear_error();
if (options.reject_unauthorized) {
SSL_CTX_set_verify(ssl_context,
SSL_VERIFY_PEER | SSL_VERIFY_FAIL_IF_NO_PEER_CERT,
@@ -1336,7 +1346,7 @@ void us_internal_ssl_socket_context_add_server_name(
}
}
void us_bun_internal_ssl_socket_context_add_server_name(
int us_bun_internal_ssl_socket_context_add_server_name(
struct us_internal_ssl_socket_context_t *context,
const char *hostname_pattern,
struct us_bun_socket_context_options_t options, void *user) {
@@ -1344,6 +1354,9 @@ void us_bun_internal_ssl_socket_context_add_server_name(
/* Try and construct an SSL_CTX from options */
enum create_bun_socket_error_t err = CREATE_BUN_SOCKET_ERROR_NONE;
SSL_CTX *ssl_context = create_ssl_context_from_bun_options(options, &err);
if (ssl_context == NULL) {
return -1;
}
/* Attach the user data to this context */
if (1 != SSL_CTX_set_ex_data(ssl_context, 0, user)) {
@@ -1351,15 +1364,15 @@ void us_bun_internal_ssl_socket_context_add_server_name(
printf("CANNOT SET EX DATA!\n");
abort();
#endif
return -1;
}
/* We do not want to hold any nullptr's in our SNI tree */
if (ssl_context) {
if (sni_add(context->sni, hostname_pattern, ssl_context)) {
/* If we already had that name, ignore */
free_ssl_context(ssl_context);
}
if (sni_add(context->sni, hostname_pattern, ssl_context)) {
/* If we already had that name, ignore */
free_ssl_context(ssl_context);
}
return 0;
}
void us_internal_ssl_socket_context_on_server_name(

View File

@@ -302,7 +302,7 @@ void us_internal_ssl_socket_context_add_server_name(
us_internal_ssl_socket_context_r context,
const char *hostname_pattern, struct us_socket_context_options_t options,
void *user);
void us_bun_internal_ssl_socket_context_add_server_name(
int us_bun_internal_ssl_socket_context_add_server_name(
us_internal_ssl_socket_context_r context,
const char *hostname_pattern,
struct us_bun_socket_context_options_t options, void *user);

View File

@@ -234,7 +234,7 @@ unsigned short us_socket_context_timestamp(int ssl, us_socket_context_r context)
/* Adds SNI domain and cert in asn1 format */
void us_socket_context_add_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern, struct us_socket_context_options_t options, void *user);
void us_bun_socket_context_add_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user);
int us_bun_socket_context_add_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user);
void us_socket_context_remove_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern);
void us_socket_context_on_server_name(int ssl, us_socket_context_r context, void (*cb)(us_socket_context_r context, const char *hostname));
void *us_socket_server_name_userdata(int ssl, us_socket_r s);

View File

@@ -106,14 +106,17 @@ public:
/* Server name */
TemplatedApp &&addServerName(std::string hostname_pattern, SocketContextOptions options = {}) {
TemplatedApp &&addServerName(std::string hostname_pattern, SocketContextOptions options = {}, bool *success = nullptr) {
/* Do nothing if not even on SSL */
if constexpr (SSL) {
/* First we create a new router for this domain */
auto *domainRouter = new HttpRouter<typename HttpContextData<SSL>::RouterData>();
us_bun_socket_context_add_server_name(SSL, (struct us_socket_context_t *) httpContext, hostname_pattern.c_str(), options, domainRouter);
int result = us_bun_socket_context_add_server_name(SSL, (struct us_socket_context_t *) httpContext, hostname_pattern.c_str(), options, domainRouter);
if (success) {
*success = result == 0;
}
}
return std::move(*this);
@@ -238,6 +241,18 @@ public:
httpContext = HttpContext<SSL>::create(Loop::get(), options);
}
TemplatedApp(HttpContext<SSL> &context) {
httpContext = &context;
}
static TemplatedApp<SSL>* create(SocketContextOptions options = {}) {
auto* httpContext = HttpContext<SSL>::create(Loop::get(), options);
if (!httpContext) {
return nullptr;
}
return new TemplatedApp<SSL>(*httpContext);
}
bool constructorFailed() {
return !httpContext;
}

View File

@@ -0,0 +1,51 @@
const body = process.env.GITHUB_ISSUE_BODY;
const SENTRY_AUTH_TOKEN = process.env.SENTRY_AUTH_TOKEN;
if (!body || !SENTRY_AUTH_TOKEN) {
throw new Error("Missing environment variables");
}
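// The issue body embeds a Sentry event id in an HTML comment of the form `<!-- sentry_id: ... -->`; extract it.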
const id = body.indexOf("<!-- sentry_id: ");
const endIdLine = body.indexOf(" -->", id + 1);
if (!(id > -1 && endIdLine > -1)) {
throw new Error("Missing sentry_id");
}
const sentryId = body.slice(id + "<!-- sentry_id: ".length, endIdLine).trim();
if (!sentryId) {
throw new Error("Missing sentry_id");
}
const response = await fetch(`https://sentry.io/api/0/organizations/4507155222364160/eventids/${sentryId}/`, {
headers: {
Authorization: `Bearer ${SENTRY_AUTH_TOKEN}`,
},
});
if (!response.ok) {
throw new Error(`Failed to fetch Sentry event: ${response.statusText}`);
}
const json = await response.json();
const groupId = json?.groupId;
if (!groupId) {
throw new Error("Missing groupId");
}
const issueResponse = await fetch(`https://sentry.io/api/0/issues/${groupId}/`, {
headers: {
Authorization: `Bearer ${SENTRY_AUTH_TOKEN}`,
},
});
if (!issueResponse.ok) {
throw new Error(`Failed to fetch Sentry issue: ${issueResponse.statusText}`);
}
const { shortId, permalink } = await issueResponse.json();
if (!shortId || !permalink) {
throw new Error("Missing shortId or permalink");
}
console.log(`Sentry ID: ${shortId}`);
console.log(`Sentry permalink: ${permalink}`);
await Bun.write("sentry-id.txt", shortId);
await Bun.write("sentry-link.txt", permalink);
export {};

714
scripts/bootstrap.sh Executable file
View File

@@ -0,0 +1,714 @@
#!/bin/sh
# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.
# If this script does not work on your machine, please open an issue:
# https://github.com/oven-sh/bun/issues
# If you need to make a change to this script, such as upgrading a dependency,
# increment the version number to indicate that a new image should be built.
# Otherwise, the existing image will be retroactively updated.
v="3"
pid=$$
script="$(realpath "$0")"
print() {
echo "$@"
}
error() {
echo "error: $@" >&2
kill -s TERM "$pid"
exit 1
}
execute() {
print "$ $@" >&2
if ! "$@"; then
error "Command failed: $@"
fi
}
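# Run a command that needs root: directly when the script is already running as root ($sudo=1), otherwise prefixed with sudo.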
execute_sudo() {
if [ "$sudo" = "1" ]; then
execute "$@"
else
execute sudo "$@"
fi
}
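# Run a command as the invoking non-root user: via `sudo -u` when the script runs as root, otherwise directly.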
execute_non_root() {
if [ "$sudo" = "1" ]; then
execute sudo -u "$user" "$@"
else
execute "$@"
fi
}
which() {
command -v "$1"
}
require() {
path="$(which "$1")"
if ! [ -f "$path" ]; then
error "Command \"$1\" is required, but is not installed."
fi
echo "$path"
}
fetch() {
curl=$(which curl)
if [ -f "$curl" ]; then
execute "$curl" -fsSL "$1"
else
wget=$(which wget)
if [ -f "$wget" ]; then
execute "$wget" -qO- "$1"
else
error "Command \"curl\" or \"wget\" is required, but is not installed."
fi
fi
}
download_file() {
url="$1"
filename="${2:-$(basename "$url")}"
path="$(mktemp -d)/$filename"
fetch "$url" > "$path"
print "$path"
}
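# Compare two version strings using `sort -V`: prints 0 when equal, -1 when $1 is older than $2, and 1 otherwise.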
compare_version() {
if [ "$1" = "$2" ]; then
echo "0"
elif [ "$1" = "$(echo -e "$1\n$2" | sort -V | head -n1)" ]; then
echo "-1"
else
echo "1"
fi
}
append_to_file() {
file="$1"
content="$2"
if ! [ -f "$file" ]; then
execute mkdir -p "$(dirname "$file")"
execute touch "$file"
fi
echo "$content" | while read -r line; do
if ! grep -q "$line" "$file"; then
echo "$line" >> "$file"
fi
done
}
append_to_profile() {
content="$1"
profiles=".profile .zprofile .bash_profile .bashrc .zshrc"
for profile in $profiles; do
file="$HOME/$profile"
if [ "$ci" = "1" ] || [ -f "$file" ]; then
append_to_file "$file" "$content"
fi
done
}
append_to_path() {
path="$1"
if ! [ -d "$path" ]; then
error "Could not find directory: \"$path\""
fi
append_to_profile "export PATH=\"$path:\$PATH\""
export PATH="$path:$PATH"
}
check_system() {
uname="$(require uname)"
os="$($uname -s)"
case "$os" in
Linux*) os="linux" ;;
Darwin*) os="darwin" ;;
*) error "Unsupported operating system: $os" ;;
esac
arch="$($uname -m)"
case "$arch" in
x86_64 | x64 | amd64) arch="x64" ;;
aarch64 | arm64) arch="aarch64" ;;
*) error "Unsupported architecture: $arch" ;;
esac
kernel="$(uname -r)"
if [ "$os" = "darwin" ]; then
sw_vers="$(which sw_vers)"
if [ -f "$sw_vers" ]; then
distro="$($sw_vers -productName)"
release="$($sw_vers -productVersion)"
fi
if [ "$arch" = "x64" ]; then
sysctl="$(which sysctl)"
if [ -f "$sysctl" ] && [ "$($sysctl -n sysctl.proc_translated 2>/dev/null)" = "1" ]; then
arch="aarch64"
rosetta="1"
fi
fi
fi
if [ "$os" = "linux" ] && [ -f /etc/os-release ]; then
. /etc/os-release
if [ -n "$ID" ]; then
distro="$ID"
fi
if [ -n "$VERSION_ID" ]; then
release="$VERSION_ID"
fi
fi
if [ "$os" = "linux" ]; then
rpm="$(which rpm)"
if [ -f "$rpm" ]; then
glibc="$($rpm -q glibc --queryformat '%{VERSION}\n')"
else
ldd="$(which ldd)"
awk="$(which awk)"
if [ -f "$ldd" ] && [ -f "$awk" ]; then
glibc="$($ldd --version | $awk 'NR==1{print $NF}')"
fi
fi
fi
if [ "$os" = "darwin" ]; then
brew="$(which brew)"
pm="brew"
fi
if [ "$os" = "linux" ]; then
apt="$(which apt-get)"
if [ -f "$apt" ]; then
pm="apt"
else
dnf="$(which dnf)"
if [ -f "$dnf" ]; then
pm="dnf"
else
yum="$(which yum)"
if [ -f "$yum" ]; then
pm="yum"
fi
fi
fi
if [ -z "$pm" ]; then
error "No package manager found. (apt, dnf, yum)"
fi
fi
if [ -n "$SUDO_USER" ]; then
user="$SUDO_USER"
else
whoami="$(which whoami)"
if [ -f "$whoami" ]; then
user="$($whoami)"
else
error "Could not determine the current user, set \$USER."
fi
fi
id="$(which id)"
if [ -f "$id" ] && [ "$($id -u)" = "0" ]; then
sudo=1
fi
if [ "$CI" = "true" ]; then
ci=1
fi
print "System information:"
if [ -n "$distro" ]; then
print "| Distro: $distro $release"
fi
print "| Operating system: $os"
print "| Architecture: $arch"
if [ -n "$rosetta" ]; then
print "| Rosetta: true"
fi
if [ -n "$glibc" ]; then
print "| Glibc: $glibc"
fi
print "| Package manager: $pm"
print "| User: $user"
if [ -n "$sudo" ]; then
print "| Sudo: true"
fi
if [ -n "$ci" ]; then
print "| CI: true"
fi
}
package_manager() {
case "$pm" in
apt) DEBIAN_FRONTEND=noninteractive \
execute "$apt" "$@" ;;
dnf) execute dnf "$@" ;;
yum) execute "$yum" "$@" ;;
brew)
if ! [ -f "$(which brew)" ]; then
install_brew
fi
execute_non_root brew "$@"
;;
*) error "Unsupported package manager: $pm" ;;
esac
}
update_packages() {
case "$pm" in
apt)
package_manager update
;;
esac
}
check_package() {
case "$pm" in
apt)
apt-cache policy "$1"
;;
dnf | yum | brew)
package_manager info "$1"
;;
*)
error "Unsupported package manager: $pm"
;;
esac
}
install_packages() {
case "$pm" in
apt)
package_manager install --yes --no-install-recommends "$@"
;;
dnf)
package_manager install --assumeyes --nodocs --noautoremove --allowerasing "$@"
;;
yum)
package_manager install -y "$@"
;;
brew)
package_manager install --force --formula "$@"
package_manager link --force --overwrite "$@"
;;
*)
error "Unsupported package manager: $pm"
;;
esac
}
get_version() {
command="$1"
path="$(which "$command")"
if [ -f "$path" ]; then
case "$command" in
go | zig) "$path" version ;;
*) "$path" --version ;;
esac
else
print "not found"
fi
}
install_brew() {
bash="$(require bash)"
script=$(download_file "https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh")
NONINTERACTIVE=1 execute_non_root "$bash" "$script"
case "$arch" in
x64)
append_to_path "/usr/local/bin"
;;
aarch64)
append_to_path "/opt/homebrew/bin"
;;
esac
case "$ci" in
1)
append_to_profile "export HOMEBREW_NO_INSTALL_CLEANUP=1"
append_to_profile "export HOMEBREW_NO_AUTO_UPDATE=1"
append_to_profile "export HOMEBREW_NO_ANALYTICS=1"
;;
esac
}
install_common_software() {
case "$pm" in
apt) install_packages \
apt-transport-https \
software-properties-common
;;
dnf) install_packages \
dnf-plugins-core \
tar
;;
esac
install_packages \
bash \
ca-certificates \
curl \
jq \
htop \
gnupg \
git \
unzip \
wget \
zip
install_rosetta
install_nodejs
install_bun
}
install_nodejs() {
version="${1:-"22"}"
if ! [ "$(compare_version "$glibc" "2.27")" = "1" ]; then
version="16"
fi
case "$pm" in
dnf | yum)
bash="$(require bash)"
script=$(download_file "https://rpm.nodesource.com/setup_$version.x")
execute "$bash" "$script"
;;
apt)
bash="$(require bash)"
script=$(download_file "https://deb.nodesource.com/setup_$version.x")
execute "$bash" "$script"
;;
esac
install_packages nodejs
}
install_bun() {
bash="$(require bash)"
script=$(download_file "https://bun.sh/install")
version="${1:-"latest"}"
case "$version" in
latest)
execute "$bash" "$script"
;;
*)
execute "$bash" "$script" -s "$version"
;;
esac
append_to_path "$HOME/.bun/bin"
}
install_rosetta() {
case "$os" in
darwin)
if ! [ "$(which arch)" ]; then
execute softwareupdate \
--install-rosetta \
--agree-to-license
fi
;;
esac
}
install_build_essentials() {
case "$pm" in
apt) install_packages \
build-essential \
ninja-build \
xz-utils
;;
dnf | yum) install_packages \
ninja-build \
gcc-c++ \
xz
;;
brew) install_packages \
ninja
;;
esac
install_packages \
make \
cmake \
pkg-config \
python3 \
libtool \
ruby \
perl \
golang
install_llvm
install_ccache
install_rust
install_docker
}
llvm_version_exact() {
case "$os" in
linux)
print "16.0.6"
;;
darwin | windows)
print "18.1.8"
;;
esac
}
llvm_version() {
echo "$(llvm_version_exact)" | cut -d. -f1
}
install_llvm() {
case "$pm" in
apt)
bash="$(require bash)"
script=$(download_file "https://apt.llvm.org/llvm.sh")
execute "$bash" "$script" "$(llvm_version)" all
;;
brew)
install_packages "llvm@$(llvm_version)"
;;
esac
}
install_ccache() {
case "$pm" in
apt | brew)
install_packages ccache
;;
esac
}
install_rust() {
sh="$(require sh)"
script=$(download_file "https://sh.rustup.rs")
execute "$sh" "$script" -y
append_to_path "$HOME/.cargo/bin"
}
install_docker() {
case "$pm" in
brew)
if ! [ -d "/Applications/Docker.app" ]; then
package_manager install docker --cask
fi
;;
*)
case "$distro-$release" in
amzn-2 | amzn-1)
execute amazon-linux-extras install docker
;;
amzn-*)
install_packages docker
;;
*)
sh="$(require sh)"
script=$(download_file "https://get.docker.com")
execute "$sh" "$script"
;;
esac
;;
esac
systemctl="$(which systemctl)"
if [ -f "$systemctl" ]; then
execute "$systemctl" enable docker
fi
}
install_ci_dependencies() {
if ! [ "$ci" = "1" ]; then
return
fi
install_tailscale
install_buildkite
}
install_tailscale() {
case "$os" in
linux)
sh="$(require sh)"
script=$(download_file "https://tailscale.com/install.sh")
execute "$sh" "$script"
;;
darwin)
install_packages go
execute_non_root go install tailscale.com/cmd/tailscale{,d}@latest
append_to_path "$HOME/go/bin"
;;
esac
}
install_buildkite() {
home_dir="/var/lib/buildkite-agent"
config_dir="/etc/buildkite-agent"
config_file="$config_dir/buildkite-agent.cfg"
if ! [ -d "$home_dir" ]; then
execute_sudo mkdir -p "$home_dir"
fi
if ! [ -d "$config_dir" ]; then
execute_sudo mkdir -p "$config_dir"
fi
case "$os" in
linux)
getent="$(require getent)"
if [ -z "$("$getent" passwd buildkite-agent)" ]; then
useradd="$(require useradd)"
execute "$useradd" buildkite-agent \
--system \
--no-create-home \
--home-dir "$home_dir"
fi
if [ -n "$("$getent" group docker)" ]; then
usermod="$(require usermod)"
execute "$usermod" -aG docker buildkite-agent
fi
execute chown -R buildkite-agent:buildkite-agent "$home_dir"
execute chown -R buildkite-agent:buildkite-agent "$config_dir"
;;
darwin)
execute_sudo chown -R "$user:admin" "$home_dir"
execute_sudo chown -R "$user:admin" "$config_dir"
;;
esac
if ! [ -f "$config_file" ]; then
cat <<EOF >"$config_file"
# This is generated by scripts/bootstrap.sh
# https://buildkite.com/docs/agent/v3/configuration
name="%hostname-%random"
tags="v=$v,os=$os,arch=$arch,distro=$distro,release=$release,kernel=$kernel,glibc=$glibc"
build-path="$home_dir/builds"
git-mirrors-path="$home_dir/git"
job-log-path="$home_dir/logs"
plugins-path="$config_dir/plugins"
hooks-path="$config_dir/hooks"
no-ssh-keyscan=true
cancel-grace-period=3600000 # 1 hour
enable-job-log-tmpfile=true
experiment="normalised-upload-paths,resolve-commit-after-checkout,agent-api"
EOF
fi
bash="$(require bash)"
script=$(download_file "https://raw.githubusercontent.com/buildkite/agent/main/install.sh")
execute "$bash" "$script"
out_dir="$HOME/.buildkite-agent"
execute_sudo mv -f "$out_dir/bin/buildkite-agent" "/usr/local/bin/buildkite-agent"
execute rm -rf "$out_dir"
}
install_chrome_dependencies() {
# https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#chrome-doesnt-launch-on-linux
# https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#running-puppeteer-in-the-cloud
case "$pm" in
apt)
install_packages \
fonts-liberation \
libatk-bridge2.0-0 \
libatk1.0-0 \
libc6 \
libcairo2 \
libcups2 \
libdbus-1-3 \
libexpat1 \
libfontconfig1 \
libgbm1 \
libgcc1 \
libglib2.0-0 \
libgtk-3-0 \
libnspr4 \
libnss3 \
libpango-1.0-0 \
libpangocairo-1.0-0 \
libstdc++6 \
libx11-6 \
libx11-xcb1 \
libxcb1 \
libxcomposite1 \
libxcursor1 \
libxdamage1 \
libxext6 \
libxfixes3 \
libxi6 \
libxrandr2 \
libxrender1 \
libxss1 \
libxtst6 \
xdg-utils
# Fixes issue in newer version of Ubuntu:
# Package 'libasound2' has no installation candidate
if [ "$(check_package "libasound2t64")" ]; then
install_packages libasound2t64
else
install_packages libasound2
fi
;;
dnf | yum)
install_packages \
alsa-lib \
atk \
cups-libs \
gtk3 \
ipa-gothic-fonts \
libXcomposite \
libXcursor \
libXdamage \
libXext \
libXi \
libXrandr \
libXScrnSaver \
libXtst \
pango \
xorg-x11-fonts-100dpi \
xorg-x11-fonts-75dpi \
xorg-x11-fonts-cyrillic \
xorg-x11-fonts-misc \
xorg-x11-fonts-Type1 \
xorg-x11-utils
;;
esac
}
main() {
check_system
update_packages
install_common_software
install_build_essentials
install_chrome_dependencies
install_ci_dependencies
}
main

View File

@@ -130,7 +130,10 @@ function getCachePath(branch) {
const repository = process.env.BUILDKITE_REPO;
const fork = process.env.BUILDKITE_PULL_REQUEST_REPO;
const repositoryKey = (fork || repository).replace(/[^a-z0-9]/gi, "-");
const branchKey = (branch || process.env.BUILDKITE_BRANCH).replace(/[^a-z0-9]/gi, "-");
const branchName = (branch || process.env.BUILDKITE_BRANCH).replace(/[^a-z0-9]/gi, "-");
const branchKey = branchName.startsWith("gh-readonly-queue-")
? branchName.slice(18, branchName.indexOf("-pr-"))
: branchName;
const stepKey = process.env.BUILDKITE_STEP_KEY.replace(/[^a-z0-9]/gi, "-");
return resolve(buildPath, "..", "cache", repositoryKey, branchKey, stepKey);
}

View File

@@ -168,7 +168,10 @@ pub fn init(options: Options) !*DevServer {
else
null;
const app = App.create(.{});
const app = App.create(.{}) orelse {
Output.prettyErrorln("Failed to create app", .{});
return error.AppInitialization;
};
const separate_ssr_graph = if (options.framework.server_components) |sc| sc.separate_ssr_graph else false;

View File

@@ -69,22 +69,22 @@ pub const ResolveMessage = struct {
switch (err) {
error.ModuleNotFound => {
if (strings.eqlComptime(referrer, "bun:main")) {
return try std.fmt.allocPrint(allocator, "Module not found \"{s}\"", .{specifier});
return try std.fmt.allocPrint(allocator, "Module not found '{s}'", .{specifier});
}
if (Resolver.isPackagePath(specifier) and !strings.containsChar(specifier, '/')) {
return try std.fmt.allocPrint(allocator, "Cannot find package \"{s}\" from \"{s}\"", .{ specifier, referrer });
return try std.fmt.allocPrint(allocator, "Cannot find package '{s}' from '{s}'", .{ specifier, referrer });
} else {
return try std.fmt.allocPrint(allocator, "Cannot find module \"{s}\" from \"{s}\"", .{ specifier, referrer });
return try std.fmt.allocPrint(allocator, "Cannot find module '{s}' from '{s}'", .{ specifier, referrer });
}
},
error.InvalidDataURL => {
return try std.fmt.allocPrint(allocator, "Cannot resolve invalid data URL \"{s}\" from \"{s}\"", .{ specifier, referrer });
return try std.fmt.allocPrint(allocator, "Cannot resolve invalid data URL '{s}' from '{s}'", .{ specifier, referrer });
},
else => {
if (Resolver.isPackagePath(specifier)) {
return try std.fmt.allocPrint(allocator, "{s} while resolving package \"{s}\" from \"{s}\"", .{ @errorName(err), specifier, referrer });
return try std.fmt.allocPrint(allocator, "{s} while resolving package '{s}' from '{s}'", .{ @errorName(err), specifier, referrer });
} else {
return try std.fmt.allocPrint(allocator, "{s} while resolving \"{s}\" from \"{s}\"", .{ @errorName(err), specifier, referrer });
return try std.fmt.allocPrint(allocator, "{s} while resolving '{s}' from '{s}'", .{ @errorName(err), specifier, referrer });
}
},
}

View File

@@ -4,7 +4,8 @@
/// Version 5: `require.main === module` no longer marks a module as CJS
/// Version 6: `use strict` is preserved in CommonJS modules when at the top of the file
/// Version 7: Several bundler changes that are likely to impact the runtime as well.
const expected_version = 7;
/// Version 8: Fix for generated symbols
const expected_version = 8;
const bun = @import("root").bun;
const std = @import("std");

View File

@@ -1221,9 +1221,10 @@ pub const Crypto = struct {
pub usingnamespace bun.New(@This());
pub fn init(algorithm: EVP.Algorithm, key: []const u8) ?*HMAC {
const md = algorithm.md() orelse return null;
var ctx: BoringSSL.HMAC_CTX = undefined;
BoringSSL.HMAC_CTX_init(&ctx);
if (BoringSSL.HMAC_Init_ex(&ctx, key.ptr, @intCast(key.len), algorithm.md(), null) != 1) {
if (BoringSSL.HMAC_Init_ex(&ctx, key.ptr, @intCast(key.len), md, null) != 1) {
BoringSSL.HMAC_CTX_cleanup(&ctx);
return null;
}
@@ -2645,7 +2646,7 @@ pub const Crypto = struct {
inline else => |*str| {
defer str.deinit();
const encoding = JSC.Node.Encoding.from(str.slice()) orelse {
globalThis.throwInvalidArguments("Unknown encoding: {s}", .{str.slice()});
globalThis.ERR_INVALID_ARG_VALUE("Unknown encoding: {s}", .{str.slice()}).throw();
return JSC.JSValue.zero;
};
@@ -2714,7 +2715,7 @@ pub const Crypto = struct {
BoringSSL.ERR_clear_error();
globalThis.throwValue(instance);
} else {
globalThis.throwTODO("HMAC is not supported for this algorithm");
globalThis.throwTODO("HMAC is not supported for this algorithm yet");
}
}
return null;
@@ -2833,7 +2834,7 @@ pub const Crypto = struct {
inline else => |*str| {
defer str.deinit();
const encoding = JSC.Node.Encoding.from(str.slice()) orelse {
globalThis.throwInvalidArguments("Unknown encoding: {}", .{str.*});
globalThis.ERR_INVALID_ARG_VALUE("Unknown encoding: {s}", .{str.slice()}).throw();
return JSC.JSValue.zero;
};
@@ -2964,8 +2965,16 @@ pub const Crypto = struct {
switch (string_or_buffer) {
inline else => |*str| {
defer str.deinit();
globalThis.throwInvalidArguments("Unknown encoding: {s}", .{str.slice()});
return JSC.JSValue.zero;
const encoding = JSC.Node.Encoding.from(str.slice()) orelse {
globalThis.ERR_INVALID_ARG_VALUE("Unknown encoding: {s}", .{str.slice()}).throw();
return JSC.JSValue.zero;
};
if (encoding == .buffer) {
return hashByNameInnerToBytes(globalThis, Algorithm, input, null);
}
return hashByNameInnerToString(globalThis, Algorithm, input, encoding);
},
.buffer => |buffer| {
return hashByNameInnerToBytes(globalThis, Algorithm, input, buffer.buffer);
@@ -2975,6 +2984,23 @@ pub const Crypto = struct {
return hashByNameInnerToBytes(globalThis, Algorithm, input, null);
}
fn hashByNameInnerToString(globalThis: *JSGlobalObject, comptime Algorithm: type, input: JSC.Node.BlobOrStringOrBuffer, encoding: JSC.Node.Encoding) JSC.JSValue {
defer input.deinit();
if (input == .blob and input.blob.isBunFile()) {
globalThis.throw("Bun.file() is not supported here yet (it needs an async version)", .{});
return .zero;
}
var h = Algorithm.init(.{});
h.update(input.slice());
var out: [digestLength(Algorithm)]u8 = undefined;
h.final(&out);
return encoding.encodeWithSize(globalThis, digestLength(Algorithm), &out);
}
fn hashByNameInnerToBytes(globalThis: *JSGlobalObject, comptime Algorithm: type, input: JSC.Node.BlobOrStringOrBuffer, output: ?JSC.ArrayBuffer) JSC.JSValue {
defer input.deinit();
@@ -3064,6 +3090,7 @@ pub const Crypto = struct {
fn StaticCryptoHasher(comptime Hasher: type, comptime name: [:0]const u8) type {
return struct {
hashing: Hasher = Hasher{},
digested: bool = false,
const ThisHasher = @This();
@@ -3155,7 +3182,7 @@ pub const Crypto = struct {
inline else => |*str| {
defer str.deinit();
const encoding = JSC.Node.Encoding.from(str.slice()) orelse {
globalThis.throwInvalidArguments("Unknown encoding: {s}", .{str.slice()});
globalThis.ERR_INVALID_ARG_VALUE("Unknown encoding: {s}", .{str.slice()}).throw();
return JSC.JSValue.zero;
};
@@ -3185,6 +3212,10 @@ pub const Crypto = struct {
}
pub fn update(this: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JSC.JSValue {
if (this.digested) {
globalThis.ERR_INVALID_STATE(name ++ " hasher already digested, create a new instance to update", .{}).throw();
return .zero;
}
const thisValue = callframe.this();
const input = callframe.argument(0);
const buffer = JSC.Node.BlobOrStringOrBuffer.fromJS(globalThis, globalThis.bunVM().allocator, input) orelse {
@@ -3206,12 +3237,16 @@ pub const Crypto = struct {
globalThis: *JSGlobalObject,
output: ?JSC.Node.StringOrBuffer,
) JSC.JSValue {
if (this.digested) {
globalThis.ERR_INVALID_STATE(name ++ " hasher already digested, create a new instance to digest again", .{}).throw();
return .zero;
}
if (output) |*string_or_buffer| {
switch (string_or_buffer.*) {
inline else => |*str| {
defer str.deinit();
const encoding = JSC.Node.Encoding.from(str.slice()) orelse {
globalThis.throwInvalidArguments("Unknown encoding: \"{s}\"", .{str.slice()});
globalThis.ERR_INVALID_ARG_VALUE("Unknown encoding: {s}", .{str.slice()}).throw();
return JSC.JSValue.zero;
};
@@ -3244,6 +3279,7 @@ pub const Crypto = struct {
}
this.hashing.final(output_digest_slice);
this.digested = true;
if (output) |output_buf| {
return output_buf.value;
@@ -3267,6 +3303,7 @@ pub const Crypto = struct {
const output_digest_slice: *Hasher.Digest = &output_digest_buf;
this.hashing.final(output_digest_slice);
this.digested = true;
return encoding.encodeWithSize(globalThis, Hasher.digest, output_digest_slice);
}
@@ -3305,25 +3342,24 @@ pub fn serve(
const exception = &exception_;
var args = JSC.Node.ArgumentsSlice.init(globalObject.bunVM(), arguments);
var config_ = JSC.API.ServerConfig.fromJS(globalObject.ptr(), &args, exception);
var config: JSC.API.ServerConfig = .{};
JSC.API.ServerConfig.fromJS(globalObject, &config, &args, exception);
if (exception[0] != null) {
config_.deinit();
config.deinit();
globalObject.throwValue(exception_[0].?.value());
return .undefined;
return .zero;
}
if (globalObject.hasException()) {
config_.deinit();
config.deinit();
return .zero;
}
break :brk config_;
break :brk config;
};
var exception_value: *JSC.JSValue = undefined;
if (config.allow_hot) {
if (globalObject.bunVM().hotMap()) |hot| {
if (config.id.len == 0) {
@@ -3358,98 +3394,43 @@ pub fn serve(
}
}
// Listen happens on the next tick!
// This is so we can return a Server object
if (config.ssl_config != null) {
if (config.development) {
var server = JSC.API.DebugHTTPSServer.init(config, globalObject.ptr());
exception_value = &server.thisObject;
server.listen();
if (!server.thisObject.isEmpty()) {
exception_value.unprotect();
globalObject.throwValue(server.thisObject);
server.thisObject = JSC.JSValue.zero;
server.deinit();
return .zero;
}
const obj = server.toJS(globalObject);
obj.protect();
switch (config.ssl_config != null) {
inline else => |has_ssl_config| {
switch (config.development) {
inline else => |development| {
const ServerType = comptime switch (development) {
true => switch (has_ssl_config) {
true => JSC.API.DebugHTTPSServer,
false => JSC.API.DebugHTTPServer,
},
false => switch (has_ssl_config) {
true => JSC.API.HTTPSServer,
false => JSC.API.HTTPServer,
},
};
server.thisObject = obj;
var server = ServerType.init(config, globalObject);
if (globalObject.hasException()) {
return .zero;
}
server.listen();
if (globalObject.hasException()) {
return .zero;
}
const obj = server.toJS(globalObject);
obj.protect();
if (config.allow_hot) {
if (globalObject.bunVM().hotMap()) |hot| {
hot.insert(config.id, server);
}
}
return obj;
} else {
var server = JSC.API.HTTPSServer.init(config, globalObject.ptr());
exception_value = &server.thisObject;
server.listen();
if (!exception_value.isEmpty()) {
exception_value.unprotect();
globalObject.throwValue(exception_value.*);
server.thisObject = JSC.JSValue.zero;
server.deinit();
return .zero;
}
const obj = server.toJS(globalObject);
obj.protect();
server.thisObject = obj;
server.thisObject = obj;
if (config.allow_hot) {
if (globalObject.bunVM().hotMap()) |hot| {
hot.insert(config.id, server);
}
if (config.allow_hot) {
if (globalObject.bunVM().hotMap()) |hot| {
hot.insert(config.id, server);
}
}
return obj;
},
}
return obj;
}
} else {
if (config.development) {
var server = JSC.API.DebugHTTPServer.init(config, globalObject.ptr());
exception_value = &server.thisObject;
server.listen();
if (!exception_value.isEmpty()) {
exception_value.unprotect();
globalObject.throwValue(exception_value.*);
server.thisObject = JSC.JSValue.zero;
server.deinit();
return .zero;
}
const obj = server.toJS(globalObject);
obj.protect();
server.thisObject = obj;
if (config.allow_hot) {
if (globalObject.bunVM().hotMap()) |hot| {
hot.insert(config.id, server);
}
}
return obj;
} else {
var server = JSC.API.HTTPServer.init(config, globalObject.ptr());
exception_value = &server.thisObject;
server.listen();
if (!exception_value.isEmpty()) {
exception_value.unprotect();
globalObject.throwValue(exception_value.*);
server.thisObject = JSC.JSValue.zero;
server.deinit();
return .zero;
}
const obj = server.toJS(globalObject);
obj.protect();
server.thisObject = obj;
if (config.allow_hot) {
if (globalObject.bunVM().hotMap()) |hot| {
hot.insert(config.id, server);
}
}
return obj;
}
},
}
unreachable;
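
The rewritten block above folds the four near-identical setup paths into a single comptime switch over `has_ssl_config` and `development`. A hedged sketch of how those two inputs are chosen from JS (the internal type names are from the hunk, not a public API):

```ts
// Which internal server type the comptime switch selects is determined by two
// Bun.serve() options; this sketch only shows the user-facing side.
const server = Bun.serve({
  port: 0,                // let the OS pick a free port
  development: false,     // false -> HTTPServer / HTTPSServer, true -> the Debug* variants
  // tls: { key, cert },  // presence of a tls config -> the HTTPS variants
  fetch() {
    return new Response("ok");
  },
});
console.log(server.port);
server.stop();
```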

View File

@@ -1141,6 +1141,7 @@ pub const H2FrameParser = struct {
this.signal = null;
signal.deinit();
}
JSC.VirtualMachine.get().eventLoop().processGCTimer();
}
};
@@ -1611,7 +1612,7 @@ pub const H2FrameParser = struct {
// fallback to onWrite non-native callback
const output_value = this.handlers.binary_type.toJS(bytes, this.handlers.globalObject);
const result = this.call(.onWrite, output_value);
const code = result.to(i32);
const code = if (result.isNumber()) result.to(i32) else -1;
switch (code) {
-1 => {
// dropped
@@ -1757,7 +1758,7 @@ pub const H2FrameParser = struct {
return data.len;
}
pub fn decodeHeaderBlock(this: *H2FrameParser, payload: []const u8, stream: *Stream, flags: u8) *Stream {
pub fn decodeHeaderBlock(this: *H2FrameParser, payload: []const u8, stream: *Stream, flags: u8) ?*Stream {
log("decodeHeaderBlock isSever: {}", .{this.isServer});
var offset: usize = 0;
@@ -1776,7 +1777,9 @@ pub const H2FrameParser = struct {
log("header {s} {s}", .{ header.name, header.value });
if (this.isServer and strings.eqlComptime(header.name, ":status")) {
this.sendGoAway(stream_id, ErrorCode.PROTOCOL_ERROR, "Server received :status header", this.lastStreamID, true);
return this.streams.getEntry(stream_id).?.value_ptr;
if (this.streams.getEntry(stream_id)) |entry| return entry.value_ptr;
return null;
}
count += 1;
if (this.maxHeaderListPairs < count) {
@@ -1786,7 +1789,8 @@ pub const H2FrameParser = struct {
} else {
this.endStream(stream, ErrorCode.ENHANCE_YOUR_CALM);
}
return this.streams.getEntry(stream_id).?.value_ptr;
if (this.streams.getEntry(stream_id)) |entry| return entry.value_ptr;
return null;
}
const output = brk: {
@@ -1817,7 +1821,8 @@ pub const H2FrameParser = struct {
this.dispatchWith3Extra(.onStreamHeaders, stream.getIdentifier(), headers, sensitiveHeaders, JSC.JSValue.jsNumber(flags));
// callbacks can change the Stream ptr in this case we always return the new one
return this.streams.getEntry(stream_id).?.value_ptr;
if (this.streams.getEntry(stream_id)) |entry| return entry.value_ptr;
return null;
}
pub fn handleDataFrame(this: *H2FrameParser, frame: FrameHeader, data: []const u8, stream_: ?*Stream) usize {
@@ -1882,7 +1887,8 @@ pub const H2FrameParser = struct {
this.currentFrame = null;
if (emitted) {
// we need to revalidate the stream ptr after emitting onStreamData
stream = this.streams.getEntry(frame.streamIdentifier).?.value_ptr;
const entry = this.streams.getEntry(frame.streamIdentifier) orelse return end;
stream = entry.value_ptr;
}
if (frame.flags & @intFromEnum(DataFrameFlags.END_STREAM) != 0) {
const identifier = stream.getIdentifier();
@@ -2029,7 +2035,10 @@ pub const H2FrameParser = struct {
}
if (handleIncommingPayload(this, data, frame.streamIdentifier)) |content| {
const payload = content.data;
stream = this.decodeHeaderBlock(payload[0..payload.len], stream, frame.flags);
stream = this.decodeHeaderBlock(payload[0..payload.len], stream, frame.flags) orelse {
this.readBuffer.reset();
return content.end;
};
this.readBuffer.reset();
if (frame.flags & @intFromEnum(HeadersFrameFlags.END_HEADERS) != 0) {
stream.isWaitingMoreHeaders = false;
@@ -2092,7 +2101,10 @@ pub const H2FrameParser = struct {
this.sendGoAway(frame.streamIdentifier, ErrorCode.FRAME_SIZE_ERROR, "invalid Headers frame size", this.lastStreamID, true);
return data.len;
}
stream = this.decodeHeaderBlock(payload[offset..end], stream, frame.flags);
stream = this.decodeHeaderBlock(payload[offset..end], stream, frame.flags) orelse {
this.readBuffer.reset();
return content.end;
};
this.readBuffer.reset();
stream.isWaitingMoreHeaders = frame.flags & @intFromEnum(HeadersFrameFlags.END_HEADERS) == 0;
if (frame.flags & @intFromEnum(HeadersFrameFlags.END_STREAM) != 0) {
@@ -3253,7 +3265,26 @@ pub const H2FrameParser = struct {
}
return array;
}
pub fn emitAbortToAllStreams(this: *H2FrameParser, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue {
JSC.markBinding(@src());
var it = StreamResumableIterator.init(this);
while (it.next()) |stream| {
// this is the opposite logic of emitErrorToAllStreams; in this case we want to cancel these streams
if (this.isServer) {
if (stream.id % 2 == 0) continue;
} else if (stream.id % 2 != 0) continue;
if (stream.state != .CLOSED) {
const old_state = stream.state;
stream.state = .CLOSED;
stream.rstCode = @intFromEnum(ErrorCode.CANCEL);
const identifier = stream.getIdentifier();
identifier.ensureStillAlive();
stream.freeResources(this, false);
this.dispatchWith2Extra(.onAborted, identifier, .undefined, JSC.JSValue.jsNumber(@intFromEnum(old_state)));
}
}
return .undefined;
}
pub fn emitErrorToAllStreams(this: *H2FrameParser, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JSC.JSValue {
JSC.markBinding(@src());
@@ -3265,6 +3296,9 @@ pub const H2FrameParser = struct {
var it = StreamResumableIterator.init(this);
while (it.next()) |stream| {
if (this.isServer) {
if (stream.id % 2 != 0) continue;
} else if (stream.id % 2 == 0) continue;
if (stream.state != .CLOSED) {
stream.state = .CLOSED;
stream.rstCode = args_list.ptr[0].to(u32);
@@ -3675,6 +3709,7 @@ pub const H2FrameParser = struct {
}
const socket_js = args_list.ptr[0];
this.detachNativeSocket();
if (JSTLSSocket.fromJS(socket_js)) |socket| {
log("TLSSocket attached", .{});
if (socket.attachNativeCallback(.{ .h2 = this })) {
@@ -3859,17 +3894,15 @@ pub const H2FrameParser = struct {
}
return this;
}
pub fn deinit(this: *H2FrameParser) void {
log("deinit", .{});
defer {
if (ENABLE_ALLOCATOR_POOL) {
H2FrameParser.pool.?.put(this);
} else {
this.destroy();
}
}
pub fn detachFromJS(this: *H2FrameParser, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSValue {
JSC.markBinding(@src());
this.detach(false);
return .undefined;
}
/// be careful when calling detach: be sure that the socket is closed and the parser is not accessible anymore
/// this function can be called multiple times, it will erase stream info
pub fn detach(this: *H2FrameParser, comptime finalizing: bool) void {
this.flushCorked();
this.detachNativeSocket();
this.strong_ctx.deinit();
this.handlers.deinit();
@@ -3886,9 +3919,24 @@ pub const H2FrameParser = struct {
}
var it = this.streams.valueIterator();
while (it.next()) |stream| {
stream.freeResources(this, true);
stream.freeResources(this, finalizing);
}
this.streams.deinit();
var streams = this.streams;
defer streams.deinit();
this.streams = bun.U32HashMap(Stream).init(bun.default_allocator);
}
pub fn deinit(this: *H2FrameParser) void {
log("deinit", .{});
defer {
if (ENABLE_ALLOCATOR_POOL) {
H2FrameParser.pool.?.put(this);
} else {
this.destroy();
}
}
this.detach(true);
}
pub fn finalize(

View File

@@ -1295,17 +1295,13 @@ fn selectALPNCallback(
if (protos.len == 0) {
return BoringSSL.SSL_TLSEXT_ERR_NOACK;
}
const status = BoringSSL.SSL_select_next_proto(bun.cast([*c][*c]u8, out), outlen, protos.ptr, @as(c_uint, @intCast(protos.len)), in, inlen);
// Previous versions of Node.js returned SSL_TLSEXT_ERR_NOACK if no protocol
// match was found. This would neither cause a fatal alert nor would it result
// in a useful ALPN response as part of the Server Hello message.
// We now return SSL_TLSEXT_ERR_ALERT_FATAL in that case as per Section 3.2
// of RFC 7301, which causes a fatal no_application_protocol alert.
const expected = if (comptime BoringSSL.OPENSSL_NPN_NEGOTIATED == 1) BoringSSL.SSL_TLSEXT_ERR_OK else BoringSSL.SSL_TLSEXT_ERR_ALERT_FATAL;
return if (status == expected) 1 else 0;
return if (status == BoringSSL.OPENSSL_NPN_NEGOTIATED) BoringSSL.SSL_TLSEXT_ERR_OK else BoringSSL.SSL_TLSEXT_ERR_ALERT_FATAL;
} else {
return BoringSSL.SSL_TLSEXT_ERR_NOACK;
}
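
The comment above describes a user-visible change: an ALPN mismatch now ends the handshake with a fatal `no_application_protocol` alert instead of a silent non-answer. A hedged `node:tls` sketch of that situation (the PEM paths are hypothetical):

```ts
import { readFileSync } from "node:fs";
import tls from "node:tls";
import type { AddressInfo } from "node:net";

// key.pem / cert.pem are assumed to exist; the paths are hypothetical.
const server = tls.createServer(
  { key: readFileSync("key.pem"), cert: readFileSync("cert.pem"), ALPNProtocols: ["h2", "http/1.1"] },
  socket => socket.end(),
);

server.listen(0, () => {
  const { port } = server.address() as AddressInfo;
  const socket = tls.connect({ port, ALPNProtocols: ["spdy/3"], rejectUnauthorized: false });
  // With the change above, "no common protocol" should surface as a fatal
  // no_application_protocol alert (an 'error' on the client) rather than a
  // completed handshake with socket.alpnProtocol === false.
  socket.on("error", err => {
    console.error("ALPN mismatch:", err.message);
    server.close();
  });
  socket.on("secureConnect", () => {
    console.log("negotiated:", socket.alpnProtocol);
    socket.end();
    server.close();
  });
});
```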

View File

@@ -37,6 +37,10 @@ export default [
fn: "flushFromJS",
length: 0,
},
detach: {
fn: "detachFromJS",
length: 0,
},
rstStream: {
fn: "rstStream",
length: 1,
@@ -93,6 +97,10 @@ export default [
fn: "emitErrorToAllStreams",
length: 1,
},
emitAbortToAllStreams: {
fn: "emitAbortToAllStreams",
length: 0,
},
getNextStream: {
fn: "getNextStream",
length: 0,

View File

@@ -1174,11 +1174,16 @@ pub const ServerConfig = struct {
}
};
pub fn fromJS(global: *JSC.JSGlobalObject, arguments: *JSC.Node.ArgumentsSlice, exception: JSC.C.ExceptionRef) ServerConfig {
pub fn fromJS(
global: *JSC.JSGlobalObject,
args: *ServerConfig,
arguments: *JSC.Node.ArgumentsSlice,
exception: JSC.C.ExceptionRef,
) void {
const vm = arguments.vm;
const env = vm.bundler.env;
var args = ServerConfig{
args.* = .{
.address = .{
.tcp = .{
.port = 3000,
@@ -1236,13 +1241,13 @@ pub const ServerConfig = struct {
if (arguments.next()) |arg| {
if (!arg.isObject()) {
JSC.throwInvalidArguments("Bun.serve expects an object", .{}, global, exception);
return args;
return;
}
if (arg.get(global, "static")) |static| {
if (!static.isObject()) {
JSC.throwInvalidArguments("Bun.serve expects 'static' to be an object shaped like { [pathname: string]: Response }", .{}, global, exception);
return args;
return;
}
var iter = JSC.JSPropertyIterator(.{
@@ -1259,13 +1264,13 @@ pub const ServerConfig = struct {
if (path.len == 0 or path[0] != '/') {
bun.default_allocator.free(path);
JSC.throwInvalidArguments("Invalid static route \"{s}\". path must start with '/'", .{path}, global, exception);
return args;
return;
}
if (!is_ascii) {
bun.default_allocator.free(path);
JSC.throwInvalidArguments("Invalid static route \"{s}\". Please encode all non-ASCII characters in the path.", .{path}, global, exception);
return args;
return;
}
if (StaticRoute.fromJS(global, value)) |route| {
@@ -1275,28 +1280,28 @@ pub const ServerConfig = struct {
}) catch bun.outOfMemory();
} else if (global.hasException()) {
bun.default_allocator.free(path);
return args;
return;
} else {
Output.panic("Internal error: expected exception or static route", .{});
}
}
}
if (global.hasException()) return args;
if (global.hasException()) return;
if (arg.get(global, "idleTimeout")) |value| {
if (!value.isUndefinedOrNull()) {
if (!value.isAnyInt()) {
JSC.throwInvalidArguments("Bun.serve expects idleTimeout to be an integer", .{}, global, exception);
return args;
return;
}
args.has_idleTimeout = true;
const idleTimeout: u64 = @intCast(@max(value.toInt64(), 0));
if (idleTimeout > 255) {
JSC.throwInvalidArguments("Bun.serve expects idleTimeout to be 255 or less", .{}, global, exception);
return args;
return;
}
args.idleTimeout = @truncate(idleTimeout);
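
The validation above is unchanged in substance (only the return style changes with the out-parameter refactor); for reference, a hedged sketch of the options it checks: `static` entries must be ASCII paths starting with `/`, and `idleTimeout` must be an integer of at most 255:

```ts
const server = Bun.serve({
  port: 0,
  idleTimeout: 30, // stored in a u8, so 255 is the maximum
  static: {
    "/health": new Response("ok"), // path must start with '/'
  },
  fetch() {
    return new Response("hello");
  },
});
console.log(server.port);
server.stop();

// Bun.serve({ port: 0, idleTimeout: 300, fetch: () => new Response("") });
// ^ expected to throw "Bun.serve expects idleTimeout to be 255 or less"
```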
@@ -1309,7 +1314,7 @@ pub const ServerConfig = struct {
if (args.ssl_config) |*conf| {
conf.deinit();
}
return args;
return;
}
if (WebSocketServer.onCreate(global, websocket_object)) |wss| {
@@ -1318,10 +1323,10 @@ pub const ServerConfig = struct {
if (args.ssl_config) |*conf| {
conf.deinit();
}
return args;
return;
}
}
if (global.hasException()) return args;
if (global.hasException()) return;
if (arg.getTruthy(global, "port")) |port_| {
args.address.tcp.port = @as(
@@ -1333,7 +1338,7 @@ pub const ServerConfig = struct {
);
port = args.address.tcp.port;
}
if (global.hasException()) return args;
if (global.hasException()) return;
if (arg.getTruthy(global, "baseURI")) |baseURI| {
var sliced = baseURI.toSlice(global, bun.default_allocator);
@@ -1343,7 +1348,7 @@ pub const ServerConfig = struct {
args.base_uri = bun.default_allocator.dupe(u8, sliced.slice()) catch unreachable;
}
}
if (global.hasException()) return args;
if (global.hasException()) return;
if (arg.getTruthy(global, "hostname") orelse arg.getTruthy(global, "host")) |host| {
const host_str = host.toSlice(
@@ -1357,7 +1362,7 @@ pub const ServerConfig = struct {
has_hostname = true;
}
}
if (global.hasException()) return args;
if (global.hasException()) return;
if (arg.getTruthy(global, "unix")) |unix| {
const unix_str = unix.toSlice(
@@ -1368,13 +1373,13 @@ pub const ServerConfig = struct {
if (unix_str.len > 0) {
if (has_hostname) {
JSC.throwInvalidArguments("Cannot specify both hostname and unix", .{}, global, exception);
return args;
return;
}
args.address = .{ .unix = bun.default_allocator.dupeZ(u8, unix_str.slice()) catch unreachable };
}
}
if (global.hasException()) return args;
if (global.hasException()) return;
if (arg.get(global, "id")) |id| {
if (id.isUndefinedOrNull()) {
@@ -1392,59 +1397,59 @@ pub const ServerConfig = struct {
}
}
}
if (global.hasException()) return args;
if (global.hasException()) return;
if (arg.get(global, "development")) |dev| {
args.development = dev.coerce(bool, global);
args.reuse_port = !args.development;
}
if (global.hasException()) return args;
if (global.hasException()) return;
if (arg.get(global, "reusePort")) |dev| {
args.reuse_port = dev.coerce(bool, global);
}
if (global.hasException()) return args;
if (global.hasException()) return;
if (arg.get(global, "inspector")) |inspector| {
args.inspector = inspector.coerce(bool, global);
if (args.inspector and !args.development) {
JSC.throwInvalidArguments("Cannot enable inspector in production. Please set development: true in Bun.serve()", .{}, global, exception);
return args;
return;
}
}
if (global.hasException()) return args;
if (global.hasException()) return;
if (arg.getTruthy(global, "maxRequestBodySize")) |max_request_body_size| {
if (max_request_body_size.isNumber()) {
args.max_request_body_size = @as(u64, @intCast(@max(0, max_request_body_size.toInt64())));
}
}
if (global.hasException()) return args;
if (global.hasException()) return;
if (arg.getTruthyComptime(global, "error")) |onError| {
if (!onError.isCallable(global.vm())) {
JSC.throwInvalidArguments("Expected error to be a function", .{}, global, exception);
return args;
return;
}
const onErrorSnapshot = onError.withAsyncContextIfNeeded(global);
args.onError = onErrorSnapshot;
onErrorSnapshot.protect();
}
if (global.hasException()) return args;
if (global.hasException()) return;
if (arg.getTruthy(global, "fetch")) |onRequest_| {
if (!onRequest_.isCallable(global.vm())) {
JSC.throwInvalidArguments("Expected fetch() to be a function", .{}, global, exception);
return args;
return;
}
const onRequest = onRequest_.withAsyncContextIfNeeded(global);
JSC.C.JSValueProtect(global, onRequest.asObjectRef());
args.onRequest = onRequest;
} else {
if (global.hasException()) return args;
if (global.hasException()) return;
JSC.throwInvalidArguments("Expected fetch() to be a function", .{}, global, exception);
return args;
return;
}
if (arg.getTruthy(global, "tls")) |tls| {
@@ -1452,33 +1457,35 @@ pub const ServerConfig = struct {
var value_iter = tls.arrayIterator(global);
if (value_iter.len == 1) {
JSC.throwInvalidArguments("tls option expects at least 1 tls object", .{}, global, exception);
return args;
return;
}
while (value_iter.next()) |item| {
if (SSLConfig.inJS(vm, global, item, exception)) |ssl_config| {
if (args.ssl_config == null) {
args.ssl_config = ssl_config;
} else {
if (ssl_config.server_name == null or std.mem.span(ssl_config.server_name).len == 0) {
var config = ssl_config;
defer config.deinit();
JSC.throwInvalidArguments("SNI tls object must have a serverName", .{}, global, exception);
return args;
}
if (args.sni == null) {
args.sni = bun.BabyList(SSLConfig).initCapacity(bun.default_allocator, value_iter.len - 1) catch bun.outOfMemory();
}
args.sni.?.push(bun.default_allocator, ssl_config) catch bun.outOfMemory();
var ssl_config = SSLConfig.inJS(vm, global, item, exception) orelse {
if (exception.* != null) {
return;
}
}
if (exception.* != null) {
return args;
}
if (global.hasException()) {
return;
}
if (global.hasException()) {
return args;
// Backwards-compatibility; we ignored empty tls objects.
continue;
};
if (args.ssl_config == null) {
args.ssl_config = ssl_config;
} else {
if (ssl_config.server_name == null or std.mem.span(ssl_config.server_name).len == 0) {
defer ssl_config.deinit();
JSC.throwInvalidArguments("SNI tls object must have a serverName", .{}, global, exception);
return;
}
if (args.sni == null) {
args.sni = bun.BabyList(SSLConfig).initCapacity(bun.default_allocator, value_iter.len - 1) catch bun.outOfMemory();
}
args.sni.?.push(bun.default_allocator, ssl_config) catch bun.outOfMemory();
}
}
} else {
@@ -1487,15 +1494,15 @@ pub const ServerConfig = struct {
}
if (exception.* != null) {
return args;
return;
}
if (global.hasException()) {
return args;
return;
}
}
}
if (global.hasException()) return args;
if (global.hasException()) return;
// @compatibility Bun v0.x - v0.2.1
// this used to be top-level, now it's "tls" object
@@ -1505,16 +1512,16 @@ pub const ServerConfig = struct {
}
if (exception.* != null) {
return args;
return;
}
if (global.hasException()) {
return args;
return;
}
}
} else {
JSC.throwInvalidArguments("Bun.serve expects an object", .{}, global, exception);
return args;
return;
}
if (args.base_uri.len > 0) {
@@ -1523,14 +1530,14 @@ pub const ServerConfig = struct {
JSC.throwInvalidArguments("baseURI must have a hostname", .{}, global, exception);
bun.default_allocator.free(@constCast(args.base_uri));
args.base_uri = "";
return args;
return;
}
if (!strings.isAllASCII(args.base_uri)) {
JSC.throwInvalidArguments("Unicode baseURI must already be encoded for now.\nnew URL(baseuRI).toString() should do the trick.", .{}, global, exception);
bun.default_allocator.free(@constCast(args.base_uri));
args.base_uri = "";
return args;
return;
}
if (args.base_url.protocol.len == 0) {
@@ -1598,7 +1605,7 @@ pub const ServerConfig = struct {
JSC.throwInvalidArguments("Unicode hostnames must already be encoded for now.\nnew URL(input).hostname should do the trick.", .{}, global, exception);
bun.default_allocator.free(@constCast(args.base_uri));
args.base_uri = "";
return args;
return;
}
args.base_url = URL.parse(args.base_uri);
@@ -1610,17 +1617,17 @@ pub const ServerConfig = struct {
JSC.throwInvalidArguments("baseURI must have a hostname", .{}, global, exception);
bun.default_allocator.free(@constCast(args.base_uri));
args.base_uri = "";
return args;
return;
}
if (args.base_url.username.len > 0 or args.base_url.password.len > 0) {
JSC.throwInvalidArguments("baseURI can't have a username or password", .{}, global, exception);
bun.default_allocator.free(@constCast(args.base_uri));
args.base_uri = "";
return args;
return;
}
return args;
return;
}
};
@@ -5795,7 +5802,8 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
listener: ?*App.ListenSocket = null,
thisObject: JSC.JSValue = JSC.JSValue.zero,
app: *App = undefined,
/// Potentially null before listen() is called, and once .destroy() is called.
app: ?*App = null,
vm: *JSC.VirtualMachine = undefined,
globalThis: *JSGlobalObject,
base_url_string_for_joining: string = "",
@@ -5807,7 +5815,6 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
listen_callback: JSC.AnyTask = undefined,
allocator: std.mem.Allocator,
poll_ref: Async.KeepAlive = .{},
temporary_url_buffer: std.ArrayListUnmanaged(u8) = .{},
cached_hostname: bun.String = bun.String.empty,
@@ -5849,7 +5856,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
return JSValue.jsNumber(0);
}
return JSValue.jsNumber((this.app.num_subscribers(topic.slice())));
return JSValue.jsNumber((this.app.?.num_subscribers(topic.slice())));
}
pub usingnamespace NamespaceType;
@@ -5895,7 +5902,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
if (this.config.websocket == null)
return JSValue.jsNumber(0);
const app = this.app;
const app = this.app.?;
if (topic.len == 0) {
httplog("publish() topic invalid", .{});
@@ -6119,7 +6126,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
pub fn onReloadFromZig(this: *ThisServer, new_config: *ServerConfig, globalThis: *JSC.JSGlobalObject) void {
httplog("onReload", .{});
this.app.clearRoutes();
this.app.?.clearRoutes();
// only reload those two
if (this.config.onRequest != new_config.onRequest) {
@@ -6167,7 +6174,9 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
defer args_slice.deinit();
var exception_ref = [_]JSC.C.JSValueRef{null};
const exception: JSC.C.ExceptionRef = &exception_ref;
var new_config = ServerConfig.fromJS(globalThis, &args_slice, exception);
var new_config: ServerConfig = .{};
ServerConfig.fromJS(globalThis, &new_config, &args_slice, exception);
if (exception.* != null) {
new_config.deinit();
globalThis.throwValue(exception_ref[0].?.value());
@@ -6565,7 +6574,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
ws.handler.app = null;
}
this.flags.terminated = true;
this.app.close();
this.app.?.close();
}
}
@@ -6588,7 +6597,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
if (!this.flags.terminated) {
this.flags.terminated = true;
this.app.close();
this.app.?.close();
}
const task = bun.default_allocator.create(JSC.AnyTask) catch unreachable;
@@ -6602,7 +6611,11 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
this.all_closed_promise.deinit();
this.config.deinit();
this.app.destroy();
if (this.app) |app| {
this.app = null;
app.destroy();
}
this.destroy();
}
@@ -6638,7 +6651,8 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
noinline fn onListenFailed(this: *ThisServer) void {
httplog("onListenFailed", .{});
this.unref();
const globalThis = this.globalThis;
var error_instance = JSC.JSValue.zero;
var output_buf: [4096]u8 = undefined;
@@ -6691,7 +6705,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
if (written > 0) {
const message = output_buf[0..written];
error_instance = this.globalThis.createErrorInstance("OpenSSL {s}", .{message});
error_instance = globalThis.createErrorInstance("OpenSSL {s}", .{message});
BoringSSL.ERR_clear_error();
}
}
@@ -6708,7 +6722,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
.message = bun.String.init(std.fmt.bufPrint(&output_buf, "permission denied {s}:{d}", .{ tcp.hostname orelse "0.0.0.0", tcp.port }) catch "Failed to start server"),
.code = bun.String.static("EACCES"),
.syscall = bun.String.static("listen"),
}).toErrorInstance(this.globalThis);
}).toErrorInstance(globalThis);
break :error_set;
}
}
@@ -6716,7 +6730,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
.message = bun.String.init(std.fmt.bufPrint(&output_buf, "Failed to start server. Is port {d} in use?", .{tcp.port}) catch "Failed to start server"),
.code = bun.String.static("EADDRINUSE"),
.syscall = bun.String.static("listen"),
}).toErrorInstance(this.globalThis);
}).toErrorInstance(globalThis);
}
},
.unix => |unix| {
@@ -6726,27 +6740,20 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
.message = bun.String.init(std.fmt.bufPrint(&output_buf, "Failed to listen on unix socket {}", .{bun.fmt.QuotedFormatter{ .text = unix }}) catch "Failed to start server"),
.code = bun.String.static("EADDRINUSE"),
.syscall = bun.String.static("listen"),
}).toErrorInstance(this.globalThis);
}).toErrorInstance(globalThis);
},
else => |e| {
var sys_err = bun.sys.Error.fromCode(e, .listen);
sys_err.path = unix;
error_instance = sys_err.toJSC(this.globalThis);
error_instance = sys_err.toJSC(globalThis);
},
}
},
}
}
// store the exception in here
// toErrorInstance clones the string
error_instance.ensureStillAlive();
error_instance.protect();
this.thisObject = error_instance;
// reference it in stack memory
this.thisObject.ensureStillAlive();
return;
globalThis.throwValue(error_instance);
}
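
With the change above, a failed listen is thrown from `Bun.serve()` via `globalThis.throwValue` instead of being stashed on `thisObject`. A hedged sketch of one observable case (EADDRINUSE):

```ts
// Assumes reusePort is disabled so the second bind genuinely conflicts.
const first = Bun.serve({ port: 0, reusePort: false, fetch: () => new Response("ok") });
try {
  Bun.serve({ port: first.port, reusePort: false, fetch: () => new Response("ok") });
} catch (err) {
  // Expected: "Failed to start server. Is port ... in use?" with code EADDRINUSE
  console.error((err as { code?: string }).code);
} finally {
  first.stop(true);
}
```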
pub fn onListen(this: *ThisServer, socket: ?*App.ListenSocket) void {
@@ -7075,19 +7082,20 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
}
fn setRoutes(this: *ThisServer) void {
const app = this.app.?;
if (this.config.static_routes.items.len > 0) {
this.config.applyStaticRoutes(
ssl_enabled,
AnyServer.from(this),
this.app,
app,
);
}
if (this.config.websocket) |*websocket| {
websocket.globalObject = this.globalThis;
websocket.handler.app = this.app;
websocket.handler.app = app;
websocket.handler.flags.ssl = ssl_enabled;
this.app.ws(
app.ws(
"/*",
this,
0,
@@ -7095,63 +7103,115 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
);
}
this.app.any("/*", *ThisServer, this, onRequest);
app.any("/*", *ThisServer, this, onRequest);
if (comptime debug_mode) {
this.app.get("/bun:info", *ThisServer, this, onBunInfoRequest);
app.get("/bun:info", *ThisServer, this, onBunInfoRequest);
if (this.config.inspector) {
JSC.markBinding(@src());
Bun__addInspector(ssl_enabled, this.app, this.globalThis);
Bun__addInspector(ssl_enabled, app, this.globalThis);
}
this.app.get("/src:/*", *ThisServer, this, onSrcRequest);
app.get("/src:/*", *ThisServer, this, onSrcRequest);
}
}
pub fn listen(this: *ThisServer) void {
httplog("listen", .{});
var app: *App = undefined;
const globalThis = this.globalThis;
if (ssl_enabled) {
BoringSSL.load();
const ssl_config = this.config.ssl_config orelse @panic("Assertion failure: ssl_config");
const ssl_options = ssl_config.asUSockets();
this.app = App.create(ssl_options);
app = App.create(ssl_options) orelse {
if (!globalThis.hasException()) {
if (!throwSSLErrorIfNecessary(globalThis)) {
globalThis.throw("Failed to create HTTP server", .{});
}
}
this.app = null;
this.deinit();
return;
};
this.app = app;
this.setRoutes();
// add serverName to the SSL context using default ssl options
if (ssl_config.server_name != null) {
const servername_len = std.mem.span(ssl_config.server_name).len;
if (servername_len > 0) {
this.app.addServerNameWithOptions(ssl_config.server_name, ssl_options);
this.app.domain(ssl_config.server_name[0..servername_len :0]);
if (ssl_config.server_name) |server_name_ptr| {
const server_name: [:0]const u8 = std.mem.span(server_name_ptr);
if (server_name.len > 0) {
app.addServerNameWithOptions(server_name, ssl_options) catch {
if (!globalThis.hasException()) {
if (!throwSSLErrorIfNecessary(globalThis)) {
globalThis.throw("Failed to add serverName: {s}", .{server_name});
}
}
this.deinit();
return;
};
if (throwSSLErrorIfNecessary(globalThis)) {
this.deinit();
return;
}
app.domain(server_name);
if (throwSSLErrorIfNecessary(globalThis)) {
this.deinit();
return;
}
// Ensure the routes are set for that domain name.
this.setRoutes();
}
}
// apply SNI routes if any
if (this.config.sni) |sni| {
for (sni.slice()) |sni_ssl_config| {
const sni_servername_len = std.mem.span(sni_ssl_config.server_name).len;
if (sni_servername_len > 0) {
this.app.addServerNameWithOptions(sni_ssl_config.server_name, sni_ssl_config.asUSockets());
this.app.domain(sni_ssl_config.server_name[0..sni_servername_len :0]);
if (this.config.sni) |*sni| {
for (sni.slice()) |*sni_ssl_config| {
const sni_servername: [:0]const u8 = std.mem.span(sni_ssl_config.server_name);
if (sni_servername.len > 0) {
app.addServerNameWithOptions(sni_servername, sni_ssl_config.asUSockets()) catch {
if (!globalThis.hasException()) {
if (!throwSSLErrorIfNecessary(globalThis)) {
globalThis.throw("Failed to add serverName: {s}", .{sni_servername});
}
}
this.deinit();
return;
};
app.domain(sni_servername);
if (throwSSLErrorIfNecessary(globalThis)) {
this.deinit();
return;
}
// Ensure the routes are set for that domain name.
this.setRoutes();
}
}
}
} else {
this.app = App.create(.{});
app = App.create(.{}) orelse {
if (!globalThis.hasException()) {
globalThis.throw("Failed to create HTTP server", .{});
}
this.deinit();
return;
};
this.app = app;
this.setRoutes();
}
this.ref();
// Starting up an HTTP server is a good time to GC
if (this.vm.aggressive_garbage_collection == .aggressive) {
this.vm.autoGarbageCollect();
} else {
this.vm.eventLoop().performGC();
}
switch (this.config.address) {
.tcp => |tcp| {
var host: ?[*:0]const u8 = null;
@@ -7168,7 +7228,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
}
}
this.app.listenWithConfig(*ThisServer, this, onListen, .{
app.listenWithConfig(*ThisServer, this, onListen, .{
.port = tcp.port,
.host = host,
.options = if (this.config.reuse_port) 0 else 1,
@@ -7176,7 +7236,7 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
},
.unix => |unix| {
this.app.listenOnUnixSocket(
app.listenOnUnixSocket(
*ThisServer,
this,
onListen,
@@ -7185,6 +7245,20 @@ pub fn NewServer(comptime NamespaceType: type, comptime ssl_enabled_: bool, comp
);
},
}
if (globalThis.hasException()) {
this.deinit();
return;
}
this.ref();
// Starting up an HTTP server is a good time to GC
if (this.vm.aggressive_garbage_collection == .aggressive) {
this.vm.autoGarbageCollect();
} else {
this.vm.eventLoop().performGC();
}
}
};
}
@@ -7297,3 +7371,14 @@ comptime {
_ = Server__setIdleTimeout;
}
}
fn throwSSLErrorIfNecessary(globalThis: *JSC.JSGlobalObject) bool {
const err_code = BoringSSL.ERR_get_error();
if (err_code != 0) {
defer BoringSSL.ERR_clear_error();
globalThis.throwValue(JSC.API.Bun.Crypto.createCryptoError(globalThis, err_code));
return true;
}
return false;
}

View File

@@ -1204,7 +1204,11 @@ pub fn wrapStaticMethod(
},
?JSC.Node.StringOrBuffer => {
if (iter.nextEat()) |arg| {
args[i] = JSC.Node.StringOrBuffer.fromJS(globalThis.ptr(), iter.arena.allocator(), arg) orelse {
args[i] = JSC.Node.StringOrBuffer.fromJS(globalThis.ptr(), iter.arena.allocator(), arg) orelse brk: {
if (arg == .undefined) {
break :brk null;
}
globalThis.throwInvalidArguments("expected string or buffer", .{});
iter.deinit();
return JSC.JSValue.zero;

View File

@@ -17,4 +17,4 @@ enum class BufferEncodingType {
};
}
}

View File

@@ -199,4 +199,4 @@ namespace WebCore {
using JSVMClientData = WebCore::JSVMClientData;
using JSHeapData = WebCore::JSHeapData;
}
}

View File

@@ -41,6 +41,11 @@ public:
{
}
static Ref<BunJSGlobalObjectDebuggable> create(JSGlobalObject& globalObject)
{
return adoptRef(*new BunJSGlobalObjectDebuggable(globalObject));
}
void pauseWaitingForAutomaticInspection() override
{
}
@@ -449,7 +454,8 @@ extern "C" void Bun__ensureDebugger(ScriptExecutionContextIdentifier scriptId, b
auto* globalObject = ScriptExecutionContext::getScriptExecutionContext(scriptId)->jsGlobalObject();
globalObject->m_inspectorController = makeUnique<Inspector::JSGlobalObjectInspectorController>(*globalObject, Bun::BunInjectedScriptHost::create());
globalObject->m_inspectorDebuggable = JSGlobalObjectDebuggable::create(*globalObject);
globalObject->m_inspectorDebuggable = BunJSGlobalObjectDebuggable::create(*globalObject);
globalObject->m_inspectorDebuggable->init();
globalObject->setInspectable(true);

View File

@@ -12,4 +12,4 @@ public:
bool isHTMLAllCollection(JSC::VM&, JSC::JSValue) override { return false; }
};
}
}

View File

@@ -106,4 +106,4 @@ class GlobalObject;
namespace Bun {
JSC::JSValue runVirtualModule(Zig::GlobalObject*, BunString* specifier, bool& wasModuleMock);
JSC::Structure* createModuleMockStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype);
}
}

View File

@@ -152,11 +152,11 @@ BunString toStringRef(JSC::JSGlobalObject* globalObject, JSValue value)
return { BunStringTag::Empty };
}
auto impl = str.releaseImpl();
StringImpl* impl = str.impl();
impl->ref();
return { BunStringTag::WTFStringImpl, { .wtf = impl.leakRef() } };
return { BunStringTag::WTFStringImpl, { .wtf = impl } };
}
BunString toString(WTF::String& wtfString)

View File

@@ -53,4 +53,4 @@ public:
private:
MessagePortChannelProviderImpl* m_messagePortChannelProvider;
};
}
}

View File

@@ -6,4 +6,4 @@ namespace WebCore {
class CachedScript {
};
}
}

View File

@@ -44,4 +44,4 @@ private:
void finishCreation(JSC::VM& vm, JSC::JSGlobalObject* globalObject);
};
}
}

View File

@@ -763,7 +763,7 @@ void populateESMExports(
bool ignoreESModuleAnnotation)
{
auto& vm = globalObject->vm();
const Identifier& esModuleMarker = builtinNames(vm).__esModulePublicName();
const Identifier& esModuleMarker = vm.propertyNames->__esModule;
// Bun's interpretation of the "__esModule" annotation:
//
@@ -795,9 +795,23 @@ void populateESMExports(
// unit tests of build tools. Happy to revisit this if users file an issue.
bool needsToAssignDefault = true;
if (result.isObject()) {
auto* exports = result.getObject();
bool hasESModuleMarker = !ignoreESModuleAnnotation && exports->hasProperty(globalObject, esModuleMarker);
if (auto* exports = result.getObject()) {
bool hasESModuleMarker = false;
if (!ignoreESModuleAnnotation) {
auto catchScope = DECLARE_CATCH_SCOPE(vm);
PropertySlot slot(exports, PropertySlot::InternalMethodType::VMInquiry, &vm);
if (exports->getPropertySlot(globalObject, esModuleMarker, slot)) {
JSValue value = slot.getValue(globalObject, esModuleMarker);
if (!value.isUndefinedOrNull()) {
if (value.pureToBoolean() == TriState::True) {
hasESModuleMarker = true;
}
}
}
if (catchScope.exception()) {
catchScope.clearException();
}
}
auto* structure = exports->structure();
uint32_t size = structure->inlineSize() + structure->outOfLineSize();
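
The rewrite above checks the `__esModule` marker with a VM-inquiry property lookup and `pureToBoolean`, so getters and thrown exceptions can no longer derail the interop path. A hedged sketch of the interop the marker controls (the CommonJS file is hypothetical):

```ts
// interop-demo.cjs (hypothetical):
//   Object.defineProperty(exports, "__esModule", { value: true });
//   exports.default = "the default export";
//   exports.named = 42;

// With a truthy __esModule marker, the default import maps to exports.default;
// without the marker, `def` would be the whole exports object.
import def, { named } from "./interop-demo.cjs";
console.log(def, named); // "the default export" 42
```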

View File

@@ -13,4 +13,4 @@ public:
}
};
}
}

View File

@@ -4,12 +4,14 @@
#include "JavaScriptCore/JSGlobalObjectFunctions.h"
#include <cstring>
using namespace WTF;
/// Must be called with a buffer of exactly 124 bytes
/// Find the length by scanning for the 0
extern "C" size_t WTF__dtoa(char* buf_124_bytes, double number)
{
NumberToStringBuffer& buf = *reinterpret_cast<NumberToStringBuffer*>(buf_124_bytes);
return WTF::numberToStringAndSize(number, buf);
return WTF::numberToStringAndSize(number, buf).size();
}
/// This is the equivalent of the unary '+' operator on a JS string

View File

@@ -41,4 +41,4 @@ protected:
bool m_isCleanupTask;
};
}
}

View File

@@ -18,4 +18,4 @@ ALWAYS_INLINE GCDeferralContext::~GCDeferralContext()
m_vm.heap.collectIfNecessaryOrDefer();
}
} // namespace JSC
} // namespace JSC

View File

@@ -414,4 +414,4 @@ template<typename T>
struct IsIDLArrayBufferViewAllowShared : public std::integral_constant<bool, std::is_base_of<IDLAllowSharedAdaptor<IDLArrayBufferView>, T>::value> {
};
} // namespace WebCore
} // namespace WebCore

View File

@@ -10,4 +10,4 @@ JSC_DECLARE_HOST_FUNCTION(jsDollarCpp);
JSC_DECLARE_HOST_FUNCTION(jsDollarZig);
} // namespace JS2Native
} // namespace Bun
} // namespace Bun

View File

@@ -69,4 +69,4 @@ public:
bool tombstoned { false };
};
} // namespace Zig
} // namespace Zig

View File

@@ -26,4 +26,4 @@ public:
HashSet<Ref<JSC::DeferredWorkTimer::TicketData>> m_pendingTicketsOther;
};
}
}

View File

@@ -2,4 +2,4 @@ namespace Bun {
JSC::JSValue createJSCTestingHelpers(Zig::GlobalObject* global);
}
}

View File

@@ -8,4 +8,4 @@
namespace WebCore {
}
}

View File

@@ -35,4 +35,4 @@ JSClass* toJSDOMGlobalObject(JSC::VM& vm, JSC::JSValue value)
return nullptr;
}
}
}

View File

@@ -12,4 +12,4 @@ namespace Bun {
JSC::JSValue createEnvironmentVariablesMap(Zig::GlobalObject* globalObject);
}
}

View File

@@ -37,4 +37,4 @@ public:
bool isEmpty();
void drain(JSC::VM& vm, JSC::JSGlobalObject* globalObject);
};
}
}

View File

@@ -71,4 +71,4 @@ private:
JSC::WriteBarrier<JSC::JSFunction> m_wrappedFn;
};
}
}

View File

@@ -2,7 +2,7 @@
#include "ZigGlobalObject.h"
namespace Bun {
JSC::JSValue createAsyncHooksBinding(Zig::GlobalObject*);
}
}

View File

@@ -4,4 +4,4 @@ namespace Bun {
JSC::JSValue createNodeFetchInternalBinding(Zig::GlobalObject*);
}
}

View File

@@ -1,11 +1,11 @@
#include "config.h"
namespace Bun {
JSC_DECLARE_HOST_FUNCTION(jsHTTPAssignHeaders);
JSC_DECLARE_HOST_FUNCTION(jsHTTPGetHeader);
JSC_DECLARE_HOST_FUNCTION(jsHTTPSetHeader);
JSC::JSValue createNodeHTTPInternalBinding(Zig::GlobalObject*);
}
}

View File

@@ -2,7 +2,7 @@
#include "ZigGlobalObject.h"
namespace Bun {
JSC::JSValue createNodeTLSBinding(Zig::GlobalObject*);
}
}

View File

@@ -5,4 +5,4 @@ namespace Bun {
JSC::JSValue createNodeURLBinding(Zig::GlobalObject*);
} // namespace Bun
} // namespace Bun

View File

@@ -2,7 +2,7 @@
#include "ZigGlobalObject.h"
namespace Bun {
JSC::JSValue createNodePathBinding(Zig::GlobalObject* globalObject);
} // namespace Bun

View File

@@ -15,4 +15,4 @@ JSC::JSValue createNodeTTYWrapObject(JSC::JSGlobalObject* globalObject);
JSC_DECLARE_HOST_FUNCTION(Process_functionInternalGetWindowSize);
}
}

View File

@@ -10,4 +10,4 @@ JSC_DECLARE_HOST_FUNCTION(jsGetErrorMap);
JSC::JSObject* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject);
} // namespace ProcessBindingUV
} // namespace Bun
} // namespace Bun

View File

@@ -30,7 +30,7 @@
namespace WebCore {
enum class ProcessIdentifierType {};
using ProcessIdentifier = LegacyNullableObjectIdentifier<ProcessIdentifierType>;
using ProcessIdentifier = ObjectIdentifier<ProcessIdentifierType>;
namespace Process {

View File

@@ -206,4 +206,4 @@ public:
ScriptExecutionContext* executionContext(JSC::JSGlobalObject*);
}
}

Some files were not shown because too many files have changed in this diff.