Compare commits

...

91 Commits

Author SHA1 Message Date
Jarred Sumner
6ba9b0a27a Merge branch 'main' into jarred/fetchy 2024-10-31 13:31:24 -07:00
Ashcon Partovi
353d44f1ae ci: If only tests change, use artifacts from last successful build (#14927) 2024-10-31 12:50:09 -07:00
Jarred Sumner
4b8ca51b87 Clean up some code in node validators (#14897) 2024-10-31 12:28:07 -07:00
Ciro Spaciari
f8d5b2e6e2 Fix module resolution cache keys (#14901)
Co-authored-by: dave caruso <me@paperdave.net>
Co-authored-by: cirospaciari <cirospaciari@users.noreply.github.com>
2024-10-30 22:06:21 -07:00
190n
9647291d73 Implement NAPI type tagging (#14915) 2024-10-30 19:57:48 -07:00
Jarred Sumner
eaa088ba55 Fix missing symbol errors and add a test (#14907)
Co-authored-by: Jarred Sumner <jarred@bun.sh>
2024-10-30 19:55:42 -07:00
Gerd Jungbluth
955cc6265b fix(docs): add missing character in drizzle guide (#14911) 2024-10-30 08:42:38 -07:00
Jarred Sumner
12dee5c720 Merge branch 'main' into jarred/fetchy 2024-10-29 18:57:07 -07:00
Dylan Conway
489890deb1 fix(install): check cached package.jsons (#14899) 2024-10-29 18:55:52 -07:00
pfg
d7710c6c67 Fix additional arguments when running a package.json script (#14895)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-29 18:55:32 -07:00
Jarred Sumner
9f70f68f00 EventEmitter.name should be "EventEmitter" instead of "EventEmitter2" (#14898) 2024-10-29 18:42:24 -07:00
Jarred Sumner
240b2a539f Introduce Bun.randomUUIDv7 (#14858)
Co-authored-by: Dylan Conway <35280289+dylan-conway@users.noreply.github.com>
2024-10-29 16:39:09 -07:00
Jarred Sumner
b9e5420571 Add https://github.com/uNetworking/uWebSockets/pull/1792 (#14864) 2024-10-29 12:56:25 -07:00
Jarred Sumner
b5a73130ad Reduce memory usage in long-running processes (#14885) 2024-10-29 12:56:10 -07:00
Jarred Sumner
d5f9978007 Fix missing symbol error on llvm 18 2024-10-29 00:08:29 -07:00
pfg
698e87aa67 Fix #14187 (#14884) 2024-10-28 18:11:03 -07:00
Zack Radisic
5502278f3e CSS: More stuff and tests (#14832)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-27 13:49:25 -07:00
Jarred Sumner
f005e8c057 Fix HTTP spec issues by upgrading uWS version (#14853) 2024-10-27 12:34:45 -07:00
dave caruso
e93c5ad993 feat(bake): css, production build, dev separateSSRGraph=false (#14622)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
Co-authored-by: paperdave <paperdave@users.noreply.github.com>
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-10-27 01:57:36 -07:00
Meghan Denny
5237869101 bun-install-registry.test.ts: remove ini format hint here (#14803) 2024-10-26 16:51:19 -07:00
Jarred Sumner
2456d70ac7 Fixes #14716 (#14834) 2024-10-26 15:15:13 -07:00
Meghan Denny
50d80a805d pm: fix weird package.json formatting after install (#14801) 2024-10-26 01:36:25 -07:00
Meghan Denny
2d9a73fc07 test: fix expected value of 'should perform bin-linking across multiple dependencies' (#14833) 2024-10-26 01:02:24 -07:00
Jarred Sumner
d0b3802a79 github actions 2024-10-25 23:50:12 -07:00
Jarred Sumner
7053212566 Update associate-issue-with-sentry.ts 2024-10-25 23:47:15 -07:00
Jarred Sumner
4f5660a6f7 Add sentry id to crash report comment 2024-10-25 23:40:27 -07:00
Dylan Conway
87279392cf fix 9395 (#14815) 2024-10-25 19:58:45 -07:00
Bjorn Beishline
7f5860331e Fixed compilation issues with no outdir (#14717)
Co-authored-by: Meghan Denny <meghan@bun.sh>
2024-10-25 18:43:58 -07:00
Dylan Conway
b895738156 fix(install): migrate package-lock.json with dependency on root package (#14811)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-25 01:32:17 -07:00
Dylan Conway
61534c7efe Remove warning for unused registry options from npmrc (#14813) 2024-10-25 01:31:39 -07:00
Jarred Sumner
ec4c9f8f84 Update mimalloc (#14814) 2024-10-25 01:31:24 -07:00
Jarred Sumner
35a64d8585 Bump WebKit (#14812) 2024-10-25 01:31:12 -07:00
Minsoo Choo
eb6995e09b Update SvelteKit usage guide (#14777) 2024-10-25 00:04:32 -07:00
Meghan Denny
1391e5269b Revert "ci: merge clang-format and clang-tidy into single pipeline" (#14809) 2024-10-25 00:04:13 -07:00
Dylan Conway
9621b641a1 update test/bun.lockb (#14746) 2024-10-25 00:03:52 -07:00
Dylan Conway
5eaa7301eb fix(install): patches with bin in package.json (#14807) 2024-10-25 00:03:19 -07:00
Arthur
f21870a06c chore(console): updated jsdoc table (#14792) 2024-10-24 21:20:46 -07:00
Don Isaac
0e4006eefd ci: merge clang-format and clang-tidy into single pipeline (#14798) 2024-10-24 15:26:05 -07:00
Dylan Conway
9643a924e1 bump 2024-10-24 14:24:08 -07:00
Jarred Sumner
9e454a43ad See if this fixes the test failure 2024-10-24 13:37:09 -07:00
Jarred Sumner
8cec29c95a Update http.zig 2024-10-24 00:12:57 -07:00
Jarred Sumner
b941dd6f0b Update fetch-gzip.test.ts 2024-10-23 22:42:51 -07:00
Jarred Sumner
f4e0948055 Multi-threaded fetch() body decompression 2024-10-23 22:20:12 -07:00
Dylan Conway
247456b675 fix(install): continue install if optional postinstall fails (#14783) 2024-10-23 21:58:53 -07:00
Meghan Denny
6f60523e6c " -> ' (#14776)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-23 19:21:36 -07:00
Jarred Sumner
2de2e9f600 48 -> 64 2024-10-23 18:27:02 -07:00
Ciro Spaciari
29bf8a505d fix(tests) pq -> pg + populate before (#14748) 2024-10-23 18:01:06 -07:00
Jarred Sumner
93d115f9b7 Reduce default max network connection limit from 256 to 48 in bun install (#14755) 2024-10-23 15:34:16 -07:00
Ashcon Partovi
74e440d58a ci: Set prioritization based on fork, main branch, or queue 2024-10-23 09:16:48 -07:00
Ashcon Partovi
aa4dde976d ci: Fix pipeline script when on main branch 2024-10-23 09:03:06 -07:00
Ashcon Partovi
eb0e9b9bde ci: Skip builds when only docs are changed (#14751) 2024-10-23 08:54:53 -07:00
Liran Tal
a656cc1b70 docs: fix missing code highlight in spawn.md (#14761) 2024-10-23 01:01:21 -07:00
Ashcon Partovi
4044ff740d ci: add scripts for building macOS images (#14743) 2024-10-22 16:07:12 -07:00
Ashcon Partovi
b9240f6ec7 cmake: only enable LTO when release + linux + ci 2024-10-22 13:10:58 -07:00
Eckhardt (Kaizen) Dreyer
3db0191409 fix(install): Skip optional dependencies if false in bunfig.toml (#14629) 2024-10-22 11:55:10 -07:00
Oliver Medhurst
00b055566e contributing: fix fedora llvm install steps (#14726) 2024-10-22 11:40:46 -07:00
snwy
517cdc1392 fix jsx symbol collisions when importing own variables with same names (#14343)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-10-22 00:17:18 -07:00
Pham Minh Triet
8b4b55725e Fix(doc): update Next.js guide (#14730) 2024-10-22 00:16:15 -07:00
Jarred Sumner
38d39109b3 Fix assertion failure 2024-10-21 21:46:17 -07:00
Jarred Sumner
ec29311c7a Bump 2024-10-21 18:05:10 -07:00
Ciro Spaciari
fe8d0079ec tls(Server) fix connectionListener and make alpnProtocol more compatible with node.js (#14695)
Co-authored-by: cirospaciari <cirospaciari@users.noreply.github.com>
2024-10-21 01:58:14 +00:00
Jarred Sumner
8063e9d6b8 Fixes #14411 (#14691) 2024-10-20 22:02:44 +00:00
Vaggelis Papadogiannakis
ae8de1926e Update instructions to run a bun application via pm2 with the use… (#14704) 2024-10-20 15:06:45 -07:00
Minsoo Choo
b9b94de5ed icu required on openSUSE for local webkit build (#14690) 2024-10-20 01:08:42 -07:00
Jarred Sumner
070e5804ad Implement crypto.hash() (#14683) 2024-10-19 12:14:23 -07:00
Jarred Sumner
67b4478137 Fixes #14333 (#14679) 2024-10-19 01:14:13 -07:00
Jarred Sumner
522c9fa22d Clarify some of this 2024-10-19 00:26:30 -07:00
Jarred Sumner
4b63ffeceb Clarify node-fallbacks 2024-10-19 00:23:57 -07:00
Pham Minh Triet
fe45b1e9b9 Fix(doc): SNI typo (#14508) 2024-10-18 22:37:57 -07:00
Jarred Sumner
d41ca824dd Bump 2024-10-18 22:32:42 -07:00
Meghan Denny
663331c56f fix regression in BunJSGlobalObjectDebuggable from most recent webkit upgrade (#14675) 2024-10-18 22:31:39 -07:00
Meghan Denny
64d0b626b9 Bun.color: fill out missing options and examples for outputFormat (#14656)
Co-authored-by: Zack Radisic <56137411+zackradisic@users.noreply.github.com>
2024-10-18 22:29:53 -07:00
Dylan Conway
e5c00ab4b4 fix(CryptoHasher): throw error if update or digest are called after digest (#14677) 2024-10-19 02:21:41 +00:00
Meghan Denny
4f2d924db3 Bun.color: match accepted outputFormat options to error (#14657) 2024-10-19 00:34:56 +00:00
Ashcon Partovi
bf8a75a63f Revert "Remove soft_fail from Buildkite since merge queue is enabled"
This reverts commit 253cc15a9f.
2024-10-18 16:04:58 -07:00
Ashcon Partovi
253cc15a9f Remove soft_fail from Buildkite since merge queue is enabled 2024-10-18 13:28:24 -07:00
Meghan Denny
fbf4b30e70 bun-types: add missing options to DigestEncoding (#14654) 2024-10-18 19:17:10 +00:00
Dylan Conway
f3b658d9f7 fix double free with invalid TLSOptions (#14648) 2024-10-18 05:16:21 +00:00
Ciro Spaciari
b652136cf7 update docs (#14620) 2024-10-18 01:26:50 +00:00
Ashcon Partovi
8376b82371 Fix merge queue (#14646) 2024-10-18 01:22:35 +00:00
Ashcon Partovi
7bb39023b8 Merge queue (#14639) 2024-10-18 01:14:42 +00:00
Meghan Denny
850cdb0587 vscode: set the launch configs' cwd to the root (#14643) 2024-10-17 16:24:10 -07:00
Ciro Spaciari
2f2a24f625 bench: fix grpc and scripts (#14638) 2024-10-17 13:30:47 -07:00
Dylan Conway
e448c4cc3b fs.mkdir empty string bugfix (#14510) 2024-10-16 18:55:49 -07:00
Ciro Spaciari
2d0b557ff7 add grpc-js bench (#14601) 2024-10-16 11:11:53 -07:00
Meghan Denny
15f5ba3e26 jest: print received value when expect().toThrow() doesnt throw (#14608) 2024-10-16 11:11:26 -07:00
refi64
1385f9f686 cmake: force the c-ares libdir to always be 'lib' (#14602) 2024-10-16 10:13:20 -07:00
Ciro Spaciari
07ccec0fd8 H2 fixes (#14606) 2024-10-16 09:06:56 -07:00
Dylan Conway
7283453eed use memset_patternN in Buffer.fill (#14599) 2024-10-15 21:16:57 -07:00
Ciro Spaciari
1a08cfcd6b fix h2 tests failures (#14598) 2024-10-15 18:36:23 -07:00
Meghan Denny
06e733cc64 ci: run clang-format on .h files too (#14597)
Co-authored-by: nektro <nektro@users.noreply.github.com>
2024-10-15 16:54:49 -07:00
410 changed files with 82587 additions and 7899 deletions

474
.buildkite/ci.mjs Normal file

@@ -0,0 +1,474 @@
#!/usr/bin/env node
/**
* Build and test Bun on macOS, Linux, and Windows.
* @link https://buildkite.com/docs/pipelines/defining-steps
*/
import { writeFileSync } from "node:fs";
import { join } from "node:path";
function getEnv(name, required = true) {
const value = process.env[name];
if (!value && required) {
throw new Error(`Missing environment variable: ${name}`);
}
return value;
}
function getRepository() {
const url = getEnv("BUILDKITE_PULL_REQUEST_REPO", false) || getEnv("BUILDKITE_REPO");
const match = url.match(/github.com\/([^/]+)\/([^/]+)\.git$/);
if (!match) {
throw new Error(`Unsupported repository: ${url}`);
}
const [, owner, repo] = match;
return `${owner}/${repo}`;
}
function getCommit() {
return getEnv("BUILDKITE_COMMIT");
}
function getBranch() {
return getEnv("BUILDKITE_BRANCH");
}
function getMainBranch() {
return getEnv("BUILDKITE_PIPELINE_DEFAULT_BRANCH", false) || "main";
}
function isFork() {
const repository = getEnv("BUILDKITE_PULL_REQUEST_REPO", false);
return !!repository && repository !== getEnv("BUILDKITE_REPO");
}
function isMainBranch() {
return getBranch() === getMainBranch() && !isFork();
}
function isMergeQueue() {
return /^gh-readonly-queue/.test(getEnv("BUILDKITE_BRANCH"));
}
function isPullRequest() {
return getEnv("BUILDKITE_PULL_REQUEST", false) === "true";
}
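// List the files touched by the head commit relative to its first parent via GitHub's
// compare API. Returns undefined when the request fails, so callers fall back to
// generating the full pipeline.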
async function getChangedFiles() {
const repository = getRepository();
const head = getCommit();
const base = `${head}^1`;
try {
const response = await fetch(`https://api.github.com/repos/${repository}/compare/${base}...${head}`);
if (response.ok) {
const { files } = await response.json();
return files.filter(({ status }) => !/removed|unchanged/i.test(status)).map(({ filename }) => filename);
}
} catch (error) {
console.error(error);
}
}
function getBuildUrl() {
return getEnv("BUILDKITE_BUILD_URL");
}
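// Walk back through Buildkite's `prev_branch_build` chain and return the id of the most
// recent finished build whose `*-build-bun` steps all passed, so its artifacts can be
// reused when only tests changed.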
async function getBuildIdWithArtifacts() {
let depth = 0;
let url = getBuildUrl();
while (url) {
const response = await fetch(`${url}.json`, {
headers: {
"Accept": "application/json",
},
});
if (!response.ok) {
return;
}
const { id, state, prev_branch_build: lastBuild, steps } = await response.json();
if (depth++) {
if (state === "failed" || state === "passed") {
const buildSteps = steps.filter(({ label }) => label.endsWith("build-bun"));
if (buildSteps.length) {
if (buildSteps.every(({ outcome }) => outcome === "passed")) {
return id;
}
return;
}
}
}
if (!lastBuild) {
return;
}
url = url.replace(/\/builds\/[0-9]+/, `/builds/${lastBuild["number"]}`);
}
}
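// Paths under .vscode, .github, bench, docs, or examples, plus any Markdown file, are
// treated as documentation-only for CI purposes.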
function isDocumentation(filename) {
return /^(\.vscode|\.github|bench|docs|examples)|\.(md)$/.test(filename);
}
function isTest(filename) {
return /^test/.test(filename);
}
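// Minimal YAML serializer for the pipeline object: nested objects become indented maps,
// arrays become `-` items, and strings containing ':', '#', quotes, or newlines are
// double-quoted.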
function toYaml(obj, indent = 0) {
const spaces = " ".repeat(indent);
let result = "";
for (const [key, value] of Object.entries(obj)) {
if (value === undefined) {
continue;
}
if (value === null) {
result += `${spaces}${key}: null\n`;
continue;
}
if (Array.isArray(value)) {
result += `${spaces}${key}:\n`;
value.forEach(item => {
if (typeof item === "object" && item !== null) {
result += `${spaces}- \n${toYaml(item, indent + 2)
.split("\n")
.map(line => `${spaces} ${line}`)
.join("\n")}\n`;
} else {
result += `${spaces}- ${item}\n`;
}
});
continue;
}
if (typeof value === "object") {
result += `${spaces}${key}:\n${toYaml(value, indent + 2)}`;
continue;
}
if (
typeof value === "string" &&
(value.includes(":") || value.includes("#") || value.includes("'") || value.includes('"') || value.includes("\n"))
) {
result += `${spaces}${key}: "${value.replace(/"/g, '\\"')}"\n`;
continue;
}
result += `${spaces}${key}: ${value}\n`;
}
return result;
}
function getPipeline(buildId) {
/**
* Helpers
*/
const getKey = platform => {
const { os, arch, baseline } = platform;
if (baseline) {
return `${os}-${arch}-baseline`;
}
return `${os}-${arch}`;
};
const getLabel = platform => {
const { os, arch, baseline } = platform;
if (baseline) {
return `:${os}: ${arch}-baseline`;
}
return `:${os}: ${arch}`;
};
// https://buildkite.com/docs/pipelines/command-step#retry-attributes
const getRetry = (limit = 3) => {
return {
automatic: [
{ exit_status: 1, limit: 1 },
{ exit_status: -1, limit },
{ exit_status: 255, limit },
{ signal_reason: "agent_stop", limit },
],
};
};
// https://buildkite.com/docs/pipelines/managing-priorities
const getPriority = () => {
if (isFork()) {
return -1;
}
if (isMainBranch()) {
return 2;
}
if (isMergeQueue()) {
return 1;
}
return 0;
};
/**
* Steps
*/
const getBuildVendorStep = platform => {
const { os, arch, baseline } = platform;
return {
key: `${getKey(platform)}-build-vendor`,
label: `${getLabel(platform)} - build-vendor`,
agents: {
os,
arch,
queue: `build-${os}`,
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
ENABLE_BASELINE: baseline ? "ON" : "OFF",
},
command: "bun run build:ci --target dependencies",
};
};
const getBuildCppStep = platform => {
const { os, arch, baseline } = platform;
return {
key: `${getKey(platform)}-build-cpp`,
label: `${getLabel(platform)} - build-cpp`,
agents: {
os,
arch,
queue: `build-${os}`,
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
BUN_CPP_ONLY: "ON",
ENABLE_BASELINE: baseline ? "ON" : "OFF",
},
command: "bun run build:ci --target bun",
};
};
const getBuildZigStep = platform => {
const { os, arch, baseline } = platform;
const toolchain = baseline ? `${os}-${arch}-baseline` : `${os}-${arch}`;
return {
key: `${getKey(platform)}-build-zig`,
label: `${getLabel(platform)} - build-zig`,
agents: {
queue: "build-zig",
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
ENABLE_BASELINE: baseline ? "ON" : "OFF",
},
command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`,
};
};
const getBuildBunStep = platform => {
const { os, arch, baseline } = platform;
return {
key: `${getKey(platform)}-build-bun`,
label: `${getLabel(platform)} - build-bun`,
depends_on: [
`${getKey(platform)}-build-vendor`,
`${getKey(platform)}-build-cpp`,
`${getKey(platform)}-build-zig`,
],
agents: {
os,
arch,
queue: `build-${os}`,
},
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: {
BUN_LINK_ONLY: "ON",
ENABLE_BASELINE: baseline ? "ON" : "OFF",
},
command: "bun run build:ci --target bun",
};
};
const getTestBunStep = platform => {
const { os, arch, distro, release } = platform;
let name;
if (os === "darwin" || os === "windows") {
name = getLabel(platform);
} else {
name = getLabel({ ...platform, os: distro });
}
let agents;
if (os === "darwin") {
agents = { os, arch, queue: `test-darwin` };
} else if (os === "windows") {
agents = { os, arch, robobun: true };
} else {
agents = { os, arch, distro, release, robobun: true };
}
let command;
if (os === "windows") {
command = `node .\\scripts\\runner.node.mjs --step ${getKey(platform)}-build-bun`;
} else {
command = `./scripts/runner.node.mjs --step ${getKey(platform)}-build-bun`;
}
let parallelism;
if (os === "darwin") {
parallelism = 2;
} else {
parallelism = 10;
}
let depends;
let env;
if (buildId) {
env = {
BUILDKITE_ARTIFACT_BUILD_ID: buildId,
};
} else {
depends = [`${getKey(platform)}-build-bun`];
}
return {
key: `${getKey(platform)}-${distro}-${release.replace(/\./g, "")}-test-bun`,
label: `${name} - test-bun`,
depends_on: depends,
agents,
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
soft_fail: isMainBranch(),
parallelism,
command,
env,
};
};
/**
* Config
*/
const buildPlatforms = [
{ os: "darwin", arch: "aarch64" },
{ os: "darwin", arch: "x64" },
{ os: "linux", arch: "aarch64" },
{ os: "linux", arch: "x64" },
{ os: "linux", arch: "x64", baseline: true },
{ os: "windows", arch: "x64" },
{ os: "windows", arch: "x64", baseline: true },
];
const testPlatforms = [
{ os: "darwin", arch: "aarch64", distro: "sonoma", release: "14" },
{ os: "darwin", arch: "aarch64", distro: "ventura", release: "13" },
{ os: "darwin", arch: "x64", distro: "sonoma", release: "14" },
{ os: "darwin", arch: "x64", distro: "ventura", release: "13" },
{ os: "linux", arch: "aarch64", distro: "debian", release: "12" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04" },
{ os: "linux", arch: "x64", distro: "debian", release: "12" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04" },
{ os: "linux", arch: "x64", distro: "debian", release: "12", baseline: true },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04", baseline: true },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", baseline: true },
{ os: "windows", arch: "x64", distro: "server", release: "2019" },
{ os: "windows", arch: "x64", distro: "server", release: "2019", baseline: true },
];
return {
priority: getPriority(),
steps: [
...buildPlatforms.map(platform => {
const { os, arch, baseline } = platform;
let steps = [
...testPlatforms
.filter(platform => platform.os === os && platform.arch === arch && baseline === platform.baseline)
.map(platform => getTestBunStep(platform)),
];
if (!buildId) {
steps.unshift(
getBuildVendorStep(platform),
getBuildCppStep(platform),
getBuildZigStep(platform),
getBuildBunStep(platform),
);
}
return {
key: getKey(platform),
group: getLabel(platform),
steps,
};
}),
],
};
}
async function main() {
console.log("Checking environment...");
console.log(" - Repository:", getRepository());
console.log(" - Branch:", getBranch());
console.log(" - Commit:", getCommit());
console.log(" - Is Main Branch:", isMainBranch());
console.log(" - Is Merge Queue:", isMergeQueue());
console.log(" - Is Pull Request:", isPullRequest());
let buildId;
const changedFiles = await getChangedFiles();
if (changedFiles) {
console.log(
`Found ${changedFiles.length} changed files: \n${changedFiles.map(filename => ` - ${filename}`).join("\n")}`,
);
if (changedFiles.every(filename => isDocumentation(filename))) {
console.log("Since changed files are only documentation, skipping...");
return;
}
if (changedFiles.every(filename => isTest(filename) || isDocumentation(filename))) {
buildId = await getBuildIdWithArtifacts();
if (buildId) {
console.log("Since changed files are only tests, using build artifacts from previous build...", buildId);
} else {
console.log("Changed files are only tests, but could not find previous build artifacts...");
}
}
}
const pipeline = getPipeline(buildId);
const content = toYaml(pipeline);
const contentPath = join(process.cwd(), ".buildkite", "ci.yml");
writeFileSync(contentPath, content);
console.log("Generated pipeline:");
console.log(" - Path:", contentPath);
console.log(" - Size:", (content.length / 1024).toFixed(), "KB");
}
await main();
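// Local dry-run sketch (illustrative values, not part of this script): export the
// Buildkite variables below, then run `node .buildkite/ci.mjs`; the generated pipeline is
// written to .buildkite/ci.yml under the current working directory.
//   BUILDKITE_REPO=https://github.com/oven-sh/bun.git
//   BUILDKITE_COMMIT=<sha>
//   BUILDKITE_BRANCH=<branch>
//   BUILDKITE_BUILD_URL=<build url>   (only needed when the change set is tests-only)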


@@ -1,790 +0,0 @@
# Build and test Bun on macOS, Linux, and Windows.
# https://buildkite.com/docs/pipelines/defining-steps
#
# If a step has the `robobun: true` label, robobun will listen
# to webhooks from Buildkite and provision a VM to run the step.
#
# Changes to this file will be automatically uploaded on the next run
# for a particular commit.
steps:
# macOS aarch64
- key: "darwin-aarch64"
group: ":darwin: aarch64"
steps:
- key: "darwin-aarch64-build-deps"
label: "build-deps"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
command:
- "bun run build:ci --target dependencies"
- key: "darwin-aarch64-build-cpp"
label: "build-cpp"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-aarch64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain darwin-aarch64"
- key: "darwin-aarch64-build-bun"
label: "build-bun"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
depends_on:
- "darwin-aarch64-build-deps"
- "darwin-aarch64-build-cpp"
- "darwin-aarch64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-aarch64-test-macos-14"
label: ":darwin: 14 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-aarch64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "aarch64"
release: "14"
command:
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
- key: "darwin-aarch64-test-macos-13"
label: ":darwin: 13 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-aarch64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "aarch64"
release: "13"
command:
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
# macOS x64
- key: "darwin-x64"
group: ":darwin: x64"
steps:
- key: "darwin-x64-build-deps"
label: "build-deps"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
command:
- "bun run build:ci --target dependencies"
- key: "darwin-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain darwin-x64"
- key: "darwin-x64-build-bun"
label: "build-bun"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
depends_on:
- "darwin-x64-build-deps"
- "darwin-x64-build-cpp"
- "darwin-x64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-x64-test-macos-14"
label: ":darwin: 14 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-x64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "x64"
release: "14"
command:
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
- key: "darwin-x64-test-macos-13"
label: ":darwin: 13 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-x64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "x64"
release: "13"
command:
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
# Linux x64
- key: "linux-x64"
group: ":linux: x64"
steps:
- key: "linux-x64-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
command:
- "bun run build:ci --target dependencies"
- key: "linux-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain linux-x64"
- key: "linux-x64-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
depends_on:
- "linux-x64-build-deps"
- "linux-x64-build-cpp"
- "linux-x64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-test-debian-12"
label: ":debian: 12 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
- key: "linux-x64-test-ubuntu-2204"
label: ":ubuntu: 22.04 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
- key: "linux-x64-test-ubuntu-2004"
label: ":ubuntu: 20.04 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
# Linux x64-baseline
- key: "linux-x64-baseline"
group: ":linux: x64-baseline"
steps:
- key: "linux-x64-baseline-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target dependencies"
- key: "linux-x64-baseline-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
ENABLE_BASELINE: "ON"
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-baseline-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target bun-zig --toolchain linux-x64-baseline"
- key: "linux-x64-baseline-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
depends_on:
- "linux-x64-baseline-build-deps"
- "linux-x64-baseline-build-cpp"
- "linux-x64-baseline-build-zig"
env:
ENABLE_BASELINE: "ON"
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-baseline-test-debian-12"
label: ":debian: 12 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
- key: "linux-x64-baseline-test-ubuntu-2204"
label: ":ubuntu: 22.04 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
- key: "linux-x64-baseline-test-ubuntu-2004"
label: ":ubuntu: 20.04 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
# Linux aarch64
- key: "linux-aarch64"
group: ":linux: aarch64"
steps:
- key: "linux-aarch64-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
command:
- "bun run build:ci --target dependencies"
- key: "linux-aarch64-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-aarch64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain linux-aarch64"
- key: "linux-aarch64-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
depends_on:
- "linux-aarch64-build-deps"
- "linux-aarch64-build-cpp"
- "linux-aarch64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-aarch64-test-debian-12"
label: ":debian: 12 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
- key: "linux-aarch64-test-ubuntu-2204"
label: ":ubuntu: 22.04 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
- key: "linux-aarch64-test-ubuntu-2004"
label: ":ubuntu: 20.04 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
# Windows x64
- key: "windows-x64"
group: ":windows: x64"
steps:
- key: "windows-x64-build-deps"
label: "build-deps"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
command:
- "bun run build:ci --target dependencies"
- key: "windows-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain windows-x64"
- key: "windows-x64-build-bun"
label: "build-bun"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
depends_on:
- "windows-x64-build-deps"
- "windows-x64-build-cpp"
- "windows-x64-build-zig"
retry:
automatic:
- exit_status: 255
limit: 5
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-test-bun"
label: ":windows: x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 1
retry:
automatic:
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "windows-x64-build-bun"
agents:
robobun: "true"
os: "windows"
arch: "x64"
command:
- "node .\\scripts\\runner.node.mjs --step windows-x64-build-bun"
# Windows x64-baseline
- key: "windows-x64-baseline"
group: ":windows: x64-baseline"
steps:
- key: "windows-x64-baseline-build-deps"
label: "build-deps"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target dependencies"
- key: "windows-x64-baseline-build-cpp"
label: "build-cpp"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-baseline-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target bun-zig --toolchain windows-x64-baseline"
- key: "windows-x64-baseline-build-bun"
label: "build-bun"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
depends_on:
- "windows-x64-baseline-build-deps"
- "windows-x64-baseline-build-cpp"
- "windows-x64-baseline-build-zig"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-baseline-test-bun"
label: ":windows: x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 1
retry:
automatic:
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "windows-x64-baseline-build-bun"
agents:
robobun: "true"
os: "windows"
arch: "x64"
command:
- "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun"


@@ -29,6 +29,10 @@ function assert_curl() {
assert_command "curl" "curl" "https://curl.se/download.html"
}
function assert_node() {
assert_command "node" "node" "https://nodejs.org/en/download/"
}
function assert_command() {
local command="$1"
local package="$2"
@@ -92,6 +96,12 @@ assert_build
assert_buildkite_agent
assert_jq
assert_curl
assert_node
assert_release
assert_canary
upload_buildkite_pipeline ".buildkite/ci.yml"
run_command node ".buildkite/ci.mjs"
if [ -f ".buildkite/ci.yml" ]; then
upload_buildkite_pipeline ".buildkite/ci.yml"
fi

2
.gitattributes vendored

@@ -49,3 +49,5 @@ vendor/brotli/** linguist-vendored
test/js/node/test/fixtures linguist-vendored
test/js/node/test/common linguist-vendored
test/js/bun/css/files linguist-vendored


@@ -7,6 +7,7 @@ on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.27"


@@ -7,6 +7,7 @@ on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.27"


@@ -83,6 +83,26 @@ jobs:
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
rm -rf is-outdated.txt outdated.txt latest.txt
- name: Generate comment text with Sentry Link
if: github.event.label.name == 'crash'
# Do not fail the workflow if this step fails
continue-on-error: true
id: generate-comment-text
env:
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_EVENTS_SECRET }}
shell: bash
run: |
bun scripts/associate-issue-with-sentry.ts
if [[ -f "sentry-link.txt" ]]; then
echo "sentry-link=$(cat sentry-link.txt)" >> $GITHUB_OUTPUT
fi
if [[ -f "sentry-id.txt" ]]; then
echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT
fi
- name: Add labels
uses: actions-cool/issues-helper@v3
if: github.event.label.name == 'crash'
@@ -92,7 +112,7 @@ jobs:
issue-number: ${{ github.event.issue.number }}
labels: ${{ steps.add-labels.outputs.labels }}
- name: Comment outdated
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash'
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
@@ -106,6 +126,40 @@ jobs:
```sh
bun upgrade
```
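# Two variants of the crash-report comment follow: one for reporters whose Bun version is
# outdated (includes the upgrade prompt) and one for reporters already on the latest
# version; both embed the Sentry id and link for internal tracking.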
- name: Comment with Sentry Link and outdated version
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
Are you able to reproduce this crash on the latest version of Bun?
```sh
bun upgrade
```
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment with Sentry Link
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated != 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Thank you for reporting this crash.
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment needs repro
if: github.event.label.name == 'needs repro'
uses: actions-cool/issues-helper@v3


@@ -7,6 +7,7 @@ on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.27"


@@ -7,6 +7,7 @@ on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.27"

5
.gitignore vendored

@@ -141,6 +141,7 @@ test/node.js/upstream
.zig-cache
scripts/env.local
*.generated.ts
src/bake/generated.ts
# Dependencies
/vendor
@@ -163,3 +164,7 @@ scripts/env.local
/src/deps/zstd
/src/deps/zlib
/src/deps/zig
# Generated files
.buildkite/ci.yml

128
.vscode/launch.json generated vendored

@@ -14,7 +14,7 @@
"name": "bun test [file]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -22,6 +22,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -29,7 +31,7 @@
"name": "bun test [file] --only",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -37,6 +39,8 @@
"BUN_DEBUG_jest": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -50,7 +54,7 @@
"name": "bun test [file] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -58,6 +62,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -65,7 +71,7 @@
"name": "bun test [file] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
@@ -73,6 +79,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -80,7 +88,7 @@
"name": "bun test [file] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--watch", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -88,6 +96,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -95,7 +105,7 @@
"name": "bun test [file] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--hot", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -103,6 +113,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -110,7 +122,7 @@
"name": "bun test [file] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -119,6 +131,8 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -131,7 +145,7 @@
"name": "bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -140,6 +154,8 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -160,6 +176,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -178,6 +196,8 @@
"GOMAXPROCS": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -192,6 +212,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -206,6 +228,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -220,6 +244,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -235,6 +261,8 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -255,6 +283,8 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -268,7 +298,7 @@
"name": "bun test [...]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -276,6 +306,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -283,7 +315,7 @@
"name": "bun test [...] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -291,6 +323,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -298,7 +332,7 @@
"name": "bun test [...] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -306,6 +340,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -313,7 +349,7 @@
"name": "bun test [...] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -321,6 +357,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -328,7 +366,7 @@
"name": "bun test [...] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -336,6 +374,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -343,7 +383,7 @@
"name": "bun test [...] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -352,6 +392,8 @@
"BUN_INSPECT": "ws://localhost:0/?wait=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -364,7 +406,7 @@
"name": "bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -373,6 +415,8 @@
"BUN_INSPECT": "ws://localhost:0/?break=1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -393,6 +437,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
// bun test [*]
{
@@ -401,13 +447,15 @@
"name": "bun test [*]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -415,13 +463,15 @@
"name": "bun test [*] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -429,7 +479,7 @@
"name": "bun test [*] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -437,6 +487,8 @@
"BUN_INSPECT": "ws://localhost:0/",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -456,6 +508,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -470,6 +524,8 @@
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
// Windows: bun test [file]
{
@@ -481,7 +537,7 @@
"name": "Windows: bun test [file]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -510,7 +566,7 @@
"name": "Windows: bun test --only [file]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -539,7 +595,7 @@
"name": "Windows: bun test [file] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -568,7 +624,7 @@
"name": "Windows: bun test [file] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -597,7 +653,7 @@
"name": "Windows: bun test [file] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -635,7 +691,7 @@
"name": "Windows: bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -822,7 +878,7 @@
"name": "Windows: bun test [...]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -851,7 +907,7 @@
"name": "Windows: bun test [...] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -880,7 +936,7 @@
"name": "Windows: bun test [...] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -909,7 +965,7 @@
"name": "Windows: bun test [...] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -938,7 +994,7 @@
"name": "Windows: bun test [...] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -967,7 +1023,7 @@
"name": "Windows: bun test [...] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1005,7 +1061,7 @@
"name": "Windows: bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1070,7 +1126,7 @@
"name": "Windows: bun test [*]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1095,7 +1151,7 @@
"name": "Windows: bun test [*] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1124,7 +1180,7 @@
"name": "Windows: bun test [*] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test"],
"cwd": "${workspaceFolder}/test",
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1182,6 +1238,8 @@
},
],
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
],
"inputs": [
@@ -1196,4 +1254,4 @@
"description": "Usage: bun test [...]",
},
],
}
}


@@ -30,7 +30,7 @@ $ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config
```
```bash#openSUSE Tumbleweed
$ sudo zypper install go cmake ninja automake git rustup && rustup toolchain install stable
$ sudo zypper install go cmake ninja automake git icu rustup && rustup toolchain install stable
```
{% /codetabs %}
@@ -77,8 +77,8 @@ $ sudo pacman -S llvm clang lld
```bash#Fedora
$ sudo dnf install 'dnf-command(copr)'
$ sudo dnf copr enable -y @fedora-llvm-team/llvm-snapshots
$ sudo dnf install llvm clang lld
$ sudo dnf copr enable -y @fedora-llvm-team/llvm17
$ sudo dnf install llvm16 clang16 lld16-devel
```
```bash#openSUSE Tumbleweed

2
LATEST
View File

@@ -1 +1 @@
1.1.30
1.1.33

View File

@@ -3,9 +3,9 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench:deno": "deno run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}

View File

@@ -1,11 +1,11 @@
{
"name": "bench",
"scripts": {
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"deps": "cd src && bun run deps",
"build": "cd src && bun run build",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench:deno": "deno run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}

View File

@@ -0,0 +1,14 @@
syntax = "proto3";
package benchmark;
service BenchmarkService {
rpc Ping(Request) returns (Response);
}
message Request {
string message = 1;
}
message Response {
string message = 1;
}

View File

@@ -0,0 +1,33 @@
-----BEGIN CERTIFICATE-----
MIIFxjCCA66gAwIBAgIUUaQCzOcxcFBP0KwoQfNqD/FoI44wDQYJKoZIhvcNAQEL
BQAwYjELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJh
bmNpc2NvMQwwCgYDVQQKDANCdW4xDDAKBgNVBAsMA0J1bjESMBAGA1UEAwwJbG9j
YWxob3N0MB4XDTI0MTAxNjAwMDExNloXDTM0MTAxNDAwMDExNlowYjELMAkGA1UE
BhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJhbmNpc2NvMQwwCgYD
VQQKDANCdW4xDDAKBgNVBAsMA0J1bjESMBAGA1UEAwwJbG9jYWxob3N0MIICIjAN
BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp2s1CWRRV3bkjUxyBefcRCiZj8v6
LIIWOb/kFJOo1PQsmQtOOWfY/kNEATPhLtEVolMzsQtaKV+u/Jnp6vU6cCU0qfQ/
cha/s0XaSn9zkJSXjmNOPDOXoeJ5wmSUvWETRvDgeYXCg84zTwRnD1pXIsKxHtia
SYkTC29skSn0+63GW2Ebzkbn3jcYbk3gfkRO/qw8EDh/4/TcS2SjoHl96E1QcfBX
InXrPGoHQhuqJV60rmmkVws0lTIZIq0g2p7iFDCg5TG1asakX7+CrEM/q+oyo3e8
RwMfc+9pqFEqyvXGIQSulS+CVKKbpAFMg07UGYe1t0s5iCwfLQ9apaKL31t/3Vkr
uVKgy5FrPLnRXkFXDZ1v+43AZBmdLrKODzsqHEbt2JmV0V6JVUkE4kbeJr/nlkhQ
x6yXloYY3VKbnCb1L3HmMInrK1QSpxlOb8RllTd33oBwd1FKEvH2gza0j9hqq8uQ
hWVN7tlamkgtBteZ8Y9fd3MdxD9iZOx4dVtCX1+sgJFdaL2ZgE0asojn46yT8Uqw
5d0M9vqmWc5AqG7c4UWWRrfB1MfOq/X8GtImmKyhEgizIPdWFeF1cNjhPffJv4yR
Y4Rj33OBTCM+9h8ZSw/fKo55yRXyz3bjrW2Mg8Dtq+6TcRd5gSLCaTN6jX8E9y7G
TobnA9MnKHhSIhsCAwEAAaN0MHIwHQYDVR0OBBYEFEJU6/9ELCp1CAxYJ5FJJxpV
FSRmMB8GA1UdIwQYMBaAFEJU6/9ELCp1CAxYJ5FJJxpVFSRmMA8GA1UdEwEB/wQF
MAMBAf8wHwYDVR0RBBgwFoIJbG9jYWxob3N0ggkxMjcuMC4wLjEwDQYJKoZIhvcN
AQELBQADggIBACyOPdVwfJg1aUNANy78+cm6eoInM9NDdXGWHMqCJwYF6qJTQV11
jYwYrl+OWOi3CEC+ogXl+uJX4tSS5d+rBTXEb73cLpogxP+xuxr4cBHhtgpGRpY0
GqWCFUTexHxXMrYhHQxf3uv79PNauw/dd1Baby1OjF3zSKRzFsv4KId97cAgT/9H
HfUo2ym5jmhNFj5rhUavO3Pw1++1eeDeDAkS6T59buzx0h9760WD20oBdgjt42cb
P6xg9OwV7ALQSwJ8YPEXpkl7u+6jy0j5ceYmXh76tAyA+hDYOJrY0opBjSPmXH99
p3W63gvk/AdfeAdbFHp6en0b04x4EIogOGZxBP35rzBvsQpqavBE3PBpUIyrQs5p
OBUncRrcjEDL6WKh6RJIjZnvpHPrEqOqyxaeWRc4+85ZrVArJHGMc8I+zs9uCFjo
Cjfde3d317kCszUTxo0l3azyBpr007PMIUoBF2VJEAyQp2Tz/yu0CbEscNJO/wCn
Sb1A6ojaQcgQe2hsaJz/mS+OOjHHaDbCp9iltP2CS63PYleEx4q1Bn8KVRy2zYTB
n74y4YaD8Q+hSA6zU741pzqK2SFCpBQnSz757ocr6WspQ47iOonX2giGZS/3KVeK
qNzU14+h0b8HaBqZmOvjF+S4G0HDpRwxPzDWgc7dEIWlzHH+ZCqjBFwL
-----END CERTIFICATE-----

View File

@@ -0,0 +1,31 @@
const grpc = require("@grpc/grpc-js");
const protoLoader = require("@grpc/proto-loader");
const packageDefinition = protoLoader.loadSync("benchmark.proto", {});
const proto = grpc.loadPackageDefinition(packageDefinition).benchmark;
const fs = require("fs");
function ping(call, callback) {
callback(null, { message: "Hello, World" });
}
function main() {
const server = new grpc.Server();
server.addService(proto.BenchmarkService.service, { ping: ping });
const tls = !!process.env.TLS && (process.env.TLS === "1" || process.env.TLS === "true");
const port = process.env.PORT || 50051;
const host = process.env.HOST || "localhost";
let credentials;
if (tls) {
const ca = fs.readFileSync("./cert.pem");
const key = fs.readFileSync("./key.pem");
const cert = fs.readFileSync("./cert.pem");
credentials = grpc.ServerCredentials.createSsl(ca, [{ private_key: key, cert_chain: cert }]);
} else {
credentials = grpc.ServerCredentials.createInsecure();
}
server.bindAsync(`${host}:${port}`, credentials, () => {
console.log(`Server running at ${tls ? "https" : "http"}://${host}:${port}`);
});
}
main();

52
bench/grpc-server/key.pem Normal file
View File

@@ -0,0 +1,52 @@
-----BEGIN PRIVATE KEY-----
MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCnazUJZFFXduSN
THIF59xEKJmPy/osghY5v+QUk6jU9CyZC045Z9j+Q0QBM+Eu0RWiUzOxC1opX678
menq9TpwJTSp9D9yFr+zRdpKf3OQlJeOY048M5eh4nnCZJS9YRNG8OB5hcKDzjNP
BGcPWlciwrEe2JpJiRMLb2yRKfT7rcZbYRvORufeNxhuTeB+RE7+rDwQOH/j9NxL
ZKOgeX3oTVBx8Fcides8agdCG6olXrSuaaRXCzSVMhkirSDanuIUMKDlMbVqxqRf
v4KsQz+r6jKjd7xHAx9z72moUSrK9cYhBK6VL4JUopukAUyDTtQZh7W3SzmILB8t
D1qloovfW3/dWSu5UqDLkWs8udFeQVcNnW/7jcBkGZ0uso4POyocRu3YmZXRXolV
SQTiRt4mv+eWSFDHrJeWhhjdUpucJvUvceYwiesrVBKnGU5vxGWVN3fegHB3UUoS
8faDNrSP2Gqry5CFZU3u2VqaSC0G15nxj193cx3EP2Jk7Hh1W0JfX6yAkV1ovZmA
TRqyiOfjrJPxSrDl3Qz2+qZZzkCobtzhRZZGt8HUx86r9fwa0iaYrKESCLMg91YV
4XVw2OE998m/jJFjhGPfc4FMIz72HxlLD98qjnnJFfLPduOtbYyDwO2r7pNxF3mB
IsJpM3qNfwT3LsZOhucD0ycoeFIiGwIDAQABAoICAE+YYrDCZwHEXsjmzVcNcuVc
wBVjjt9WQabXGmLGCQClzgY9H8WfH8VSyaQgvDB762MvV2YW1ZjSCunBazrvuAbV
SYJ7wyZEtoNO9IdyrMjSPHPPtsRcavzmJalMFIMtAfM6Vh6wf1gW0sIAf9cGxmKa
WYcmx8OqTcmkAePKJNT7O1D6jDO39kjpvM3EbLTbWQsva6bylasVIR8fC8QhvsCQ
8WwaLfMOSPaCGk1Nxcjai+BYDW/sveUo2lZoJTSLUUT0EaqlxXCsXD3BWSj5F+5t
/AFHzdWdIHkIHB2P6V5xFu9fwHjhC3+dh42jqHLNKX2xza0FMKcTAwdzQ094RjL3
cOGIsa0Vdt7Mks5eLCRxz0xI3kyrbF0/CopxT0pVWZwUzPk1G+Z3HesWkVtQpg7u
RYzsoNKKc5mhc/V+vG290WAcNB4E3m85DgKQr4ib+J/rCy5/SnJYgg4QXsEyNlQ5
ESBtRmuPfnrPIxqrDKZ7ZsJv8XFWydXTOfJxeKR1T1S02iYna+z1FnNu+t0ELTr9
uhmkuqmV8RJVTub1P2EJPdiku/61UwNLyyZMgFjATDxB0hHIj1FP1HbfhEYbkYNc
Dl7a7egJ4KFYWpQ+7MzOmc0OKq1HuJ9H4FhoYpbVq1OQosZ6G3d9afKSZa6dFdK0
8ujvdQBR0NlAhc/LAr6BAoIBAQDfD3h9P4i5L8NCdocovCi3Eo0kcNQ3QuvnWrrs
B/9CLoWhJrcLV85d0dEX6lSYl9BWW02ilVB+Qvom2wS2td1CBUgDxovX4tCZCuXt
otYL/yWWOA7IG0Fjt6YEERQD/tRfKnn8hVBlk5cDTXXxHRGVMku4CHsN3ILtITQS
VnVsTrGoWd6mFFA9X9Qu4zR9wKtjGEuL7BT8ixxtXLa2tMjdc4UL140yAgmMemJS
TzC6EURe2OnhIzVe9yyLKcqw0prkGHg/Lau5lA1CAh67ZMY4EjO3cuda8R+O7vyO
z2afeaTORzzdEbSZPG+8oqIN1/RjRCbl3RXYN8ibSwOzp6X7AoIBAQDAJEVta98J
P2/36rXrkl6WrRfYqUPy6vgo/lPuRpp+BQ7ldgmH4+ZrJW5Mxa5hktVujk/C2kAO
auzhzNlsxR+c/KwtsL1JXwBn8CT1bR0qvi+URmvGQn9GOKrLLy+6cfphuZWuc4/r
hAgXzEjzPcJJJfxA1i2soKPbiFiCGHxot68P4uJSM2sU6QjNIxEjPbTJjEg894pD
GJoiRRVHgnzzxL3cqrK90Zn6MAl9f2tYihfddsENeZb5t84LBppxBSGouE3ZH8uD
Sufs4DSj1ptocbDbX+0kRNqfjTI5ivDxlS+ZKBe05PVTUmGBAWLamfCe89IW3/z+
Rfkh4ZBPtlphAoIBADwjSqPR7kWnN+iCVjxIRl3dNYpelQh1FW7hikW6fjpUmphw
/KalPLEUsV/WQIqHW5b8tLihsvrnidPR9rpf29BB5kGGVQuWThEE3CquXTEM0BBo
+qs+lemRiMPN6uyM1qr1o7/OHXfVS8CLMMIZyTTFQ57RQoPhMLdH3WcYQj46FTHD
UQDLtzpkzKr7fJpuyIZF9ZA6zQmtY7OkbGpj4Ue7LmKb8ahK3lIuaLWyPfvcTeeY
aa3WNTxuPWcjlE8J6NKYOksmQAcfgFeMhMaXC83wMltCMlfVbGG30wWZqxxRynoG
wMUFUgCCR8m+uxwqXewpYqdUbOBHYeFkXxIfn+MCggEAR5p8wQ1NHd4lNOekCfkP
BOnWlChoKRPFjUlSL97h3gq2hW6amKimitF1LGkS1kvo+/1O3heFfZn9UxyK/kzr
vg4vgAt4Tup3dUR6EXgrQW2Ev6YKreTEF4Awre2UxM+K9nY5wLxSKvuWJIA9w2AF
kkr0mZj3hniK99n02e6UFlY1iB8OJoIA6tb5L7FcxpxNTjrYBNhfDygQ8Kp8Bp0r
QZDVDHIUkEaXMjRKpRkiAOndgOurgAEK8V69C0DXtzypUX31jO+bYP8+NPlMxK3K
Vn7f4LD75+M88e6lg+oyZmUpStM1GnWksvtlWLUSiNKLaEEGzv2EA6JB+I1dwUb8
oQKCAQEAlmisUyn1/lpNnEzKsfUnRs53WxS2e1br5vJ5+pet3cjXT2btfp6J5/mf
Tfqv5mZfTjYxydG0Kl3afI/SnhTcRS2/s4svrktZYLOLM2PAGYdCV6j1stXl4ObO
eIfjzB3y1Zc2dEcWTylJ/lABoNGMPWFJQ67q8WS37pUHQPseJ++LmZFvlRyBgZBl
VLqiHHiZ2ax+yC1ZxY4RECtEiYFplspNldNe+bP/lzTJftsUDe1FqRT/SvEam+1f
kb//sbHkJ+l4BEv0Us3SIGwJ0BblhxLYO34IFVpheY4UQBy/nRaeUUdVR9r8JtYD
z/cCLOrUJfealezimyd8SKPWPeHhrA==
-----END PRIVATE KEY-----

View File

@@ -0,0 +1,15 @@
{
"name": "bench",
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bun:server": "TLS=1 PORT=50051 bun ./index.js",
"node:server": "TLS=1 PORT=50051 node ./index.js",
"bench": "ghz --cacert ./cert.pem --proto ./benchmark.proto --call benchmark.BenchmarkService.Ping -d '{\"message\": \"Hello\"}' --total=100000 localhost:50051",
"bench:insecure": "ghz --insecure --proto ./benchmark.proto --call benchmark.BenchmarkService.Ping -d '{\"message\": \"Hello\"}' --total=100000 localhost:50051"
},
"dependencies": {
"@grpc/grpc-js": "1.12.0",
"@grpc/proto-loader": "0.7.10"
}
}

View File

@@ -3,9 +3,9 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench:deno": "deno run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
},
"dependencies": {

View File

@@ -3,9 +3,9 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "$BUN bun.js | grep iter",
"bench:node": "$NODE node.mjs | grep iter",
"bench:deno": "$DENO run -A --unstable deno.mjs | grep iter",
"bench:bun": "bun bun.js | grep iter",
"bench:node": "node node.mjs | grep iter",
"bench:deno": "deno run -A --unstable deno.mjs | grep iter",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}

View File

@@ -3,8 +3,8 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench": "bun run bench:bun && bun run bench:node"
}
}

View File

@@ -0,0 +1,15 @@
import { bench, run } from "./runner.mjs";
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
for (let fillSize of [4, 8, 16, 11]) {
const buffer = Buffer.allocUnsafe(size);
const pattern = "x".repeat(fillSize);
bench(`Buffer.fill ${size} bytes with ${fillSize} byte value`, () => {
buffer.fill(pattern);
});
}
}
await run();

View File

@@ -5,10 +5,10 @@
},
"scripts": {
"build": "exit 0",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"deps": "npm install && bash src/download.sh",
"bench:deno": "$DENO run -A --unstable-ffi deno.js",
"bench:deno": "deno run -A --unstable-ffi deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}

84
ci/README.md Normal file
View File

@@ -0,0 +1,84 @@
# CI
This directory contains scripts for building CI images for Bun.
## Building
### `macOS`
On macOS, images are built using [`tart`](https://tart.run/), a tool that abstracts over the [`Virtualization.Framework`](https://developer.apple.com/documentation/virtualization) APIs to run macOS VMs.
To install the dependencies required, run:
```sh
$ cd ci
$ bun run bootstrap
```
To build a vanilla macOS VM, run:
```sh
$ bun run build:darwin-aarch64-vanilla
```
This builds a vanilla macOS VM with the current macOS release on your machine. It runs scripts to disable things like spotlight and siri, but it does not install any software.
> Note: The image size is 50GB, so make sure you have enough disk space.
If you want to build a specific macOS release, you can run:
```sh
$ bun run build:darwin-aarch64-vanilla-15
```
> Note: You cannot build a newer release of macOS on an older macOS machine.
To build a macOS VM with software installed to build and test Bun, run:
```sh
$ bun run build:darwin-aarch64
```
## Running
### `macOS`
## How To
### Support a new macOS release
1. Visit [`ipsw.me`](https://ipsw.me/VirtualMac2,1) and find the IPSW of the macOS release you want to build.
2. Add an entry to [`ci/darwin/variables.pkr.hcl`](/ci/darwin/variables.pkr.hcl) with the following format:
```hcl
sequoia = {
distro = "sequoia"
release = "15"
ipsw = "https://updates.cdn-apple.com/..."
}
```
3. Add matching scripts to [`ci/package.json`](/ci/package.json) to build the image, then test it:
```sh
$ bun run build:darwin-aarch64-vanilla-15
```
> Note: If you need to troubleshoot the build, you can remove the `headless = true` property from [`ci/darwin/image-vanilla.pkr.hcl`](/ci/darwin/image-vanilla.pkr.hcl) and the VM's screen will be displayed.
4. Test and build the non-vanilla image:
```sh
$ bun run build:darwin-aarch64-15
```
This will use the vanilla image and run the [`scripts/bootstrap.sh`](/scripts/bootstrap.sh) script to install the required software to build and test Bun.
5. Publish the images:
```sh
$ bun run login
$ bun run publish:darwin-aarch64-vanilla-15
$ bun run publish:darwin-aarch64-15
```

View File

@@ -0,0 +1,46 @@
# Generates a vanilla macOS VM with optimized settings for virtualized environments.
# See login.sh and optimize.sh for details.
data "external-raw" "boot-script" {
program = ["sh", "-c", templatefile("scripts/boot-image.sh", var)]
}
source "tart-cli" "bun-darwin-aarch64-vanilla" {
vm_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
from_ipsw = local.release.ipsw
cpu_count = local.cpu_count
memory_gb = local.memory_gb
disk_size_gb = local.disk_size_gb
ssh_username = local.username
ssh_password = local.password
ssh_timeout = "120s"
create_grace_time = "30s"
boot_command = split("\n", data.external-raw.boot-script.result)
headless = true # Disable if you need to debug why the boot_command is not working
}
build {
sources = ["source.tart-cli.bun-darwin-aarch64-vanilla"]
provisioner "file" {
content = file("scripts/setup-login.sh")
destination = "/tmp/setup-login.sh"
}
provisioner "shell" {
inline = ["echo \"${local.password}\" | sudo -S sh -c 'sh /tmp/setup-login.sh \"${local.username}\" \"${local.password}\"'"]
}
provisioner "file" {
content = file("scripts/optimize-machine.sh")
destination = "/tmp/optimize-machine.sh"
}
provisioner "shell" {
inline = ["sudo sh /tmp/optimize-machine.sh"]
}
provisioner "shell" {
inline = ["sudo rm -rf /tmp/*"]
}
}

44
ci/darwin/image.pkr.hcl Normal file
View File

@@ -0,0 +1,44 @@
# Generates a macOS VM with software installed to build and test Bun.
source "tart-cli" "bun-darwin-aarch64" {
vm_name = "bun-darwin-aarch64-${local.release.distro}-${local.release.release}"
vm_base_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
cpu_count = local.cpu_count
memory_gb = local.memory_gb
disk_size_gb = local.disk_size_gb
ssh_username = local.username
ssh_password = local.password
ssh_timeout = "120s"
headless = true
}
build {
sources = ["source.tart-cli.bun-darwin-aarch64"]
provisioner "file" {
content = file("../../scripts/bootstrap.sh")
destination = "/tmp/bootstrap.sh"
}
provisioner "shell" {
inline = ["CI=true sh /tmp/bootstrap.sh"]
}
provisioner "file" {
source = "darwin/plists/"
destination = "/tmp/"
}
provisioner "shell" {
inline = [
"sudo ls /tmp/",
"sudo mv /tmp/*.plist /Library/LaunchDaemons/",
"sudo chown root:wheel /Library/LaunchDaemons/*.plist",
"sudo chmod 644 /Library/LaunchDaemons/*.plist",
]
}
provisioner "shell" {
inline = ["sudo rm -rf /tmp/*"]
}
}

View File

@@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.buildkite.buildkite-agent</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/buildkite-agent</string>
<string>start</string>
</array>
<key>KeepAlive</key>
<dict>
<key>SuccessfulExit</key>
<false />
</dict>
<key>RunAtLoad</key>
<true />
<key>StandardOutPath</key>
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
<key>StandardErrorPath</key>
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
<key>EnvironmentVariables</key>
<dict>
<key>BUILDKITE_AGENT_CONFIG</key>
<string>/etc/buildkite-agent/buildkite-agent.cfg</string>
</dict>
<key>LimitLoadToSessionType</key>
<array>
<string>Aqua</string>
<string>LoginWindow</string>
<string>Background</string>
<string>StandardIO</string>
<string>System</string>
</array>
</dict>
</plist>

View File

@@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.tailscale.tailscaled</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/tailscale</string>
<string>up</string>
<string>--ssh</string>
<string>--authkey</string>
<string>${TAILSCALE_AUTHKEY}</string>
</array>
<key>RunAtLoad</key>
<true />
</dict>
</plist>

View File

@@ -0,0 +1,16 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.tailscale.tailscaled</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/tailscaled</string>
</array>
<key>RunAtLoad</key>
<true />
</dict>
</plist>

124
ci/darwin/scripts/boot-image.sh Executable file
View File

@@ -0,0 +1,124 @@
#!/bin/sh
# This script generates the boot commands for the macOS installer GUI.
# It is run on your local machine, not inside the VM.
# Sources:
# - https://github.com/cirruslabs/macos-image-templates/blob/master/templates/vanilla-sequoia.pkr.hcl
if ! [ "${release}" ] || ! [ "${username}" ] || ! [ "${password}" ]; then
echo "Script must be run with variables: release, username, and password" >&2
exit 1
fi
# Hello, hola, bonjour, etc.
echo "<wait120s><spacebar>"
# Select Your Country and Region
echo "<wait30s>italiano<esc>english<enter>"
echo "<wait30s>united states<leftShiftOn><tab><leftShiftOff><spacebar>"
# Written and Spoken Languages
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Accessibility
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Data & Privacy
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Migration Assistant
echo "<wait30s><tab><tab><tab><spacebar>"
# Sign In with Your Apple ID
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
# Are you sure you want to skip signing in with an Apple ID?
echo "<wait30s><tab><spacebar>"
# Terms and Conditions
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# I have read and agree to the macOS Software License Agreement
echo "<wait30s><tab><spacebar>"
# Create a Computer Account
echo "<wait30s>${username}<tab><tab>${password}<tab>${password}<tab><tab><tab><spacebar>"
# Enable Location Services
echo "<wait60s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Are you sure you don't want to use Location Services?
echo "<wait30s><tab><spacebar>"
# Select Your Time Zone
echo "<wait30s><tab>UTC<enter><leftShiftOn><tab><leftShiftOff><spacebar>"
# Analytics
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Screen Time
echo "<wait30s><tab><spacebar>"
# Siri
echo "<wait30s><tab><spacebar><leftShiftOn><tab><leftShiftOff><spacebar>"
# Choose Your Look
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
if [ "${release}" = "13" ] || [ "${release}" = "14" ]; then
# Enable Voice Over
echo "<wait30s><leftAltOn><f5><leftAltOff><wait5s>v"
else
# Welcome to Mac
echo "<wait30s><spacebar>"
# Enable Keyboard navigation
echo "<wait30s><leftAltOn><spacebar><leftAltOff>Terminal<enter>"
echo "<wait30s>defaults write NSGlobalDomain AppleKeyboardUIMode -int 3<enter>"
echo "<wait30s><leftAltOn>q<leftAltOff>"
fi
# Now that the installation is done, open "System Settings"
echo "<wait30s><leftAltOn><spacebar><leftAltOff>System Settings<enter>"
# Navigate to "Sharing"
echo "<wait30s><leftAltOn>f<leftAltOff>sharing<enter>"
if [ "${release}" = "13" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><down><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><spacebar>"
# Open "Remote Login" details
echo "<wait30s><tab><spacebar>"
# Enable "Full Disk Access"
echo "<wait30s><tab><spacebar>"
# Click "Done"
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
# Disable Voice Over
echo "<leftAltOn><f5><leftAltOff>"
elif [ "${release}" = "14" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
# Disable Voice Over
echo "<wait30s><leftAltOn><f5><leftAltOff>"
elif [ "${release}" = "15" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
fi
# Quit System Settings
echo "<wait30s><leftAltOn>q<leftAltOff>"

View File

@@ -0,0 +1,122 @@
#!/bin/sh
# This script optimizes macOS for virtualized environments.
# It disables things like spotlight, screen saver, and sleep.
# Sources:
# - https://github.com/sickcodes/osx-optimizer
# - https://github.com/koding88/MacBook-Optimization-Script
# - https://www.macstadium.com/blog/simple-optimizations-for-macos-and-ios-build-agents
if [ "$(id -u)" != "0" ]; then
echo "This script must be run using sudo." >&2
exit 1
fi
execute() {
echo "$ $@" >&2
if ! "$@"; then
echo "Command failed: $@" >&2
exit 1
fi
}
disable_software_update() {
execute softwareupdate --schedule off
execute defaults write com.apple.SoftwareUpdate AutomaticDownload -bool false
execute defaults write com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false
execute defaults write com.apple.SoftwareUpdate ConfigDataInstall -int 0
execute defaults write com.apple.SoftwareUpdate CriticalUpdateInstall -int 0
execute defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 0
execute defaults write com.apple.SoftwareUpdate AutomaticDownload -int 0
execute defaults write com.apple.commerce AutoUpdate -bool false
execute defaults write com.apple.commerce AutoUpdateRestartRequired -bool false
}
disable_spotlight() {
execute mdutil -i off -a
execute mdutil -E /
}
disable_siri() {
execute launchctl unload -w /System/Library/LaunchAgents/com.apple.Siri.agent.plist
execute defaults write com.apple.Siri StatusMenuVisible -bool false
execute defaults write com.apple.Siri UserHasDeclinedEnable -bool true
execute defaults write com.apple.assistant.support "Assistant Enabled" 0
}
disable_sleep() {
execute systemsetup -setsleep Never
execute systemsetup -setcomputersleep Never
execute systemsetup -setdisplaysleep Never
execute systemsetup -setharddisksleep Never
}
disable_screen_saver() {
execute defaults write com.apple.screensaver loginWindowIdleTime 0
execute defaults write com.apple.screensaver idleTime 0
}
disable_screen_lock() {
execute defaults write com.apple.loginwindow DisableScreenLock -bool true
}
disable_wallpaper() {
execute defaults write com.apple.loginwindow DesktopPicture ""
}
disable_application_state() {
execute defaults write com.apple.loginwindow TALLogoutSavesState -bool false
}
disable_accessibility() {
execute defaults write com.apple.Accessibility DifferentiateWithoutColor -int 1
execute defaults write com.apple.Accessibility ReduceMotionEnabled -int 1
execute defaults write com.apple.universalaccess reduceMotion -int 1
execute defaults write com.apple.universalaccess reduceTransparency -int 1
}
disable_dashboard() {
execute defaults write com.apple.dashboard mcx-disabled -boolean YES
execute killall Dock
}
disable_animations() {
execute defaults write NSGlobalDomain NSAutomaticWindowAnimationsEnabled -bool false
execute defaults write -g QLPanelAnimationDuration -float 0
execute defaults write com.apple.finder DisableAllAnimations -bool true
}
disable_time_machine() {
execute tmutil disable
}
enable_performance_mode() {
# https://support.apple.com/en-us/101992
if ! [ $(nvram boot-args 2>/dev/null | grep -q serverperfmode) ]; then
execute nvram boot-args="serverperfmode=1 $(nvram boot-args 2>/dev/null | cut -f 2-)"
fi
}
add_terminal_to_desktop() {
execute ln -sf /System/Applications/Utilities/Terminal.app ~/Desktop/Terminal
}
main() {
disable_software_update
disable_spotlight
disable_siri
disable_sleep
disable_screen_saver
disable_screen_lock
disable_wallpaper
disable_application_state
disable_accessibility
disable_dashboard
disable_animations
disable_time_machine
enable_performance_mode
add_terminal_to_desktop
}
main

View File

@@ -0,0 +1,78 @@
#!/bin/sh
# This script generates a /etc/kcpassword file to enable auto-login on macOS.
# Yes, this stores your password in plain text. Do NOT do this on your local machine.
# Sources:
# - https://github.com/xfreebird/kcpassword/blob/master/kcpassword
if [ "$(id -u)" != "0" ]; then
echo "This script must be run using sudo." >&2
exit 1
fi
execute() {
echo "$ $@" >&2
if ! "$@"; then
echo "Command failed: $@" >&2
exit 1
fi
}
kcpassword() {
passwd="$1"
key="7d 89 52 23 d2 bc dd ea a3 b9 1f"
passwd_hex=$(printf "%s" "$passwd" | xxd -p | tr -d '\n')
key_len=33
passwd_len=${#passwd_hex}
remainder=$((passwd_len % key_len))
if [ $remainder -ne 0 ]; then
padding=$((key_len - remainder))
passwd_hex="${passwd_hex}$(printf '%0*x' $((padding / 2)) 0)"
fi
result=""
i=0
while [ $i -lt ${#passwd_hex} ]; do
for byte in $key; do
[ $i -ge ${#passwd_hex} ] && break
p="${passwd_hex:$i:2}"
r=$(printf '%02x' $((0x$p ^ 0x$byte)))
result="${result}${r}"
i=$((i + 2))
done
done
echo "$result"
}
login() {
username="$1"
password="$2"
enable_passwordless_sudo() {
execute mkdir -p /etc/sudoers.d/
echo "${username} ALL=(ALL) NOPASSWD: ALL" | EDITOR=tee execute visudo "/etc/sudoers.d/${username}-nopasswd"
}
enable_auto_login() {
echo "00000000: 1ced 3f4a bcbc ba2c caca 4e82" | execute xxd -r - /etc/kcpassword
execute defaults write /Library/Preferences/com.apple.loginwindow autoLoginUser "${username}"
}
disable_screen_lock() {
execute sysadminctl -screenLock off -password "${password}"
}
enable_passwordless_sudo
enable_auto_login
disable_screen_lock
}
if [ $# -ne 2 ]; then
echo "Usage: $0 <username> <password>" >&2
exit 1
fi
login "$@"

View File

@@ -0,0 +1,78 @@
packer {
required_plugins {
tart = {
version = ">= 1.12.0"
source = "github.com/cirruslabs/tart"
}
external = {
version = ">= 0.0.2"
source = "github.com/joomcode/external"
}
}
}
variable "release" {
type = number
default = 13
}
variable "username" {
type = string
default = "admin"
}
variable "password" {
type = string
default = "admin"
}
variable "cpu_count" {
type = number
default = 2
}
variable "memory_gb" {
type = number
default = 4
}
variable "disk_size_gb" {
type = number
default = 50
}
locals {
sequoia = {
tier = 1
distro = "sequoia"
release = "15"
ipsw = "https://updates.cdn-apple.com/2024FallFCS/fullrestores/062-78489/BDA44327-C79E-4608-A7E0-455A7E91911F/UniversalMac_15.0_24A335_Restore.ipsw"
}
sonoma = {
tier = 2
distro = "sonoma"
release = "14"
ipsw = "https://updates.cdn-apple.com/2023FallFCS/fullrestores/042-54934/0E101AD6-3117-4B63-9BF1-143B6DB9270A/UniversalMac_14.0_23A344_Restore.ipsw"
}
ventura = {
tier = 2
distro = "ventura"
release = "13"
ipsw = "https://updates.cdn-apple.com/2022FallFCS/fullrestores/012-92188/2C38BCD1-2BFF-4A10-B358-94E8E28BE805/UniversalMac_13.0_22A380_Restore.ipsw"
}
releases = {
15 = local.sequoia
14 = local.sonoma
13 = local.ventura
}
release = local.releases[var.release]
username = var.username
password = var.password
cpu_count = var.cpu_count
memory_gb = var.memory_gb
disk_size_gb = var.disk_size_gb
}

27
ci/package.json Normal file
View File

@@ -0,0 +1,27 @@
{
"private": true,
"scripts": {
"bootstrap": "brew install gh jq cirruslabs/cli/tart cirruslabs/cli/sshpass hashicorp/tap/packer && packer init darwin",
"login": "gh auth token | tart login ghcr.io --username $(gh api user --jq .login) --password-stdin",
"fetch:image-name": "echo ghcr.io/oven-sh/bun-vm",
"fetch:darwin-version": "echo 1",
"fetch:macos-version": "sw_vers -productVersion | cut -d. -f1",
"fetch:script-version": "cat ../scripts/bootstrap.sh | grep 'v=' | sed 's/v=\"//;s/\"//' | head -n 1",
"build:darwin-aarch64-vanilla": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=$(bun fetch:macos-version) darwin/",
"build:darwin-aarch64-vanilla-15": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=15 darwin/",
"build:darwin-aarch64-vanilla-14": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=14 darwin/",
"build:darwin-aarch64-vanilla-13": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=13 darwin/",
"build:darwin-aarch64": "packer build '-only=*.bun-darwin-aarch64' -var release=$(bun fetch:macos-version) darwin/",
"build:darwin-aarch64-15": "packer build '-only=*.bun-darwin-aarch64' -var release=15 darwin/",
"build:darwin-aarch64-14": "packer build '-only=*.bun-darwin-aarch64' -var release=14 darwin/",
"build:darwin-aarch64-13": "packer build '-only=*.bun-darwin-aarch64' -var release=13 darwin/",
"publish:darwin-aarch64-vanilla": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-vanilla-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-15": "tart push bun-darwin-aarch64-vanilla-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sequoia-15-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-14": "tart push bun-darwin-aarch64-vanilla-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sonoma-14-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-13": "tart push bun-darwin-aarch64-vanilla-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-vanilla-ventura-13-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-15": "tart push bun-darwin-aarch64-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-sequoia-15-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-14": "tart push bun-darwin-aarch64-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-sonoma-14-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-13": "tart push bun-darwin-aarch64-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-ventura-13-v$(bun fetch:script-version)\""
}
}

View File

@@ -79,7 +79,7 @@ endif()
optionx(CANARY_REVISION STRING "The canary revision of the build" DEFAULT ${DEFAULT_CANARY_REVISION})
if(RELEASE AND LINUX)
if(RELEASE AND LINUX AND CI)
set(DEFAULT_LTO ON)
else()
set(DEFAULT_LTO OFF)

View File

@@ -1,6 +1,11 @@
# https://clang.llvm.org/docs/ClangFormat.html
set(CLANG_FORMAT_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES})
file(GLOB BUN_H_SOURCES LIST_DIRECTORIES false ${CONFIGURE_DEPENDS}
${CWD}/src/bun.js/bindings/*.h
${CWD}/src/bun.js/modules/*.h
)
set(CLANG_FORMAT_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES} ${BUN_H_SOURCES})
register_command(
TARGET

View File

@@ -482,13 +482,16 @@ endif()
set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
set(IS_ARM64 ON)
if(APPLE)
set(ZIG_CPU "apple_m1")
else()
set(ZIG_CPU "native")
endif()
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64")
set(IS_X86_64 ON)
if(ENABLE_BASELINE)
set(ZIG_CPU "nehalem")
else()
@@ -846,6 +849,29 @@ else()
set(LLD_NAME lld-${LLVM_VERSION_MAJOR})
endif()
if (IS_ARM64)
set(ARCH_WRAP_FLAGS
-Wl,--wrap=fcntl64
-Wl,--wrap=statx
)
elseif(IS_X86_64)
set(ARCH_WRAP_FLAGS
-Wl,--wrap=fcntl
-Wl,--wrap=fcntl64
-Wl,--wrap=fstat
-Wl,--wrap=fstat64
-Wl,--wrap=fstatat
-Wl,--wrap=fstatat64
-Wl,--wrap=lstat
-Wl,--wrap=lstat64
-Wl,--wrap=mknod
-Wl,--wrap=mknodat
-Wl,--wrap=stat
-Wl,--wrap=stat64
-Wl,--wrap=statx
)
endif()
target_link_options(${bun} PUBLIC
-fuse-ld=${LLD_NAME}
-fno-pic
@@ -856,32 +882,21 @@ else()
-Wl,--as-needed
-Wl,--gc-sections
-Wl,-z,stack-size=12800000
${ARCH_WRAP_FLAGS}
-Wl,--wrap=cosf
-Wl,--wrap=exp
-Wl,--wrap=expf
-Wl,--wrap=fcntl
-Wl,--wrap=fcntl64
-Wl,--wrap=fmod
-Wl,--wrap=fmodf
-Wl,--wrap=fstat
-Wl,--wrap=fstat64
-Wl,--wrap=fstatat
-Wl,--wrap=fstatat64
-Wl,--wrap=log
-Wl,--wrap=log10f
-Wl,--wrap=log2
-Wl,--wrap=log2f
-Wl,--wrap=logf
-Wl,--wrap=lstat
-Wl,--wrap=lstat64
-Wl,--wrap=mknod
-Wl,--wrap=mknodat
-Wl,--wrap=pow
-Wl,--wrap=powf
-Wl,--wrap=sincosf
-Wl,--wrap=sinf
-Wl,--wrap=stat
-Wl,--wrap=stat64
-Wl,--wrap=statx
-Wl,--wrap=tanf
-Wl,--compress-debug-sections=zlib
-Wl,-z,lazy

View File

@@ -18,6 +18,7 @@ register_cmake_command(
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DCARES_SHARED=OFF
-DCARES_BUILD_TOOLS=OFF # this was set to ON?
-DCMAKE_INSTALL_LIBDIR=lib
LIB_PATH
lib
LIBRARIES

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/mimalloc
COMMIT
4c283af60cdae205df5a872530c77e2a6a307d43
82b2c2277a4d570187c07b376557dc5bde81d848
)
set(MIMALLOC_CMAKE_ARGS

View File

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION 12e2f46fb01f7c5cf5a992b9414ddfaab32b7110)
set(WEBKIT_VERSION 9b84f43643eff64ab46daec9b860de262c80f5e2)
endif()
if(WEBKIT_LOCAL)

View File

@@ -402,7 +402,7 @@ Bun.serve({
});
```
### Sever name indication (SNI)
### Server name indication (SNI)
To configure the server name indication (SNI) for the server, set the `serverName` field in the `tls` object.

View File

@@ -179,7 +179,7 @@ proc.kill(); // specify an exit code
The parent `bun` process will not terminate until all child processes have exited. Use `proc.unref()` to detach the child process from the parent.
```
```ts
const proc = Bun.spawn(["bun", "--version"]);
proc.unref();
```

View File

@@ -106,6 +106,57 @@ const ls = Bun.which("ls", {
console.log(ls); // null
```
You can think of this as a builtin alternative to the [`which`](https://www.npmjs.com/package/which) npm package.
## `Bun.randomUUIDv7()`
`Bun.randomUUIDv7()` returns a [UUID v7](https://www.ietf.org/archive/id/draft-peabody-dispatch-new-uuid-format-01.html#name-uuidv7-layout-and-bit-order), which is monotonic and suitable for sorting and databases.
```ts
import { randomUUIDv7 } from "bun";
const id = randomUUIDv7();
// => "0192ce11-26d5-7dc3-9305-1426de888c5a"
```
A UUID v7 is a 128-bit value that encodes the current timestamp, a random value, and a counter. The timestamp is encoded using the first 48 bits, and the random value and counter are encoded using the remaining bits.
The `timestamp` parameter defaults to the current time in milliseconds. When the timestamp changes, the counter is reset to a pseudo-random integer wrapped to 4096. This counter is atomic and thread-safe, meaning that calling `Bun.randomUUIDv7()` from many Workers within the same process at the same timestamp will not produce colliding counter values.
The final 8 bytes of the UUID are a cryptographically secure random value. It uses the same random number generator used by `crypto.randomUUID()` (which comes from BoringSSL, which in turn comes from the platform-specific system random number generator usually provided by the underlying hardware).
```ts
namespace Bun {
function randomUUIDv7(
encoding?: "hex" | "base64" | "base64url" = "hex",
timestamp?: number = Date.now(),
): string;
/**
* If you pass "buffer", you get a 16-byte buffer instead of a string.
*/
function randomUUIDv7(
encoding: "buffer",
timestamp?: number = Date.now(),
): Buffer;
// If you only pass a timestamp, you get a hex string
function randomUUIDv7(timestamp?: number = Date.now()): string;
}
```
You can optionally set encoding to `"buffer"` to get a 16-byte buffer instead of a string. This can sometimes avoid string conversion overhead.
```ts#buffer.ts
const buffer = Bun.randomUUIDv7("buffer");
```
`base64` and `base64url` encodings are also supported when you want a slightly shorter string.
```ts#base64.ts
const base64 = Bun.randomUUIDv7("base64");
const base64url = Bun.randomUUIDv7("base64url");
```
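You can also pass an explicit `timestamp` (in milliseconds) as the second argument, as described above. A minimal sketch, assuming the signature shown here; the timestamp-extraction step is plain arithmetic for illustration, not a Bun API:
```ts#timestamp.ts
const id = Bun.randomUUIDv7("hex", new Date("2024-10-29T00:00:00Z").getTime());
// The first 48 bits (12 hex characters, ignoring dashes) hold the millisecond timestamp.
const millis = parseInt(id.replaceAll("-", "").slice(0, 12), 16);
console.log(new Date(millis).toISOString()); // "2024-10-29T00:00:00.000Z"
```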
## `Bun.peek()`
`Bun.peek(prom: Promise)`

View File

@@ -19,7 +19,7 @@ $ bun add -D drizzle-kit
Create a `.env.local` file and add your [Neon Postgres connection string](https://neon.tech/docs/connect/connect-from-any-app) to it.
```sh
DATBASE_URL=postgresql://username:password@ep-adj-noun-guid.us-east-1.aws.neon.tech/neondb?sslmode=require
DATABASE_URL=postgresql://username:password@ep-adj-noun-guid.us-east-1.aws.neon.tech/neondb?sslmode=require
```
---
@@ -217,4 +217,4 @@ $ bun run index.ts
This example used the Neon serverless driver's SQL-over-HTTP functionality. Neon's serverless driver also exposes `Client` and `Pool` constructors to enable sessions, interactive transactions, and node-postgres compatibility. Refer to [Neon's documentation](https://neon.tech/docs/serverless/serverless-driver) for a complete overview.
Refer to the [Drizzle website](https://orm.drizzle.team/docs/overview) for more documentation on using the Drizzle ORM.
Refer to the [Drizzle website](https://orm.drizzle.team/docs/overview) for more documentation on using the Drizzle ORM.

View File

@@ -2,7 +2,7 @@
name: Build an app with Next.js and Bun
---
Initialize a Next.js app with `create-next-app`. This automatically installs dependencies using `npm`.
Initialize a Next.js app with `create-next-app`. This will scaffold a new Next.js project and automatically install dependencies.
```sh
$ bun create next-app

View File

@@ -37,7 +37,10 @@ Alternatively, you can create a PM2 configuration file. Create a file named `pm2
module.exports = {
name: "app", // Name of your application
script: "index.ts", // Entry point of your application
interpreter: "~/.bun/bin/bun", // Path to the Bun interpreter
interpreter: "bun", // Bun interpreter
env: {
PATH: `${process.env.HOME}/.bun/bin:${process.env.PATH}`, // Add "~/.bun/bin/bun" to PATH
}
};
```
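With this file saved, you can start the app through PM2 as usual (assuming PM2 itself is installed, e.g. with `bun add -g pm2`):
```sh
$ pm2 start pm2.config.js
```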

View File

@@ -2,56 +2,62 @@
name: Build an app with SvelteKit and Bun
---
Use `bun create` to scaffold your app with the `svelte` package. Answer the prompts to select a template and set up your development environment.
Use `sv create my-app` to create a SvelteKit project with the Svelte CLI. Answer the prompts to select a template and set up your development environment.
```sh
$ bun create svelte@latest my-app
┌ Welcome to SvelteKit!
$ bunx sv create my-app
┌ Welcome to the Svelte CLI! (v0.5.7)
◇ Which Svelte app template?
│ SvelteKit demo app
◇ Which template would you like?
│ SvelteKit demo
◇ Add type checking with TypeScript?
│ Yes, using TypeScript syntax
◇ Add type checking with Typescript?
│ Yes, using Typescript syntax
Select additional options (use arrow keys/space bar)
│ None
Project created
└ Your project is ready!
✔ Typescript
Inside Svelte components, use <script lang="ts">
Install community-maintained integrations:
https://github.com/svelte-add/svelte-add
◇ What would you like to add to your project?
│ none
Which package manager do you want to install dependencies with?
│ bun
Successfully installed dependencies
◇ Project next steps ─────────────────────────────────────────────────────╮
│ │
│ 1: cd my-app │
│ 2: git init && git add -A && git commit -m "Initial commit" (optional)
│ 3: bun run dev -- --open │
│ │
│ To close the dev server, hit Ctrl-C │
│ │
│ Stuck? Visit us at https://svelte.dev/chat │
│ │
├──────────────────────────────────────────────────────────────────────────╯
└ You're all set!
```
---
Once the project is initialized, `cd` into the new project and install dependencies.
```sh
$ cd my-app
$ bun install
```
---
Once the project is initialized, `cd` into the new project. You don't need to run `bun install` since the dependencies are already installed.
Then start the development server with `bun --bun run dev`.
To run the dev server with Node.js instead of Bun, you can omit the `--bun` flag.
```sh
$ cd my-app
$ bun --bun run dev
$ vite dev
Forced re-optimization of dependencies
VITE v4.4.9 ready in 895 ms
VITE v5.4.10 ready in 424 ms
➜ Local: http://localhost:5173/
➜ Network: use --host to expose
➜ press h to show help
➜ press h + enter to show help
```
---
@@ -75,16 +81,22 @@ Now, make the following changes to your `svelte.config.js`.
```ts-diff
- import adapter from "@sveltejs/adapter-auto";
+ import adapter from "svelte-adapter-bun";
import { vitePreprocess } from "@sveltejs/kit/vite";
import { vitePreprocess } from '@sveltejs/vite-plugin-svelte';
/** @type {import('@sveltejs/kit').Config} */
const config = {
kit: {
adapter: adapter(),
},
preprocess: vitePreprocess(),
// Consult https://svelte.dev/docs/kit/integrations#preprocessors
// for more information about preprocessors
preprocess: vitePreprocess(),
kit: {
// adapter-auto only supports some environments, see https://svelte.dev/docs/kit/adapter-auto for a list.
// If your environment is not supported, or you settled on a specific environment, switch out the adapter.
// See https://svelte.dev/docs/kit/adapters for more information about adapters.
adapter: adapter()
}
};
export default config;
```
@@ -93,28 +105,21 @@ Now, make the following changes to your `svelte.config.js`.
To build a production bundle:
```sh
$ bun run build
$ vite build
vite v4.4.9 building SSR bundle for production...
transforming (60) node_modules/@sveltejs/kit/src/utils/escape.js
✓ 98 modules transformed.
Generated an empty chunk: "entries/endpoints/waitlist/_server.ts".
vite v4.4.9 building for production...
✓ 92 modules transformed.
Generated an empty chunk: "7".
.svelte-kit/output/client/_app/version.json 0.03 kB │ gzip: 0.05 kB
...
.svelte-kit/output/server/index.js 86.47 kB
Run npm run preview to preview your production build locally.
> Using svelte-adapter-bun
✔ Start server with: bun ./build/index.js
✔ done
✓ built in 7.81s
$ bun --bun run build
$ vite build
vite v5.4.10 building SSR bundle for production...
"confetti" is imported from external module "@neoconfetti/svelte" but never used in "src/routes/sverdle/+page.svelte".
✓ 130 modules transformed.
vite v5.4.10 building for production...
✓ 148 modules transformed.
...
✓ built in 231ms
...
✓ built in 899ms
Run npm run preview to preview your production build locally.
> Using svelte-adapter-bun
✔ Start server with: bun ./build/index.js
✔ done
```

View File

@@ -65,7 +65,7 @@ Some methods are not optimized yet.
### [`node:http2`](https://nodejs.org/api/http2.html)
🟡 Client is supported, but server isn't yet.
🟡 Client & server are implemented (95.25% of gRPC's test suite passes). Missing `options.allowHTTP1`, `options.enableConnectProtocol`, ALTSVC extension, and `http2stream.pushStream`.
### [`node:https`](https://nodejs.org/api/https.html)

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.1.31",
"version": "1.1.34",
"workspaces": [
"./packages/bun-types"
],

View File

@@ -3113,32 +3113,50 @@ declare module "bun" {
* @example \x1b[38;2;100;200;200m
*/
| "ansi"
| "ansi-16"
| "ansi-16m"
/**
* 256 color ANSI color string, for use in terminals which don't support true color
*
* Tries to match closest 24-bit color to 256 color palette
*/
| "ansi256"
| "ansi-256"
/**
* Picks the format that produces the shortest output
*/
| "css"
/**
* Lowercase hex color string without alpha
* @example #aabb11
* @example #ff9800
*/
| "hex"
/**
* Uppercase hex color string without alpha
* @example #FF9800
*/
| "HEX"
/**
* @example hsl(35.764706, 1, 0.5)
*/
| "hsl"
/**
* @example lab(0.72732764, 33.938198, -25.311619)
*/
| "lab"
/**
* @example 16750592
*/
| "number"
/**
* RGB color string without alpha
* rgb(100, 200, 200)
* @example rgb(255, 152, 0)
*/
| "rgb"
/**
* RGB color string with alpha
* rgba(100, 200, 200, 0.5)
* @example rgba(255, 152, 0, 1)
*/
| "rgba"
| "hsl"
| "lab"
| "css"
| "lab"
| "HEX",
| "rgba",
): string | null;
function color(
@@ -3231,7 +3249,7 @@ declare module "bun" {
}
const unsafe: Unsafe;
type DigestEncoding = "hex" | "base64";
type DigestEncoding = "utf8" | "ucs2" | "utf16le" | "latin1" | "ascii" | "base64" | "base64url" | "hex";
/**
* Are ANSI colors enabled for stdin and stdout?
@@ -5446,6 +5464,57 @@ declare module "bun" {
*/
match(str: string): boolean;
}
/**
* Generate a UUIDv7, which is a sequential ID based on the current timestamp with a random component.
*
* When the same timestamp is used multiple times, a monotonically increasing
* counter is appended to allow sorting. The final 8 bytes are
* cryptographically random. When the timestamp changes, the counter resets to
* a pseudo-random integer.
*
* @param encoding "hex" | "base64" | "base64url"
* @param timestamp Unix timestamp in milliseconds, defaults to `Date.now()`
*
* @example
* ```js
* import { randomUUIDv7 } from "bun";
* const array = [
* randomUUIDv7(),
* randomUUIDv7(),
* randomUUIDv7(),
* ]
* [
* "0192ce07-8c4f-7d66-afec-2482b5c9b03c",
* "0192ce07-8c4f-7d67-805f-0f71581b5622",
* "0192ce07-8c4f-7d68-8170-6816e4451a58"
* ]
* ```
*/
function randomUUIDv7(
/**
* @default "hex"
*/
encoding?: "hex" | "base64" | "base64url",
/**
* @default Date.now()
*/
timestamp?: number | Date,
): string;
/**
* Generate a UUIDv7 as a Buffer
*
* @param encoding "buffer"
* @param timestamp Unix timestamp in milliseconds, defaults to `Date.now()`
*/
function randomUUIDv7(
encoding: "buffer",
/**
* @default Date.now()
*/
timestamp?: number | Date,
): Buffer;
}
// extends lib.dom.d.ts

View File

@@ -1673,7 +1673,36 @@ declare global {
groupEnd(): void;
info(...data: any[]): void;
log(...data: any[]): void;
/** Does nothing currently */
/**
* Try to construct a table with the columns of the properties of `tabularData` (or use `properties`) and rows of `tabularData` and log it. Falls back to just
* logging the argument if it can't be parsed as tabular.
*
* ```js
* // These can't be parsed as tabular data
* console.table(Symbol());
* // Symbol()
*
* console.table(undefined);
* // undefined
*
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]);
* // ┌────┬─────┬─────┐
* // │ │ a │ b │
* // ├────┼─────┼─────┤
* // │ 0 │ 1 │ 'Y' │
* // │ 1 │ 'Z' │ 2 │
* // └────┴─────┴─────┘
*
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']);
* // ┌────┬─────┐
* // │ │ a │
* // ├────┼─────┤
* // │ 0 │ 1 │
* // │ 1 │ 'Z' │
* // └────┴─────┘
* ```
* @param properties Alternate properties for constructing the table.
*/
table(tabularData?: any, properties?: string[]): void;
/**
* Begin a timer to log with {@link console.timeEnd}

View File

@@ -212,12 +212,13 @@ void us_socket_context_add_server_name(int ssl, struct us_socket_context_t *cont
}
#endif
}
void us_bun_socket_context_add_server_name(int ssl, struct us_socket_context_t *context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user) {
int us_bun_socket_context_add_server_name(int ssl, struct us_socket_context_t *context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user) {
#ifndef LIBUS_NO_SSL
if (ssl) {
us_bun_internal_ssl_socket_context_add_server_name((struct us_internal_ssl_socket_context_t *) context, hostname_pattern, options, user);
return us_bun_internal_ssl_socket_context_add_server_name((struct us_internal_ssl_socket_context_t *) context, hostname_pattern, options, user);
}
#endif
return 0;
}
/* Remove SNI context */

View File

@@ -855,6 +855,11 @@ create_ssl_context_from_options(struct us_socket_context_options_t options) {
}
}
if (ERR_peek_error() != 0) {
free_ssl_context(ssl_context);
return NULL;
}
/* This must be free'd with free_ssl_context, not SSL_CTX_free */
return ssl_context;
}
@@ -1106,6 +1111,8 @@ int us_verify_callback(int preverify_ok, X509_STORE_CTX *ctx) {
SSL_CTX *create_ssl_context_from_bun_options(
struct us_bun_socket_context_options_t options,
enum create_bun_socket_error_t *err) {
ERR_clear_error();
/* Create the context */
SSL_CTX *ssl_context = SSL_CTX_new(TLS_method());
@@ -1211,6 +1218,9 @@ SSL_CTX *create_ssl_context_from_bun_options(
return NULL;
}
// It may return spurious errors here.
ERR_clear_error();
if (options.reject_unauthorized) {
SSL_CTX_set_verify(ssl_context,
SSL_VERIFY_PEER | SSL_VERIFY_FAIL_IF_NO_PEER_CERT,
@@ -1336,7 +1346,7 @@ void us_internal_ssl_socket_context_add_server_name(
}
}
void us_bun_internal_ssl_socket_context_add_server_name(
int us_bun_internal_ssl_socket_context_add_server_name(
struct us_internal_ssl_socket_context_t *context,
const char *hostname_pattern,
struct us_bun_socket_context_options_t options, void *user) {
@@ -1344,6 +1354,9 @@ void us_bun_internal_ssl_socket_context_add_server_name(
/* Try and construct an SSL_CTX from options */
enum create_bun_socket_error_t err = CREATE_BUN_SOCKET_ERROR_NONE;
SSL_CTX *ssl_context = create_ssl_context_from_bun_options(options, &err);
if (ssl_context == NULL) {
return -1;
}
/* Attach the user data to this context */
if (1 != SSL_CTX_set_ex_data(ssl_context, 0, user)) {
@@ -1351,15 +1364,15 @@ void us_bun_internal_ssl_socket_context_add_server_name(
printf("CANNOT SET EX DATA!\n");
abort();
#endif
return -1;
}
/* We do not want to hold any nullptr's in our SNI tree */
if (ssl_context) {
if (sni_add(context->sni, hostname_pattern, ssl_context)) {
/* If we already had that name, ignore */
free_ssl_context(ssl_context);
}
if (sni_add(context->sni, hostname_pattern, ssl_context)) {
/* If we already had that name, ignore */
free_ssl_context(ssl_context);
}
return 0;
}
void us_internal_ssl_socket_context_on_server_name(

View File

@@ -113,6 +113,9 @@ struct us_loop_t *us_timer_loop(struct us_timer_t *t) {
#if defined(LIBUS_USE_EPOLL)
#include <sys/syscall.h>
#include <signal.h>
#include <errno.h>
static int has_epoll_pwait2 = -1;
#ifndef SYS_epoll_pwait2
@@ -122,18 +125,21 @@ static int has_epoll_pwait2 = -1;
#define SYS_epoll_pwait2 441
#endif
static ssize_t sys_epoll_pwait2(int epfd, struct epoll_event *events, int maxevents, const struct timespec *timeout, const sigset_t *sigmask, size_t sigsetsize) {
return syscall(SYS_epoll_pwait2, epfd, events, maxevents, timeout, sigmask, sigsetsize);
}
extern ssize_t sys_epoll_pwait2(int epfd, struct epoll_event* events, int maxevents,
const struct timespec* timeout, const sigset_t* sigmask);
static int bun_epoll_pwait2(int epfd, struct epoll_event *events, int maxevents, const struct timespec *timeout) {
int ret;
sigset_t mask;
sigemptyset(&mask);
if (has_epoll_pwait2 != 0) {
do {
ret = sys_epoll_pwait2(epfd, events, maxevents, timeout, NULL, 0);
} while (IS_EINTR(ret));
ret = sys_epoll_pwait2(epfd, events, maxevents, timeout, &mask);
} while (ret == -EINTR);
if (LIKELY(ret != -1 || errno != ENOSYS)) {
if (LIKELY(ret != -ENOSYS && ret != -EPERM && ret != -EOPNOTSUPP)) {
return ret;
}
@@ -146,7 +152,7 @@ static int bun_epoll_pwait2(int epfd, struct epoll_event *events, int maxevents,
}
do {
ret = epoll_wait(epfd, events, maxevents, timeoutMs);
ret = epoll_pwait(epfd, events, maxevents, timeoutMs, &mask);
} while (IS_EINTR(ret));
return ret;
@@ -230,6 +236,8 @@ void us_loop_run(struct us_loop_t *loop) {
}
}
extern int Bun__JSC_onBeforeWait(void*);
extern void Bun__JSC_onAfterWait(void*);
void us_loop_run_bun_tick(struct us_loop_t *loop, const struct timespec* timeout) {
if (loop->num_polls == 0)
@@ -246,6 +254,11 @@ void us_loop_run_bun_tick(struct us_loop_t *loop, const struct timespec* timeout
/* Emit pre callback */
us_internal_loop_pre(loop);
int needs_after_wait = 0;
if (loop->data.jsc_vm) {
needs_after_wait = Bun__JSC_onBeforeWait(loop->data.jsc_vm);
}
/* Fetch ready polls */
#ifdef LIBUS_USE_EPOLL
@@ -256,6 +269,10 @@ void us_loop_run_bun_tick(struct us_loop_t *loop, const struct timespec* timeout
} while (IS_EINTR(loop->num_ready_polls));
#endif
if (needs_after_wait) {
Bun__JSC_onAfterWait(loop->data.jsc_vm);
}
/* Iterate ready polls, dispatching them by type */
for (loop->current_ready_poll = 0; loop->current_ready_poll < loop->num_ready_polls; loop->current_ready_poll++) {
struct us_poll_t *poll = GET_READY_POLL(loop, loop->current_ready_poll);

View File

@@ -302,7 +302,7 @@ void us_internal_ssl_socket_context_add_server_name(
us_internal_ssl_socket_context_r context,
const char *hostname_pattern, struct us_socket_context_options_t options,
void *user);
void us_bun_internal_ssl_socket_context_add_server_name(
int us_bun_internal_ssl_socket_context_add_server_name(
us_internal_ssl_socket_context_r context,
const char *hostname_pattern,
struct us_bun_socket_context_options_t options, void *user);

View File

@@ -44,6 +44,7 @@ struct us_internal_loop_data_t {
char parent_tag;
/* We do not care if this flips or not, it doesn't matter */
size_t iteration_nr;
void* jsc_vm;
};
#endif // LOOP_DATA_H

View File

@@ -234,7 +234,7 @@ unsigned short us_socket_context_timestamp(int ssl, us_socket_context_r context)
/* Adds SNI domain and cert in asn1 format */
void us_socket_context_add_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern, struct us_socket_context_options_t options, void *user);
void us_bun_socket_context_add_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user);
int us_bun_socket_context_add_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user);
void us_socket_context_remove_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern);
void us_socket_context_on_server_name(int ssl, us_socket_context_r context, void (*cb)(us_socket_context_r context, const char *hostname));
void *us_socket_server_name_userdata(int ssl, us_socket_r s);

View File

@@ -48,6 +48,7 @@ void us_internal_loop_data_init(struct us_loop_t *loop, void (*wakeup_cb)(struct
loop->data.parent_tag = 0;
loop->data.closed_context_head = 0;
loop->data.jsc_vm = 0;
loop->data.wakeup_async = us_internal_create_async(loop, 1, 0);
us_internal_async_set(loop->data.wakeup_async, (void (*)(struct us_internal_async *)) wakeup_cb);

View File

@@ -16,8 +16,7 @@
* limitations under the License.
*/
// clang-format off
#ifndef UWS_APP_H
#define UWS_APP_H
#include <string>
#include <charconv>
@@ -106,14 +105,17 @@ public:
/* Server name */
TemplatedApp &&addServerName(std::string hostname_pattern, SocketContextOptions options = {}) {
TemplatedApp &&addServerName(std::string hostname_pattern, SocketContextOptions options = {}, bool *success = nullptr) {
/* Do nothing if not even on SSL */
if constexpr (SSL) {
/* First we create a new router for this domain */
auto *domainRouter = new HttpRouter<typename HttpContextData<SSL>::RouterData>();
us_bun_socket_context_add_server_name(SSL, (struct us_socket_context_t *) httpContext, hostname_pattern.c_str(), options, domainRouter);
int result = us_bun_socket_context_add_server_name(SSL, (struct us_socket_context_t *) httpContext, hostname_pattern.c_str(), options, domainRouter);
if (success) {
*success = result == 0;
}
}
return std::move(*this);
@@ -238,6 +240,18 @@ public:
httpContext = HttpContext<SSL>::create(Loop::get(), options);
}
TemplatedApp(HttpContext<SSL> &context) {
httpContext = &context;
}
static TemplatedApp<SSL>* create(SocketContextOptions options = {}) {
auto* httpContext = HttpContext<SSL>::create(Loop::get(), options);
if (!httpContext) {
return nullptr;
}
return new TemplatedApp<SSL>(*httpContext);
}
bool constructorFailed() {
return !httpContext;
}
@@ -604,4 +618,3 @@ typedef TemplatedApp<true> SSLApp;
}
#endif // UWS_APP_H

View File

@@ -48,6 +48,7 @@ struct BackPressure {
void clear() {
pendingRemoval = 0;
buffer.clear();
buffer.shrink_to_fit();
}
void reserve(size_t length) {
buffer.reserve(length + pendingRemoval);

View File

@@ -16,8 +16,7 @@
* limitations under the License.
*/
#ifndef UWS_HTTPCONTEXT_H
#define UWS_HTTPCONTEXT_H
#pragma once
/* This class defines the main behavior of HTTP and emits various events */
@@ -27,6 +26,8 @@
#include "AsyncSocket.h"
#include "WebSocketData.h"
#include <string>
#include <map>
#include <string_view>
#include <iostream>
#include "MoveOnlyFunction.h"
@@ -171,7 +172,7 @@ private:
#endif
/* The return value is entirely up to us to interpret. The HttpParser only care for whether the returned value is DIFFERENT or not from passed user */
void *returnedSocket = httpResponseData->consumePostPadded(data, (unsigned int) length, s, proxyParser, [httpContextData](void *s, HttpRequest *httpRequest) -> void * {
auto [err, returnedSocket] = httpResponseData->consumePostPadded(data, (unsigned int) length, s, proxyParser, [httpContextData](void *s, HttpRequest *httpRequest) -> void * {
/* For every request we reset the timeout and hang until user makes action */
/* Warning: if we are in shutdown state, resetting the timer is a security issue! */
us_socket_timeout(SSL, (us_socket_t *) s, 0);
@@ -180,7 +181,9 @@ private:
HttpResponseData<SSL> *httpResponseData = (HttpResponseData<SSL> *) us_socket_ext(SSL, (us_socket_t *) s);
httpResponseData->offset = 0;
/* Are we not ready for another request yet? Terminate the connection. */
/* Are we not ready for another request yet? Terminate the connection.
* Important for denying async pipelining until, if ever, we want to support it.
* Otherwise requests can get mixed up on the same connection. We still support sync pipelining. */
if (httpResponseData->state & HttpResponseData<SSL>::HTTP_RESPONSE_PENDING) {
us_socket_close(SSL, (us_socket_t *) s, 0, nullptr);
return nullptr;
@@ -280,10 +283,6 @@ private:
}
}
return user;
}, [](void *user) {
/* Close any socket on HTTP errors */
us_socket_close(SSL, (us_socket_t *) user, 0, nullptr);
return nullptr;
});
/* Mark that we are no longer parsing Http */
@@ -291,6 +290,9 @@ private:
/* If we got fullptr that means the parser wants us to close the socket from error (same as calling the errorHandler) */
if (returnedSocket == FULLPTR) {
/* For errors, we only deliver them "at most once". We don't care if they get halfways delivered or not. */
us_socket_write(SSL, s, httpErrorResponses[err].data(), (int) httpErrorResponses[err].length(), false);
us_socket_shutdown(SSL, s);
/* Close any socket on HTTP errors */
us_socket_close(SSL, s, 0, nullptr);
/* This just makes the following code act as if the socket was closed from error inside the parser. */
@@ -299,9 +301,8 @@ private:
/* We need to uncork in all cases, except for nullptr (closed socket, or upgraded socket) */
if (returnedSocket != nullptr) {
us_socket_t* returnedSocketPtr = (us_socket_t*) returnedSocket;
/* We don't want open sockets to keep the event loop alive between HTTP requests */
us_socket_unref(returnedSocketPtr);
us_socket_unref((us_socket_t *) returnedSocket);
/* Timeout on uncork failure */
auto [written, failed] = ((AsyncSocket<SSL> *) returnedSocket)->uncork();
@@ -321,7 +322,7 @@ private:
}
}
}
return returnedSocketPtr;
return (us_socket_t *) returnedSocket;
}
/* If we upgraded, check here (differ between nullptr close and nullptr upgrade) */
@@ -483,10 +484,27 @@ public:
return;
}
httpContextData->currentRouter->add(methods, pattern, [handler = std::move(handler)](auto *r) mutable {
/* Record this route's parameter offsets */
std::map<std::string, unsigned short, std::less<>> parameterOffsets;
unsigned short offset = 0;
for (unsigned int i = 0; i < pattern.length(); i++) {
if (pattern[i] == ':') {
i++;
unsigned int start = i;
while (i < pattern.length() && pattern[i] != '/') {
i++;
}
parameterOffsets[std::string(pattern.data() + start, i - start)] = offset;
//std::cout << "<" << std::string(pattern.data() + start, i - start) << "> is offset " << offset;
offset++;
}
}
httpContextData->currentRouter->add(methods, pattern, [handler = std::move(handler), parameterOffsets = std::move(parameterOffsets)](auto *r) mutable {
auto user = r->getUserData();
user.httpRequest->setYield(false);
user.httpRequest->setParameters(r->getParameters());
user.httpRequest->setParameterOffsets(&parameterOffsets);
/* Middleware? Automatically respond to expectations */
std::string_view expect = user.httpRequest->getHeader("expect");
@@ -528,4 +546,4 @@ public:
}
#endif // UWS_HTTPCONTEXT_H
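The pattern scan added above maps each ':name' segment of a route pattern to a positional offset, so a parameter can later be looked up by name rather than by index. A rough TypeScript sketch of the same loop (illustrative only, not part of uWS):

function parameterOffsets(pattern: string): Map<string, number> {
  const offsets = new Map<string, number>();
  let offset = 0;
  for (let i = 0; i < pattern.length; i++) {
    if (pattern[i] === ":") {
      i++;
      const start = i;
      // A parameter name runs until the next '/' or the end of the pattern.
      while (i < pattern.length && pattern[i] !== "/") i++;
      offsets.set(pattern.slice(start, i), offset++);
    }
  }
  return offsets;
}

// parameterOffsets("/users/:id/posts/:postId") => Map { "id" => 0, "postId" => 1 }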

View File

@@ -0,0 +1,53 @@
/*
* Authored by Alex Hultman, 2018-2023.
* Intellectual property of third-party.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef UWS_HTTP_ERRORS
#define UWS_HTTP_ERRORS
#include <string_view>
namespace uWS {
/* Possible errors from http parsing */
enum HttpError {
HTTP_ERROR_505_HTTP_VERSION_NOT_SUPPORTED = 1,
HTTP_ERROR_431_REQUEST_HEADER_FIELDS_TOO_LARGE = 2,
HTTP_ERROR_400_BAD_REQUEST = 3
};
#ifndef UWS_HTTPRESPONSE_NO_WRITEMARK
/* Returned parser errors match this LUT. */
static const std::string_view httpErrorResponses[] = {
"", /* Zeroth place is no error so don't use it */
"HTTP/1.1 505 HTTP Version Not Supported\r\nConnection: close\r\n\r\n<h1>HTTP Version Not Supported</h1><p>This server does not support HTTP/1.0.</p><hr><i>uWebSockets/20 Server</i>",
"HTTP/1.1 431 Request Header Fields Too Large\r\nConnection: close\r\n\r\n<h1>Request Header Fields Too Large</h1><hr><i>uWebSockets/20 Server</i>",
"HTTP/1.1 400 Bad Request\r\nConnection: close\r\n\r\n<h1>Bad Request</h1><hr><i>uWebSockets/20 Server</i>",
};
#else
/* Anonymized pages */
static const std::string_view httpErrorResponses[] = {
"", /* Zeroth place is no error so don't use it */
"HTTP/1.1 505 HTTP Version Not Supported\r\nConnection: close\r\n\r\n",
"HTTP/1.1 431 Request Header Fields Too Large\r\nConnection: close\r\n\r\n",
"HTTP/1.1 400 Bad Request\r\nConnection: close\r\n\r\n"
};
#endif
}
#endif

View File

@@ -0,0 +1,42 @@
/*
* Authored by Alex Hultman, 2018-2023.
* Intellectual property of third-party.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma once
#include <string_view>
namespace uWS {
/* Possible errors from http parsing */
enum HttpError {
HTTP_ERROR_505_HTTP_VERSION_NOT_SUPPORTED = 1,
HTTP_ERROR_431_REQUEST_HEADER_FIELDS_TOO_LARGE = 2,
HTTP_ERROR_400_BAD_REQUEST = 3
};
/* Anonymized pages */
static const std::string_view httpErrorResponses[] = {
"", /* Zeroth place is no error so don't use it */
"HTTP/1.1 505 HTTP Version Not Supported\r\nConnection: close\r\n\r\n",
"HTTP/1.1 431 Request Header Fields Too Large\r\nConnection: close\r\n\r\n",
"HTTP/1.1 400 Bad Request\r\nConnection: close\r\n\r\n"
};
}

File diff suppressed because it is too large

View File

@@ -15,8 +15,7 @@
* limitations under the License.
*/
// clang-format off
#ifndef UWS_HTTPRESPONSEDATA_H
#define UWS_HTTPRESPONSEDATA_H
#pragma once
/* This data belongs to the HttpResponse */
@@ -106,4 +105,4 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
}
#endif // UWS_HTTPRESPONSEDATA_H

View File

@@ -15,9 +15,7 @@
* limitations under the License.
*/
#ifndef UWS_TOPICTREE_H
#define UWS_TOPICTREE_H
#pragma once
#include <map>
#include <list>
#include <iostream>
@@ -366,4 +364,4 @@ public:
}
#endif

View File

@@ -0,0 +1,51 @@
const body = process.env.GITHUB_ISSUE_BODY;
const SENTRY_AUTH_TOKEN = process.env.SENTRY_AUTH_TOKEN;
if (!body || !SENTRY_AUTH_TOKEN) {
throw new Error("Missing environment variables");
}
const id = body.indexOf("<!-- sentry_id: ");
const endIdLine = body.indexOf(" -->", id + 1);
if (!(id > -1 && endIdLine > -1)) {
throw new Error("Missing sentry_id");
}
const sentryId = body.slice(id + "<!-- sentry_id: ".length, endIdLine).trim();
if (!sentryId) {
throw new Error("Missing sentry_id");
}
const response = await fetch(`https://sentry.io/api/0/organizations/4507155222364160/eventids/${sentryId}/`, {
headers: {
Authorization: `Bearer ${SENTRY_AUTH_TOKEN}`,
},
});
if (!response.ok) {
throw new Error(`Failed to fetch Sentry event: ${response.statusText}`);
}
const json = await response.json();
const groupId = json?.groupId;
if (!groupId) {
throw new Error("Missing groupId");
}
const issueResponse = await fetch(`https://sentry.io/api/0/issues/${groupId}/`, {
headers: {
Authorization: `Bearer ${SENTRY_AUTH_TOKEN}`,
},
});
if (!issueResponse.ok) {
throw new Error(`Failed to fetch Sentry issue: ${issueResponse.statusText}`);
}
const { shortId, permalink } = await issueResponse.json();
if (!shortId || !permalink) {
throw new Error("Missing shortId or permalink");
}
console.log(`Sentry ID: ${shortId}`);
console.log(`Sentry permalink: ${permalink}`);
await Bun.write("sentry-id.txt", shortId);
await Bun.write("sentry-link.txt", permalink);
export {};
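The script above assumes the crash-report comment embeds the Sentry event id in a hidden HTML comment, which is what the indexOf/slice dance extracts. A small sketch of that assumption (the body and id below are made up):

const exampleBody = [
  "bun crashed while running a script.",
  "<!-- sentry_id: 0123456789abcdef0123456789abcdef -->",
].join("\n");

const start = exampleBody.indexOf("<!-- sentry_id: ");
const end = exampleBody.indexOf(" -->", start + 1);
const sentryId = exampleBody.slice(start + "<!-- sentry_id: ".length, end).trim();
console.log(sentryId); // "0123456789abcdef0123456789abcdef"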

scripts/bootstrap.sh Executable file
View File

@@ -0,0 +1,714 @@
#!/bin/sh
# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.
# If this script does not work on your machine, please open an issue:
# https://github.com/oven-sh/bun/issues
# If you need to make a change to this script, such as upgrading a dependency,
# increment the version number to indicate that a new image should be built.
# Otherwise, the existing image will be retroactively updated.
v="3"
pid=$$
script="$(realpath "$0")"
print() {
echo "$@"
}
error() {
echo "error: $@" >&2
kill -s TERM "$pid"
exit 1
}
execute() {
print "$ $@" >&2
if ! "$@"; then
error "Command failed: $@"
fi
}
execute_sudo() {
if [ "$sudo" = "1" ]; then
execute "$@"
else
execute sudo "$@"
fi
}
execute_non_root() {
if [ "$sudo" = "1" ]; then
execute sudo -u "$user" "$@"
else
execute "$@"
fi
}
which() {
command -v "$1"
}
require() {
path="$(which "$1")"
if ! [ -f "$path" ]; then
error "Command \"$1\" is required, but is not installed."
fi
echo "$path"
}
fetch() {
curl=$(which curl)
if [ -f "$curl" ]; then
execute "$curl" -fsSL "$1"
else
wget=$(which wget)
if [ -f "$wget" ]; then
execute "$wget" -qO- "$1"
else
error "Command \"curl\" or \"wget\" is required, but is not installed."
fi
fi
}
download_file() {
url="$1"
filename="${2:-$(basename "$url")}"
path="$(mktemp -d)/$filename"
fetch "$url" > "$path"
print "$path"
}
compare_version() {
if [ "$1" = "$2" ]; then
echo "0"
elif [ "$1" = "$(echo -e "$1\n$2" | sort -V | head -n1)" ]; then
echo "-1"
else
echo "1"
fi
}
append_to_file() {
file="$1"
content="$2"
if ! [ -f "$file" ]; then
execute mkdir -p "$(dirname "$file")"
execute touch "$file"
fi
echo "$content" | while read -r line; do
if ! grep -q "$line" "$file"; then
echo "$line" >> "$file"
fi
done
}
append_to_profile() {
content="$1"
profiles=".profile .zprofile .bash_profile .bashrc .zshrc"
for profile in $profiles; do
file="$HOME/$profile"
if [ "$ci" = "1" ] || [ -f "$file" ]; then
append_to_file "$file" "$content"
fi
done
}
append_to_path() {
path="$1"
if ! [ -d "$path" ]; then
error "Could not find directory: \"$path\""
fi
append_to_profile "export PATH=\"$path:\$PATH\""
export PATH="$path:$PATH"
}
check_system() {
uname="$(require uname)"
os="$($uname -s)"
case "$os" in
Linux*) os="linux" ;;
Darwin*) os="darwin" ;;
*) error "Unsupported operating system: $os" ;;
esac
arch="$($uname -m)"
case "$arch" in
x86_64 | x64 | amd64) arch="x64" ;;
aarch64 | arm64) arch="aarch64" ;;
*) error "Unsupported architecture: $arch" ;;
esac
kernel="$(uname -r)"
if [ "$os" = "darwin" ]; then
sw_vers="$(which sw_vers)"
if [ -f "$sw_vers" ]; then
distro="$($sw_vers -productName)"
release="$($sw_vers -productVersion)"
fi
if [ "$arch" = "x64" ]; then
sysctl="$(which sysctl)"
if [ -f "$sysctl" ] && [ "$($sysctl -n sysctl.proc_translated 2>/dev/null)" = "1" ]; then
arch="aarch64"
rosetta="1"
fi
fi
fi
if [ "$os" = "linux" ] && [ -f /etc/os-release ]; then
. /etc/os-release
if [ -n "$ID" ]; then
distro="$ID"
fi
if [ -n "$VERSION_ID" ]; then
release="$VERSION_ID"
fi
fi
if [ "$os" = "linux" ]; then
rpm="$(which rpm)"
if [ -f "$rpm" ]; then
glibc="$($rpm -q glibc --queryformat '%{VERSION}\n')"
else
ldd="$(which ldd)"
awk="$(which awk)"
if [ -f "$ldd" ] && [ -f "$awk" ]; then
glibc="$($ldd --version | $awk 'NR==1{print $NF}')"
fi
fi
fi
if [ "$os" = "darwin" ]; then
brew="$(which brew)"
pm="brew"
fi
if [ "$os" = "linux" ]; then
apt="$(which apt-get)"
if [ -f "$apt" ]; then
pm="apt"
else
dnf="$(which dnf)"
if [ -f "$dnf" ]; then
pm="dnf"
else
yum="$(which yum)"
if [ -f "$yum" ]; then
pm="yum"
fi
fi
fi
if [ -z "$pm" ]; then
error "No package manager found. (apt, dnf, yum)"
fi
fi
if [ -n "$SUDO_USER" ]; then
user="$SUDO_USER"
else
whoami="$(which whoami)"
if [ -f "$whoami" ]; then
user="$($whoami)"
else
error "Could not determine the current user, set \$USER."
fi
fi
id="$(which id)"
if [ -f "$id" ] && [ "$($id -u)" = "0" ]; then
sudo=1
fi
if [ "$CI" = "true" ]; then
ci=1
fi
print "System information:"
if [ -n "$distro" ]; then
print "| Distro: $distro $release"
fi
print "| Operating system: $os"
print "| Architecture: $arch"
if [ -n "$rosetta" ]; then
print "| Rosetta: true"
fi
if [ -n "$glibc" ]; then
print "| Glibc: $glibc"
fi
print "| Package manager: $pm"
print "| User: $user"
if [ -n "$sudo" ]; then
print "| Sudo: true"
fi
if [ -n "$ci" ]; then
print "| CI: true"
fi
}
package_manager() {
case "$pm" in
apt) DEBIAN_FRONTEND=noninteractive \
execute "$apt" "$@" ;;
dnf) execute dnf "$@" ;;
yum) execute "$yum" "$@" ;;
brew)
if ! [ -f "$(which brew)" ]; then
install_brew
fi
execute_non_root brew "$@"
;;
*) error "Unsupported package manager: $pm" ;;
esac
}
update_packages() {
case "$pm" in
apt)
package_manager update
;;
esac
}
check_package() {
case "$pm" in
apt)
apt-cache policy "$1"
;;
dnf | yum | brew)
package_manager info "$1"
;;
*)
error "Unsupported package manager: $pm"
;;
esac
}
install_packages() {
case "$pm" in
apt)
package_manager install --yes --no-install-recommends "$@"
;;
dnf)
package_manager install --assumeyes --nodocs --noautoremove --allowerasing "$@"
;;
yum)
package_manager install -y "$@"
;;
brew)
package_manager install --force --formula "$@"
package_manager link --force --overwrite "$@"
;;
*)
error "Unsupported package manager: $pm"
;;
esac
}
get_version() {
command="$1"
path="$(which "$command")"
if [ -f "$path" ]; then
case "$command" in
go | zig) "$path" version ;;
*) "$path" --version ;;
esac
else
print "not found"
fi
}
install_brew() {
bash="$(require bash)"
script=$(download_file "https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh")
NONINTERACTIVE=1 execute_non_root "$bash" "$script"
case "$arch" in
x64)
append_to_path "/usr/local/bin"
;;
aarch64)
append_to_path "/opt/homebrew/bin"
;;
esac
case "$ci" in
1)
append_to_profile "export HOMEBREW_NO_INSTALL_CLEANUP=1"
append_to_profile "export HOMEBREW_NO_AUTO_UPDATE=1"
append_to_profile "export HOMEBREW_NO_ANALYTICS=1"
;;
esac
}
install_common_software() {
case "$pm" in
apt) install_packages \
apt-transport-https \
software-properties-common
;;
dnf) install_packages \
dnf-plugins-core \
tar
;;
esac
install_packages \
bash \
ca-certificates \
curl \
jq \
htop \
gnupg \
git \
unzip \
wget \
zip
install_rosetta
install_nodejs
install_bun
}
install_nodejs() {
version="${1:-"22"}"
if ! [ "$(compare_version "$glibc" "2.27")" = "1" ]; then
version="16"
fi
case "$pm" in
dnf | yum)
bash="$(require bash)"
script=$(download_file "https://rpm.nodesource.com/setup_$version.x")
execute "$bash" "$script"
;;
apt)
bash="$(require bash)"
script=$(download_file "https://deb.nodesource.com/setup_$version.x")
execute "$bash" "$script"
;;
esac
install_packages nodejs
}
install_bun() {
bash="$(require bash)"
script=$(download_file "https://bun.sh/install")
version="${1:-"latest"}"
case "$version" in
latest)
execute "$bash" "$script"
;;
*)
execute "$bash" "$script" -s "$version"
;;
esac
append_to_path "$HOME/.bun/bin"
}
install_rosetta() {
case "$os" in
darwin)
if ! [ "$(which arch)" ]; then
execute softwareupdate \
--install-rosetta \
--agree-to-license
fi
;;
esac
}
install_build_essentials() {
case "$pm" in
apt) install_packages \
build-essential \
ninja-build \
xz-utils
;;
dnf | yum) install_packages \
ninja-build \
gcc-c++ \
xz
;;
brew) install_packages \
ninja
;;
esac
install_packages \
make \
cmake \
pkg-config \
python3 \
libtool \
ruby \
perl \
golang
install_llvm
install_ccache
install_rust
install_docker
}
llvm_version_exact() {
case "$os" in
linux)
print "16.0.6"
;;
darwin | windows)
print "18.1.8"
;;
esac
}
llvm_version() {
echo "$(llvm_version_exact)" | cut -d. -f1
}
install_llvm() {
case "$pm" in
apt)
bash="$(require bash)"
script=$(download_file "https://apt.llvm.org/llvm.sh")
execute "$bash" "$script" "$(llvm_version)" all
;;
brew)
install_packages "llvm@$(llvm_version)"
;;
esac
}
install_ccache() {
case "$pm" in
apt | brew)
install_packages ccache
;;
esac
}
install_rust() {
sh="$(require sh)"
script=$(download_file "https://sh.rustup.rs")
execute "$sh" "$script" -y
append_to_path "$HOME/.cargo/bin"
}
install_docker() {
case "$pm" in
brew)
if ! [ -d "/Applications/Docker.app" ]; then
package_manager install docker --cask
fi
;;
*)
case "$distro-$release" in
amzn-2 | amzn-1)
execute amazon-linux-extras install docker
;;
amzn-*)
install_packages docker
;;
*)
sh="$(require sh)"
script=$(download_file "https://get.docker.com")
execute "$sh" "$script"
;;
esac
;;
esac
systemctl="$(which systemctl)"
if [ -f "$systemctl" ]; then
execute "$systemctl" enable docker
fi
}
install_ci_dependencies() {
if ! [ "$ci" = "1" ]; then
return
fi
install_tailscale
install_buildkite
}
install_tailscale() {
case "$os" in
linux)
sh="$(require sh)"
script=$(download_file "https://tailscale.com/install.sh")
execute "$sh" "$script"
;;
darwin)
install_packages go
execute_non_root go install tailscale.com/cmd/tailscale{,d}@latest
append_to_path "$HOME/go/bin"
;;
esac
}
install_buildkite() {
home_dir="/var/lib/buildkite-agent"
config_dir="/etc/buildkite-agent"
config_file="$config_dir/buildkite-agent.cfg"
if ! [ -d "$home_dir" ]; then
execute_sudo mkdir -p "$home_dir"
fi
if ! [ -d "$config_dir" ]; then
execute_sudo mkdir -p "$config_dir"
fi
case "$os" in
linux)
getent="$(require getent)"
if [ -z "$("$getent" passwd buildkite-agent)" ]; then
useradd="$(require useradd)"
execute "$useradd" buildkite-agent \
--system \
--no-create-home \
--home-dir "$home_dir"
fi
if [ -n "$("$getent" group docker)" ]; then
usermod="$(require usermod)"
execute "$usermod" -aG docker buildkite-agent
fi
execute chown -R buildkite-agent:buildkite-agent "$home_dir"
execute chown -R buildkite-agent:buildkite-agent "$config_dir"
;;
darwin)
execute_sudo chown -R "$user:admin" "$home_dir"
execute_sudo chown -R "$user:admin" "$config_dir"
;;
esac
if ! [ -f "$config_file" ]; then
cat <<EOF >"$config_file"
# This is generated by scripts/bootstrap.sh
# https://buildkite.com/docs/agent/v3/configuration
name="%hostname-%random"
tags="v=$v,os=$os,arch=$arch,distro=$distro,release=$release,kernel=$kernel,glibc=$glibc"
build-path="$home_dir/builds"
git-mirrors-path="$home_dir/git"
job-log-path="$home_dir/logs"
plugins-path="$config_dir/plugins"
hooks-path="$config_dir/hooks"
no-ssh-keyscan=true
cancel-grace-period=3600000 # 1 hour
enable-job-log-tmpfile=true
experiment="normalised-upload-paths,resolve-commit-after-checkout,agent-api"
EOF
fi
bash="$(require bash)"
script=$(download_file "https://raw.githubusercontent.com/buildkite/agent/main/install.sh")
execute "$bash" "$script"
out_dir="$HOME/.buildkite-agent"
execute_sudo mv -f "$out_dir/bin/buildkite-agent" "/usr/local/bin/buildkite-agent"
execute rm -rf "$out_dir"
}
install_chrome_dependencies() {
# https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#chrome-doesnt-launch-on-linux
# https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#running-puppeteer-in-the-cloud
case "$pm" in
apt)
install_packages \
fonts-liberation \
libatk-bridge2.0-0 \
libatk1.0-0 \
libc6 \
libcairo2 \
libcups2 \
libdbus-1-3 \
libexpat1 \
libfontconfig1 \
libgbm1 \
libgcc1 \
libglib2.0-0 \
libgtk-3-0 \
libnspr4 \
libnss3 \
libpango-1.0-0 \
libpangocairo-1.0-0 \
libstdc++6 \
libx11-6 \
libx11-xcb1 \
libxcb1 \
libxcomposite1 \
libxcursor1 \
libxdamage1 \
libxext6 \
libxfixes3 \
libxi6 \
libxrandr2 \
libxrender1 \
libxss1 \
libxtst6 \
xdg-utils
# Fixes an issue in newer versions of Ubuntu:
# Package 'libasound2' has no installation candidate
if [ "$(check_package "libasound2t64")" ]; then
install_packages libasound2t64
else
install_packages libasound2
fi
;;
dnf | yum)
install_packages \
alsa-lib \
atk \
cups-libs \
gtk3 \
ipa-gothic-fonts \
libXcomposite \
libXcursor \
libXdamage \
libXext \
libXi \
libXrandr \
libXScrnSaver \
libXtst \
pango \
xorg-x11-fonts-100dpi \
xorg-x11-fonts-75dpi \
xorg-x11-fonts-cyrillic \
xorg-x11-fonts-misc \
xorg-x11-fonts-Type1 \
xorg-x11-utils
;;
esac
}
main() {
check_system
update_packages
install_common_software
install_build_essentials
install_chrome_dependencies
install_ci_dependencies
}
main

View File

@@ -130,7 +130,10 @@ function getCachePath(branch) {
const repository = process.env.BUILDKITE_REPO;
const fork = process.env.BUILDKITE_PULL_REQUEST_REPO;
const repositoryKey = (fork || repository).replace(/[^a-z0-9]/gi, "-");
const branchKey = (branch || process.env.BUILDKITE_BRANCH).replace(/[^a-z0-9]/gi, "-");
const branchName = (branch || process.env.BUILDKITE_BRANCH).replace(/[^a-z0-9]/gi, "-");
const branchKey = branchName.startsWith("gh-readonly-queue-")
? branchName.slice(18, branchName.indexOf("-pr-"))
: branchName;
const stepKey = process.env.BUILDKITE_STEP_KEY.replace(/[^a-z0-9]/gi, "-");
return resolve(buildPath, "..", "cache", repositoryKey, branchKey, stepKey);
}
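The extra step above makes GitHub merge-queue builds reuse the cache of the branch they target instead of creating a one-off cache entry per queue branch. A quick sketch of the transformation, with an illustrative branch name:

// After the non-alphanumeric replacement, a merge-queue branch such as
// "gh-readonly-queue/main/pr-12345-deadbeef" becomes
// "gh-readonly-queue-main-pr-12345-deadbeef".
const branchName = "gh-readonly-queue-main-pr-12345-deadbeef";
const branchKey = branchName.startsWith("gh-readonly-queue-")
  ? branchName.slice("gh-readonly-queue-".length, branchName.indexOf("-pr-"))
  : branchName;
console.log(branchKey); // "main"

Note that "gh-readonly-queue-".length is the 18 used by the slice above.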

View File

@@ -1010,9 +1010,16 @@ async function getExecPathFromBuildKite(target) {
const releasePath = join(cwd, "release");
mkdirSync(releasePath, { recursive: true });
const args = ["artifact", "download", "**", releasePath, "--step", target];
const buildId = process.env["BUILDKITE_ARTIFACT_BUILD_ID"];
if (buildId) {
args.push("--build", buildId);
}
await spawnSafe({
command: "buildkite-agent",
args: ["artifact", "download", "**", releasePath, "--step", target],
args,
});
let zipPath;

View File

@@ -676,7 +676,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
}
// There are two parts to this.
// 1. Storing the underyling string.
// 1. Storing the underlying string.
// 2. Making the key accessible at the index.
pub fn putKey(self: *Self, key: anytype, result: *Result) !void {
self.map.mutex.lock();

View File

@@ -124,6 +124,18 @@ pub const Features = struct {
return Formatter{};
}
const JSC = bun.JSC;
pub fn toJS(globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue {
const object = JSC.JSValue.createEmptyObjectWithNullPrototype(globalThis);
inline for (comptime std.meta.declarations(Features)) |decl| {
if (@typeInfo(@TypeOf(@field(Features, decl.name))) == .Int) {
object.put(globalThis, decl.name, JSC.JSValue.jsNumber(@field(Features, decl.name)));
}
}
object.put(globalThis, "concurrentDecompressionCount", JSC.JSValue.jsNumber(bun.http.HTTPClientResult.DecompressionTask.count.load(.monotonic)));
return object;
}
pub const Formatter = struct {
pub fn format(_: Formatter, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
const fields = comptime brk: {
@@ -353,3 +365,5 @@ pub const GenerateHeader = struct {
}
};
};
pub const createInternalStatsObject = Features.toJS;

View File

@@ -55,11 +55,11 @@ pub const KeepAlive = struct {
this.status = .inactive;
if (comptime @TypeOf(event_loop_ctx_) == JSC.EventLoopHandle) {
event_loop_ctx_.loop().subActive(1);
event_loop_ctx_.loop().unref();
return;
}
const event_loop_ctx = JSC.AbstractVM(event_loop_ctx_);
event_loop_ctx.platformEventLoop().subActive(1);
event_loop_ctx.platformEventLoop().unref();
}
/// From another thread, Prevent a poll from keeping the process alive.

View File

@@ -1,79 +0,0 @@
// clang-format off
#include "BakeDevSourceProvider.h"
#include "BakeDevGlobalObject.h"
#include "JavaScriptCore/Completion.h"
#include "JavaScriptCore/Identifier.h"
#include "JavaScriptCore/JSCJSValue.h"
#include "JavaScriptCore/JSCast.h"
#include "JavaScriptCore/JSLock.h"
#include "JavaScriptCore/JSMap.h"
#include "JavaScriptCore/JSModuleLoader.h"
#include "JavaScriptCore/JSString.h"
#include "JavaScriptCore/JSModuleNamespaceObject.h"
namespace Bake {
extern "C" LoadServerCodeResult BakeLoadInitialServerCode(DevGlobalObject* global, BunString source) {
JSC::VM& vm = global->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
String string = "bake://server.js"_s;
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
source.toWTFString(),
origin,
WTFMove(string),
WTF::TextPosition(),
JSC::SourceProviderSourceType::Module
));
JSC::JSString* key = JSC::jsString(vm, string);
global->moduleLoader()->provideFetch(global, key, sourceCode);
RETURN_IF_EXCEPTION(scope, {});
JSC::JSInternalPromise* internalPromise = global->moduleLoader()->loadAndEvaluateModule(global, key, JSC::jsUndefined(), JSC::jsUndefined());
RETURN_IF_EXCEPTION(scope, {});
return { internalPromise, key };
}
extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatch(DevGlobalObject* global, BunString source) {
JSC::VM&vm=global->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
String string = "bake://server.patch.js"_s;
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
source.toWTFString(),
origin,
WTFMove(string),
WTF::TextPosition(),
JSC::SourceProviderSourceType::Program
));
JSC::JSValue result = vm.interpreter.executeProgram(sourceCode, global, global);
RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({}));
RELEASE_ASSERT(result);
return JSC::JSValue::encode(result);
}
extern "C" JSC::EncodedJSValue BakeGetRequestHandlerFromModule(
DevGlobalObject* global,
JSC::JSString* key
) {
JSC::VM&vm = global->vm();
JSC::JSMap* map = JSC::jsCast<JSC::JSMap*>(
global->moduleLoader()->getDirect(
vm, JSC::Identifier::fromString(global->vm(), "registry"_s)
));
JSC::JSValue entry = map->get(global, key);
ASSERT(entry.isObject()); // should have called BakeLoadServerCode and wait for that promise
JSC::JSValue module = entry.getObject()->get(global, JSC::Identifier::fromString(global->vm(), "module"_s));
ASSERT(module.isCell());
JSC::JSModuleNamespaceObject* namespaceObject = global->moduleLoader()->getModuleNamespaceObject(global, module);
ASSERT(namespaceObject);
return JSC::JSValue::encode(namespaceObject->get(global, vm.propertyNames->defaultKeyword));
}
} // namespace Bake

View File

@@ -1,4 +1,4 @@
#include "BakeDevGlobalObject.h"
#include "BakeGlobalObject.h"
#include "JSNextTickQueue.h"
#include "JavaScriptCore/GlobalObjectMethodTable.h"
#include "JavaScriptCore/JSInternalPromise.h"
@@ -14,34 +14,61 @@ extern "C" void BakeInitProcessIdentifier()
}
JSC::JSInternalPromise*
moduleLoaderImportModule(JSC::JSGlobalObject* jsGlobalObject,
bakeModuleLoaderImportModule(JSC::JSGlobalObject* jsGlobalObject,
JSC::JSModuleLoader*, JSC::JSString* moduleNameValue,
JSC::JSValue parameters,
const JSC::SourceOrigin& sourceOrigin)
{
// TODO: forward this to the runtime?
JSC::VM& vm = jsGlobalObject->vm();
WTF::String keyString = moduleNameValue->getString(jsGlobalObject);
auto err = JSC::createTypeError(
jsGlobalObject,
WTF::makeString(
"Dynamic import should have been replaced with a hook into the module runtime"_s));
"Dynamic import to '"_s, keyString,
"' should have been replaced with a hook into the module runtime"_s));
auto* promise = JSC::JSInternalPromise::create(
vm, jsGlobalObject->internalPromiseStructure());
promise->reject(jsGlobalObject, err);
return promise;
}
extern "C" BunString BakeProdResolve(JSC::JSGlobalObject*, BunString a, BunString b);
JSC::Identifier bakeModuleLoaderResolve(JSC::JSGlobalObject* jsGlobal,
JSC::JSModuleLoader* loader, JSC::JSValue key,
JSC::JSValue referrer, JSC::JSValue origin)
{
Bake::GlobalObject* global = jsCast<Bake::GlobalObject*>(jsGlobal);
JSC::VM& vm = global->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
if (global->isProduction()) {
WTF::String keyString = key.toWTFString(global);
RETURN_IF_EXCEPTION(scope, vm.propertyNames->emptyIdentifier);
ASSERT(referrer.isString());
auto refererString = jsCast<JSC::JSString*>(referrer)->value(global);
BunString result = BakeProdResolve(global, Bun::toString(referrer.getString(global)), Bun::toString(keyString));
return JSC::Identifier::fromString(vm, result.toWTFString(BunString::ZeroCopy));
} else {
JSC::throwTypeError(global, scope, "External imports are not allowed in Bun Bake's dev server. This is a bug in Bun's bundler."_s);
return vm.propertyNames->emptyIdentifier;
}
}
#define INHERIT_HOOK_METHOD(name) \
Zig::GlobalObject::s_globalObjectMethodTable.name
const JSC::GlobalObjectMethodTable DevGlobalObject::s_globalObjectMethodTable = {
const JSC::GlobalObjectMethodTable GlobalObject::s_globalObjectMethodTable = {
INHERIT_HOOK_METHOD(supportsRichSourceInfo),
INHERIT_HOOK_METHOD(shouldInterruptScript),
INHERIT_HOOK_METHOD(javaScriptRuntimeFlags),
INHERIT_HOOK_METHOD(queueMicrotaskToEventLoop),
INHERIT_HOOK_METHOD(shouldInterruptScriptBeforeTimeout),
moduleLoaderImportModule,
INHERIT_HOOK_METHOD(moduleLoaderResolve),
bakeModuleLoaderImportModule,
bakeModuleLoaderResolve,
INHERIT_HOOK_METHOD(moduleLoaderFetch),
INHERIT_HOOK_METHOD(moduleLoaderCreateImportMetaProperties),
INHERIT_HOOK_METHOD(moduleLoaderEvaluate),
@@ -58,17 +85,16 @@ const JSC::GlobalObjectMethodTable DevGlobalObject::s_globalObjectMethodTable =
INHERIT_HOOK_METHOD(canCompileStrings),
};
DevGlobalObject*
DevGlobalObject::create(JSC::VM& vm, JSC::Structure* structure,
GlobalObject* GlobalObject::create(JSC::VM& vm, JSC::Structure* structure,
const JSC::GlobalObjectMethodTable* methodTable)
{
DevGlobalObject* ptr = new (NotNull, JSC::allocateCell<DevGlobalObject>(vm))
DevGlobalObject(vm, structure, methodTable);
GlobalObject* ptr = new (NotNull, JSC::allocateCell<GlobalObject>(vm))
GlobalObject(vm, structure, methodTable);
ptr->finishCreation(vm);
return ptr;
}
void DevGlobalObject::finishCreation(JSC::VM& vm)
void GlobalObject::finishCreation(JSC::VM& vm)
{
Base::finishCreation(vm);
ASSERT(inherits(info()));
@@ -77,7 +103,8 @@ void DevGlobalObject::finishCreation(JSC::VM& vm)
extern "C" BunVirtualMachine* Bun__getVM();
// A lot of this function is taken from 'Zig__GlobalObject__create'
extern "C" DevGlobalObject* BakeCreateDevGlobal(DevServer* owner,
// TODO: remove this entire method
extern "C" GlobalObject* BakeCreateDevGlobal(DevServer* owner,
void* console)
{
JSC::VM& vm = JSC::VM::create(JSC::HeapType::Large).leakRef();
@@ -86,11 +113,11 @@ extern "C" DevGlobalObject* BakeCreateDevGlobal(DevServer* owner,
BunVirtualMachine* bunVM = Bun__getVM();
WebCore::JSVMClientData::create(&vm, bunVM);
JSC::Structure* structure = DevGlobalObject::createStructure(vm);
DevGlobalObject* global = DevGlobalObject::create(
vm, structure, &DevGlobalObject::s_globalObjectMethodTable);
JSC::Structure* structure = GlobalObject::createStructure(vm);
GlobalObject* global = GlobalObject::create(
vm, structure, &GlobalObject::s_globalObjectMethodTable);
if (!global)
BUN_PANIC("Failed to create DevGlobalObject");
BUN_PANIC("Failed to create BakeGlobalObject");
global->m_devServer = owner;
global->m_bunVM = bunVM;
@@ -115,4 +142,25 @@ extern "C" DevGlobalObject* BakeCreateDevGlobal(DevServer* owner,
return global;
}
extern "C" GlobalObject* BakeCreateProdGlobal(JSC::VM* vm, void* console)
{
JSC::JSLockHolder locker(vm);
BunVirtualMachine* bunVM = Bun__getVM();
JSC::Structure* structure = GlobalObject::createStructure(*vm);
GlobalObject* global = GlobalObject::create(*vm, structure, &GlobalObject::s_globalObjectMethodTable);
if (!global)
BUN_PANIC("Failed to create BakeGlobalObject");
global->m_devServer = nullptr;
global->m_bunVM = bunVM;
JSC::gcProtect(global);
global->setConsole(console);
global->setStackTraceLimit(10); // Node.js defaults to 10
return global;
}
}; // namespace Bake

View File

@@ -8,15 +8,18 @@ struct DevServer; // DevServer.zig
struct Route; // DevServer.zig
struct BunVirtualMachine;
class DevGlobalObject : public Zig::GlobalObject {
class GlobalObject : public Zig::GlobalObject {
public:
using Base = Zig::GlobalObject;
/// Null if in production
DevServer* m_devServer;
template<typename, JSC::SubspaceAccess mode> static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
if constexpr (mode == JSC::SubspaceAccess::Concurrently)
return nullptr;
return WebCore::subspaceForImpl<DevGlobalObject, WebCore::UseCustomHeapCellType::Yes>(
return WebCore::subspaceForImpl<GlobalObject, WebCore::UseCustomHeapCellType::Yes>(
vm,
[](auto& spaces) { return spaces.m_clientSubspaceForBakeGlobalScope.get(); },
[](auto& spaces, auto&& space) { spaces.m_clientSubspaceForBakeGlobalScope = std::forward<decltype(space)>(space); },
@@ -26,18 +29,18 @@ public:
}
static const JSC::GlobalObjectMethodTable s_globalObjectMethodTable;
static DevGlobalObject* create(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable);
static GlobalObject* create(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable);
DevServer* m_devServer;
ALWAYS_INLINE bool isProduction() const { return !m_devServer; }
void finishCreation(JSC::VM& vm);
DevGlobalObject(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable)
GlobalObject(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable)
: Zig::GlobalObject(vm, structure, methodTable) { }
};
// Zig API
extern "C" void KitInitProcessIdentifier();
extern "C" DevGlobalObject* KitCreateDevGlobal(DevServer* owner, void* console);
extern "C" GlobalObject* KitCreateDevGlobal(DevServer* owner, void* console);
}; // namespace Kit

View File

@@ -0,0 +1,39 @@
#include "BakeProduction.h"
#include "BunBuiltinNames.h"
#include "WebCoreJSBuiltins.h"
#include "JavaScriptCore/JSPromise.h"
#include "JavaScriptCore/Exception.h"
namespace Bake {
extern "C" JSC::JSPromise* BakeRenderRoutesForProd(
JSC::JSGlobalObject* global,
BunString outbase,
JSC::JSValue renderStaticCallback,
JSC::JSValue clientEntryUrl,
JSC::JSValue files,
JSC::JSValue patterns,
JSC::JSValue styles)
{
JSC::VM& vm = global->vm();
JSC::JSFunction* cb = JSC::JSFunction::create(vm, global, WebCore::bakeRenderRoutesForProdCodeGenerator(vm), global);
JSC::CallData callData = JSC::getCallData(cb);
JSC::MarkedArgumentBuffer args;
args.append(JSC::jsString(vm, outbase.toWTFString()));
args.append(renderStaticCallback);
args.append(clientEntryUrl);
args.append(files);
args.append(patterns);
args.append(styles);
NakedPtr<JSC::Exception> returnedException = nullptr;
auto result = JSC::call(global, cb, callData, JSC::jsUndefined(), args, returnedException);
if (UNLIKELY(returnedException)) {
// This should be impossible because it returns a promise.
return JSC::JSPromise::rejectedPromise(global, returnedException->value());
}
return JSC::jsCast<JSC::JSPromise*>(result);
}
} // namespace Bake

View File

@@ -0,0 +1,5 @@
#include "root.h"
#include "headers-handwritten.h"
namespace Bake {
} // namespace Bake

View File

@@ -0,0 +1,127 @@
// clang-format off
#include "BakeSourceProvider.h"
#include "BakeGlobalObject.h"
#include "JavaScriptCore/Completion.h"
#include "JavaScriptCore/Identifier.h"
#include "JavaScriptCore/JSCJSValue.h"
#include "JavaScriptCore/JSCast.h"
#include "JavaScriptCore/JSLock.h"
#include "JavaScriptCore/JSMap.h"
#include "JavaScriptCore/JSModuleLoader.h"
#include "JavaScriptCore/JSString.h"
#include "JavaScriptCore/JSModuleNamespaceObject.h"
namespace Bake {
extern "C" LoadServerCodeResult BakeLoadInitialServerCode(GlobalObject* global, BunString source) {
JSC::VM& vm = global->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
String string = "bake://server.js"_s;
JSC::JSString* key = JSC::jsString(vm, string);
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
source.toWTFString(),
origin,
WTFMove(string),
WTF::TextPosition(),
JSC::SourceProviderSourceType::Module
));
global->moduleLoader()->provideFetch(global, key, sourceCode);
RETURN_IF_EXCEPTION(scope, {});
JSC::JSInternalPromise* internalPromise = global->moduleLoader()->loadAndEvaluateModule(global, key, JSC::jsUndefined(), JSC::jsUndefined());
RETURN_IF_EXCEPTION(scope, {});
return { internalPromise, key };
}
extern "C" JSC::JSInternalPromise* BakeLoadModuleByKey(GlobalObject* global, JSC::JSString* key) {
return global->moduleLoader()->loadAndEvaluateModule(global, key, JSC::jsUndefined(), JSC::jsUndefined());
}
extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatch(GlobalObject* global, BunString source) {
JSC::VM&vm = global->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
String string = "bake://server.patch.js"_s;
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
source.toWTFString(),
origin,
WTFMove(string),
WTF::TextPosition(),
JSC::SourceProviderSourceType::Program
));
JSC::JSValue result = vm.interpreter.executeProgram(sourceCode, global, global);
RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({}));
RELEASE_ASSERT(result);
return JSC::JSValue::encode(result);
}
extern "C" JSC::EncodedJSValue BakeGetModuleNamespace(
JSC::JSGlobalObject* global,
JSC::JSValue keyValue
) {
JSC::JSString* key = JSC::jsCast<JSC::JSString*>(keyValue);
JSC::VM& vm = global->vm();
JSC::JSMap* map = JSC::jsCast<JSC::JSMap*>(
global->moduleLoader()->getDirect(
vm, JSC::Identifier::fromString(global->vm(), "registry"_s)
));
JSC::JSValue entry = map->get(global, key);
ASSERT(entry.isObject()); // should have called BakeLoadServerCode and wait for that promise
JSC::JSValue module = entry.getObject()->get(global, JSC::Identifier::fromString(global->vm(), "module"_s));
ASSERT(module.isCell());
JSC::JSModuleNamespaceObject* namespaceObject = global->moduleLoader()->getModuleNamespaceObject(global, module);
ASSERT(namespaceObject);
return JSC::JSValue::encode(namespaceObject);
}
extern "C" JSC::EncodedJSValue BakeGetDefaultExportFromModule(
JSC::JSGlobalObject* global,
JSC::JSValue keyValue
) {
JSC::VM& vm = global->vm();
return JSC::JSValue::encode(jsCast<JSC::JSModuleNamespaceObject*>(JSC::JSValue::decode(BakeGetModuleNamespace(global, keyValue)))->get(global, vm.propertyNames->defaultKeyword));
}
// There were issues when trying to use JSValue.get from zig
extern "C" JSC::EncodedJSValue BakeGetOnModuleNamespace(
JSC::JSGlobalObject* global,
JSC::JSModuleNamespaceObject* moduleNamespace,
const unsigned char* key,
size_t keyLength
) {
JSC::VM& vm = global->vm();
const auto propertyString = String(StringImpl::createWithoutCopying({ key, keyLength }));
const auto identifier = JSC::Identifier::fromString(vm, propertyString);
const auto property = JSC::PropertyName(identifier);
return JSC::JSValue::encode(moduleNamespace->get(global, property));
}
extern "C" JSC::EncodedJSValue BakeRegisterProductionChunk(JSC::JSGlobalObject* global, BunString virtualPathName, BunString source) {
JSC::VM& vm = global->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
String string = virtualPathName.toWTFString();
JSC::JSString* key = JSC::jsString(vm, string);
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
source.toWTFString(),
origin,
WTFMove(string),
WTF::TextPosition(),
JSC::SourceProviderSourceType::Module
));
global->moduleLoader()->provideFetch(global, key, sourceCode);
RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({}));
return JSC::JSValue::encode(key);
}
} // namespace Bake

View File

@@ -1,7 +1,7 @@
#pragma once
#include "root.h"
#include "headers-handwritten.h"
#include "BakeDevGlobalObject.h"
#include "BakeGlobalObject.h"
#include "JavaScriptCore/SourceOrigin.h"
namespace Bake {
@@ -40,9 +40,4 @@ private:
) {}
};
// Zig API
extern "C" LoadServerCodeResult BakeLoadInitialServerCode(DevGlobalObject* global, BunString source);
extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatch(DevGlobalObject* global, BunString source);
extern "C" JSC::EncodedJSValue BakeGetRequestHandlerFromModule(DevGlobalObject* global, JSC::JSString* encodedModule);
} // namespace Bake

File diff suppressed because it is too large

View File

@@ -0,0 +1,3 @@
//! Discovers routes from the filesystem, as instructed by the framework
//! configuration. Supports incrementally updating for DevServer, or
//! serializing to a binary for production builds.

src/bake/bake.d.ts vendored
View File

@@ -1,6 +1,8 @@
declare module "bun" {
declare function wipDevServerExpectHugeBreakingChanges(options: Bake.Options): never;
type Awaitable<T> = T | Promise<T>;
declare namespace Bake {
interface Options {
/**
@@ -54,7 +56,7 @@ declare module "bun" {
/**
* Add extra modules
*/
builtInModules: Record<string, BuiltInModule>;
builtInModules?: Record<string, BuiltInModule>;
/**
* Bun offers integration for React's Server Components with an
* interface that is generic enough to adapt to any framework.
@@ -83,12 +85,14 @@ declare module "bun" {
* If you are unsure what to set this to for a custom server components
* framework, choose 'false'.
*
* When set `true`, when bundling "use client" components for SSR, these
* files will be placed in a separate bundling graph where `conditions` does
* not include `react-server`.
* When set `true`, "use client" components bundled for SSR are placed in a
* separate bundling graph without the `react-server` condition. All imports
* that stem from them are re-bundled for this second graph, regardless of
* whether they actually differ under this condition.
*
* The built in framework config for React enables this flag so that server
* components and client components, utilize their own versions of React,
* components and client components utilize their own versions of React,
* despite running in the same process. This facilitates different aspects
* of the server and client react runtimes, such as `async` components only
* being available on the server.
@@ -128,14 +132,40 @@ declare module "bun" {
* during rendering.
*/
serverRegisterClientReferenceExport: string | undefined;
// /**
// * Allow creating client components inside of server-side files by using "use client"
// * as the first line of a function declaration. This is useful for small one-off
// * interactive components. This is behind a flag because it is not a feature of
// * React or Next.js, but rather is implemented because it is possible to.
// *
// * The client versions of these are tree-shaked extremely aggressively: anything
// * not referenced by the function body will be removed entirely.
// */
// allowAnonymousClientComponents: boolean;
}
/** Customize the React Fast Refresh transform. */
interface ReactFastRefreshOptions {
/** @default "react-refresh/runtime" */
/**
* This import has four exports, mirroring "react-refresh/runtime":
*
* `injectIntoGlobalHook(window): void`
* Called on first startup, before the user entrypoint.
*
* `register(component, uniqueId: string): void`
* Called on every function that starts with an uppercase letter. These
* may or may not be components, but they are always functions.
*
* `createSignatureFunctionForTransform(): ReactRefreshSignatureFunction`
* TODO: document. A passing no-op for this api is `return () => {}`
*
* @default "react-refresh/runtime"
*/
importSource: ImportSource | undefined;
}
type ReactRefreshSignatureFunction = () => void | ((func: Function, hash: string, force?: boolean, customHooks?: () => Function[]) => void);
/// Will be resolved from the point of view of the framework user's project root
/// Examples: `react-dom`, `./entry_point.tsx`, `/absolute/path.js`
type ImportSource = string;
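Per the doc comment above, a custom importSource only needs to export the functions the transform calls. A minimal no-op module satisfying the documented shape (a sketch covering only the three exports described; the module name is hypothetical):

// no-op-refresh-runtime.ts — usable as `importSource` when refresh bookkeeping is not wanted.
export function injectIntoGlobalHook(_global: typeof globalThis): void {
  // Called once at startup, before the user entrypoint runs.
}

export function register(_component: unknown, _uniqueId: string): void {
  // Called for every top-level function whose name starts with an uppercase letter.
}

export function createSignatureFunctionForTransform(): () => void {
  // A passing no-op, as suggested by the doc comment above.
  return () => {};
}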
@@ -145,7 +175,13 @@ declare module "bun" {
* The framework implementation decides and enforces the shape
* of the route module. Bun passes it as an opaque value.
*/
default: (request: Request, routeModule: unknown, routeMetadata: RouteMetadata) => Response;
default: (request: Request, routeModule: unknown, routeMetadata: RouteMetadata) => Awaitable<Response>;
/**
* Static rendering does not take a request in, and can generate
* multiple output files. Note that `import.meta.env.STATIC` will
* be inlined to true during a static build.
*/
staticRender: (routeModule: unknown, routeMetadata: RouteMetadata) => Awaitable<Record<string, Blob | ArrayBuffer>>;
}
interface ClientEntryPoint {
@@ -158,11 +194,25 @@ declare module "bun" {
onServerSideReload?: () => void;
}
/**
* This object and its children may be re-used between invocations, so it
* is not safe to mutate it at all.
*/
interface RouteMetadata {
/** A list of css files that the route will need to be styled */
styles: string[];
/** A list of js files that the route will need to be interactive */
scripts: string[];
/**
* A list of js files that the route will need to be interactive.
*/
readonly scripts: ReadonlyArray<string>;
/**
* A list of css files that the route will need to be styled.
*/
readonly styles: ReadonlyArray<string>;
/**
* Can be used by the framework to mention the route file. Only provided in
* development mode to prevent leaking these details into production
* builds.
*/
devRoutePath?: string;
}
}
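Taken together, the server entry point shape above (the `default` handler plus `staticRender`) can be satisfied by something very small. A sketch with no framework involved (the routeModule shape is whatever the framework defines; here it is assumed to expose a render(): string function purely for illustration):

import type { Bake } from "bun";

// Standard request handler: render HTML and append the route's script tags.
export default async function handle(
  _request: Request,
  routeModule: any,
  meta: Bake.RouteMetadata,
): Promise<Response> {
  const scripts = meta.scripts
    .map(url => `<script src=${JSON.stringify(url)}></script>`)
    .join("");
  return new Response(`<!doctype html>${routeModule.render()}${scripts}`, {
    headers: { "Content-Type": "text/html" },
  });
}

// Static rendering: one output directory per route, keys relative to the route path.
export async function staticRender(routeModule: any, _meta: Bake.RouteMetadata) {
  return {
    "/index.html": new Blob([routeModule.render()], { type: "text/html" }),
  };
}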

View File

@@ -11,6 +11,8 @@ interface Config {
separateSSRGraph?: true;
// Client
/** Dev Server's `configuration_hash_key` */
version: string;
/** If available, this is the Id of `react-refresh/runtime` */
refresh?: Id;
/**
@@ -39,7 +41,7 @@ declare const side: "client" | "server";
* interface as opposed to a WebSocket connection.
*/
declare var server_exports: {
handleRequest: (req: Request, meta: HandleRequestMeta, id: Id) => any;
handleRequest: (req: Request, routeModuleId: Id, clientEntryUrl: string, styles: string[]) => any;
registerUpdate: (
modules: any,
componentManifestAdd: null | string[],
@@ -47,11 +49,6 @@ declare var server_exports: {
) => void;
};
interface HandleRequestMeta {
// url for script tag
clientEntryPoint: string;
}
/*
* If you are running a debug build of Bun. These debug builds should provide
* helpful information to someone working on the bundler itself.

View File

@@ -3,6 +3,11 @@
//! server, server components, and other integrations. Instead of taking the
//! role as a framework, Bake is tool for frameworks to build on top of.
/// Zig version of TS definition 'Bake.Options' in 'bake.d.ts'
pub const UserOptions = struct {
framework: Framework,
};
/// Temporary function to invoke dev server via JavaScript. Will be
/// replaced with a user-facing API. Refs the event loop forever.
pub fn jsWipDevServer(global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JSValue {
@@ -19,7 +24,7 @@ pub fn jsWipDevServer(global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JS
, .{});
bun.Output.flush();
const options = devServerOptionsFromJs(global, callframe.argument(0)) catch {
const options = bakeOptionsFromJs(global, callframe.argument(0)) catch {
if (!global.hasException())
global.throwInvalidArguments("invalid arguments", .{});
return .zero;
@@ -149,6 +154,9 @@ pub const Framework = struct {
if (str.eqlComptime("react-server-components")) {
return Framework.react();
}
if (str.eqlComptime("react")) {
return Framework.react();
}
}
if (!opts.isObject()) {
@@ -198,8 +206,22 @@ pub const Framework = struct {
global.throwInvalidArguments("'framework.reactFastRefresh' must be an object or 'true'", .{});
return error.JSError;
}
// in addition to here, this import isn't actually wired up to js_parser where the default is hardcoded.
bun.todoPanic(@src(), "custom react-fast-refresh import source", .{});
const prop = rfr.get(global, "importSource") orelse {
global.throwInvalidArguments("'framework.reactFastRefresh' is missing 'importSource'", .{});
return error.JSError;
};
const str = prop.toBunString(global);
defer str.deref();
if (global.hasException())
return error.JSError;
// Leak
break :brk .{
.import_source = str.toUTF8(bun.default_allocator).slice(),
};
},
.server_components = sc: {
const sc: JSValue = opts.get(global, "serverComponents") orelse {
@@ -256,11 +278,86 @@ pub const Framework = struct {
},
};
}
pub fn initBundler(
framework: *Framework,
allocator: std.mem.Allocator,
log: *bun.logger.Log,
mode: Mode,
comptime renderer: Graph,
out: *bun.bundler.Bundler,
) !void {
out.* = try bun.Bundler.init(
allocator, // TODO: this is likely a memory leak
log,
std.mem.zeroes(bun.Schema.Api.TransformOptions),
null,
);
out.options.target = switch (renderer) {
.client => .browser,
.server, .ssr => .bun,
};
out.options.public_path = switch (renderer) {
.client => DevServer.client_prefix,
.server, .ssr => "",
};
out.options.entry_points = &.{};
out.options.log = log;
out.options.output_format = switch (mode) {
.development => .internal_bake_dev,
.production => .esm,
};
out.options.out_extensions = bun.StringHashMap([]const u8).init(out.allocator);
out.options.hot_module_reloading = mode == .development;
out.options.code_splitting = mode == .production;
// force disable filesystem output, even though bundle_v2
// is special cased to return before that code is reached.
out.options.output_dir = "";
// framework configuration
out.options.react_fast_refresh = mode == .development and renderer == .client and framework.react_fast_refresh != null;
out.options.server_components = framework.server_components != null;
out.options.conditions = try bun.options.ESMConditions.init(allocator, out.options.target.defaultConditions());
if (renderer == .server and framework.server_components != null) {
try out.options.conditions.appendSlice(&.{"react-server"});
}
out.options.production = mode == .production;
out.options.tree_shaking = mode == .production;
out.options.minify_syntax = true; // required for DCE
// out.options.minify_identifiers = mode == .production;
// out.options.minify_whitespace = mode == .production;
out.options.experimental_css = true;
out.options.css_chunking = true;
out.options.framework = framework;
out.configureLinker();
try out.configureDefines();
out.options.jsx.development = mode == .development;
try addImportMetaDefines(allocator, out.options.define, mode, switch (renderer) {
.client => .client,
.server, .ssr => .server,
});
if (mode == .production) {
out.options.entry_naming = "[name]-[hash].[ext]";
out.options.chunk_naming = "chunk-[name]-[hash].[ext]";
}
out.resolver.opts = out.options;
}
};
// TODO: this function leaks memory and has poor error handling, but that is OK since
// this API is not finalized.
fn devServerOptionsFromJs(global: *JSC.JSGlobalObject, options: JSValue) !DevServer.Options {
pub fn bakeOptionsFromJs(global: *JSC.JSGlobalObject, options: JSValue) !DevServer.Options {
if (!options.isObject()) return error.Invalid;
const routes_js = try options.getArray(global, "routes") orelse return error.Invalid;
@@ -367,18 +464,17 @@ pub fn addImportMetaDefines(
pub const server_virtual_source: bun.logger.Source = .{
.path = bun.fs.Path.initForKitBuiltIn("bun", "bake/server"),
.key_path = bun.fs.Path.initForKitBuiltIn("bun", "bake/server"),
.contents = "", // Virtual
.index = bun.JSAst.Index.bake_server_data,
};
pub const client_virtual_source: bun.logger.Source = .{
.path = bun.fs.Path.initForKitBuiltIn("bun", "bake/client"),
.key_path = bun.fs.Path.initForKitBuiltIn("bun", "bake/client"),
.contents = "", // Virtual
.index = bun.JSAst.Index.bake_client_data,
};
pub const production = @import("./production.zig");
pub const DevServer = @import("./DevServer.zig");
const std = @import("std");

View File

@@ -24,7 +24,7 @@ const root = hydrateRoot(document, <Async />, {
});
export async function onServerSideReload() {
const response = await fetch(location.href, {
const response = await fetch(location.href + '/index.rsc', {
headers: {
Accept: "text/x-component",
},

View File

@@ -0,0 +1,27 @@
// This file is unused by Bun itself, but rather is a tool for
// contributors to hack on `bun-framework-react` without needing
// to compile bun itself. If changes to this are made, please
// update 'pub fn react' in 'bake.zig'
import type { Bake } from "bun";
export function react(): Bake.Framework {
return {
// When the files are embedded in the Bun binary, relative
// path resolution does not work.
builtInModules: {
'bun-framework-react/client.tsx': { path: require.resolve('./client.tsx') },
'bun-framework-react/server.tsx': { path: require.resolve('./server.tsx') },
'bun-framework-react/ssr.tsx': { path: require.resolve('./ssr.tsx') },
},
clientEntryPoint: "bun-framework-react/client.tsx",
serverEntryPoint: "bun-framework-react/server.tsx",
reactFastRefresh: {
importSource: "react-refresh/runtime",
},
serverComponents: {
separateSSRGraph: true,
serverRegisterClientReferenceExport: 'registerClientReference',
serverRuntimeImportSource: 'react-server-dom-webpack/server'
}
};
}

View File

@@ -1,14 +1,43 @@
import type { Bake } from "bun";
import { renderToReadableStream } from "react-server-dom-webpack/server.browser";
import { renderToHtml } from "bun-framework-rsc/ssr.tsx" with { bunBakeGraph: "ssr" };
import { serverManifest } from "bun:bake/server";
import { clientManifest, serverManifest } from "bun:bake/server";
import { join } from 'node:path';
function getPage(route, meta: Bake.RouteMetadata) {
const Route = route.default;
const { styles } = meta;
if (import.meta.env.DEV) {
if (typeof Route !== "function") {
throw new Error(
"Expected the default export of " +
JSON.stringify(meta.devRoutePath) +
" to be a React component, got " +
JSON.stringify(Route),
);
}
}
return (
<html lang="en">
<head>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>Bun + React Server Components</title>
{styles.map(url => (
<link key={url} rel="stylesheet" href={url} />
))}
</head>
<body>
<Route />
</body>
</html>
);
}
// `server.tsx` exports a function to be used for handling user routes. It takes
// in the Request object, the route's module, and extra route metadata.
export default async function (request: Request, route: any, meta: Bake.RouteMetadata): Promise<Response> {
// TODO: be able to signal to Bake that Accept may include this, so that
// static pages can be pre-rendered both as RSC payload + HTML.
export default async function render(request: Request, route: any, meta: Bake.RouteMetadata): Promise<Response> {
// The framework generally has two rendering modes.
// - Standard browser navigation
// - Client-side navigation
@@ -18,21 +47,7 @@ export default async function (request: Request, route: any, meta: Bake.RouteMet
// rendering modes. This is signaled by `client.tsx` via the `Accept` header.
const skipSSR = request.headers.get("Accept")?.includes("text/x-component");
const Route = route.default;
const page = (
<html lang="en">
<head>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>Bun + React Server Components</title>
{meta.styles.map(url => (
<link key={url} rel="stylesheet" href={url} />
))}
</head>
<body>
<Route />
</body>
</html>
);
const page = getPage(route, meta);
// This renders Server Components to a ReadableStream "RSC Payload"
const rscPayload = renderToReadableStream(page, serverManifest);
@@ -44,7 +59,7 @@ export default async function (request: Request, route: any, meta: Bake.RouteMet
}
// One stream is used to render SSR. The second is embedded into the html for browser hydration.
// Note: This approach does not stream the response.
// Note: This approach does not stream the response. That practice is called "react flight" and should be added
const [rscPayload1, rscPayload2] = rscPayload.tee();
const rscPayloadBuffer = Bun.readableStreamToText(rscPayload1);
const rw = new HTMLRewriter();
@@ -69,3 +84,44 @@ export default async function (request: Request, route: any, meta: Bake.RouteMet
}),
);
}
// For static site generation, a different function is given, one without a request object.
export async function renderStatic(route: any, meta: Bake.RouteMetadata) {
const page = getPage(route, meta);
const rscPayload = renderToReadableStream(page, serverManifest);
const [rscPayload1, rscPayload2] = rscPayload.tee();
// Prepare both files in parallel
let [html, rscPayloadBuffer] = await Promise.all([
Bun.readableStreamToText(await renderToHtml(rscPayload2)),
Bun.readableStreamToText(rscPayload1),
]);
const scripts = meta.scripts.map(url => `<script src=${JSON.stringify(url)}></script>`);
html = html.replace('</body>', `<script id="rsc_payload" type="json">${rscPayloadBuffer}</script>${scripts.join('\n')}</body>`);
// Each route generates a directory with framework-provided files. Keys are
// files relative to the route path, and values are anything `Bun.write`
// supports. Streams may result in lower memory usage.
return {
// Directories like `blog/index.html` are preferred over `blog.html` because
// certain static hosts do not support this conversion. By using `index.html`,
// the static build is more portable.
'/index.html': html,
// The RSC payload is provided so client-side can use this file for seamless
// client-side navigation. This is equivalent to 'Accept: text/x-component'
// for the non-static build.
'/index.rsc': rscPayloadBuffer,
}
}
// This is a hack to make react-server-dom-webpack work with Bun's bundler.
// It will be removed once Bun acquires react-server-dom-bun.
if (!import.meta.env.DEV) {
globalThis.__webpack_require__ = (id: string) => {
console.log("Bun: __webpack_require__", id);
const y = import.meta.require(join(import.meta.dir, id));
console.log({y});
return y;
};
}

View File

@@ -6,7 +6,7 @@ export interface DeserializedFailure {
// If not specified, it is a client-side error.
file: string | null;
messages: BundlerMessage[];
};
}
export interface BundlerMessage {
kind: "bundler";
@@ -50,7 +50,7 @@ function readLogMsg(r: DataViewReader, level: BundlerMessageLevel) {
notes[i] = readLogData(r);
}
return {
kind: 'bundler',
kind: "bundler",
level,
message,
location,

View File

@@ -98,7 +98,7 @@ pre {
cursor: pointer;
}
/* .file-name:hover,
.file-name:hover,
.file-name:focus-visible {
background-color: var(--item-bg-hover);
}
@@ -108,20 +108,21 @@ pre {
font-size: 70%;
}
.file-name:hover::after {
.file-name:hover::after,
.file-name:focus-visible {
content: " (click to open in editor)";
} */
}
.message {
margin: 1rem;
margin-bottom: 0;
}
button+.message {
button + .message {
margin-top: 0.5rem;
}
.message-text>span {
.message-text > span {
color: var(--color);
}
@@ -168,9 +169,8 @@ button+.message {
}
@media (prefers-color-scheme: light) {
.log-warn,
.log-note {
font-weight: bold;
}
}
}

View File

@@ -11,7 +11,13 @@
// added or previous ones are solved.
import { BundlerMessageLevel } from "../enums";
import { css } from "../macros" with { type: "macro" };
import { BundlerMessage, BundlerMessageLocation, BundlerNote, decodeSerializedError, type DeserializedFailure } from "./error-serialization";
import {
BundlerMessage,
BundlerMessageLocation,
BundlerNote,
decodeSerializedError,
type DeserializedFailure,
} from "./error-serialization";
import { DataViewReader } from "./reader";
if (side !== "client") throw new Error("Not client side!");
@@ -172,11 +178,22 @@ export function updateErrorOverlay() {
// Create the element for the root if it does not yet exist.
if (!dom) {
let title;
let btn;
const root = elem("div", { class: "message-group" }, [
elem("button", { class: "file-name" }, [
title = textNode()
]),
(btn = elem("button", { class: "file-name" }, [(title = textNode())])),
]);
btn.addEventListener("click", () => {
const firstLocation = errors.get(owner)?.messages[0]?.location;
if (!firstLocation) return;
let fileName = title.textContent.replace(/^\//, "");
fetch("/_bun/src/" + fileName, {
headers: {
"Open-In-Editor": "1",
"Editor-Line": firstLocation.line.toString(),
"Editor-Column": firstLocation.column.toString(),
},
});
});
dom = { root, title, messages: [] };
// TODO: sorted insert?
domErrorList.appendChild(root);
@@ -203,50 +220,48 @@ export function updateErrorOverlay() {
setModalVisible(true);
}
const bundleLogLevelToName = [
"error",
"warn",
"note",
"debug",
"verbose",
];
const bundleLogLevelToName = ["error", "warn", "note", "debug", "verbose"];
function renderBundlerMessage(msg: BundlerMessage) {
return elem('div', { class: 'message' }, [
renderErrorMessageLine(msg.level, msg.message),
...msg.location ? renderCodeLine(msg.location, msg.level) : [],
...msg.notes.map(renderNote),
].flat(1));
return elem(
"div",
{ class: "message" },
[
renderErrorMessageLine(msg.level, msg.message),
...(msg.location ? renderCodeLine(msg.location, msg.level) : []),
...msg.notes.map(renderNote),
].flat(1),
);
}
function renderErrorMessageLine(level: BundlerMessageLevel, text: string) {
const levelName = bundleLogLevelToName[level];
if(IS_BUN_DEVELOPMENT && !levelName) {
if (IS_BUN_DEVELOPMENT && !levelName) {
throw new Error("Unknown log level: " + level);
}
return elem('div', { class: 'message-text' } , [
elemText('span', { class: 'log-' + levelName }, levelName),
elemText('span', { class: 'log-colon' }, ': '),
elemText('span', { class: 'log-text' }, text),
return elem("div", { class: "message-text" }, [
elemText("span", { class: "log-" + levelName }, levelName),
elemText("span", { class: "log-colon" }, ": "),
elemText("span", { class: "log-text" }, text),
]);
}
function renderCodeLine(location: BundlerMessageLocation, level: BundlerMessageLevel) {
return [
elem('div', { class: 'code-line' }, [
elemText('code', { class: 'line-num' }, `${location.line}`),
elemText('pre', { class: 'code-view' }, location.lineText),
elem("div", { class: "code-line" }, [
elemText("code", { class: "line-num" }, `${location.line}`),
elemText("pre", { class: "code-view" }, location.lineText),
]),
elem("div", { class: "highlight-wrap log-" + bundleLogLevelToName[level] }, [
elemText("span", { class: "space" }, "_".repeat(`${location.line}`.length + location.column - 1)),
elemText("span", { class: "line" }, "_".repeat(location.length)),
]),
elem('div', { class: 'highlight-wrap log-' + bundleLogLevelToName[level] }, [
elemText('span', { class: 'space' }, '_'.repeat(`${location.line}`.length + location.column - 1)),
elemText('span', { class: 'line' }, '_'.repeat(location.length)),
])
];
}
function renderNote(note: BundlerNote) {
return [
renderErrorMessageLine(BundlerMessageLevel.note, note.message),
...note.location ? renderCodeLine(note.location, BundlerMessageLevel.note) : [],
...(note.location ? renderCodeLine(note.location, BundlerMessageLevel.note) : []),
];
}
}

View File

@@ -40,4 +40,8 @@ export class DataViewReader {
hasMoreData() {
return this.cursor < this.view.byteLength;
}
rest() {
return this.view.buffer.slice(this.cursor);
}
}

View File

@@ -2,22 +2,35 @@ const isLocal = location.host === "localhost" || location.host === "127.0.0.1";
function wait() {
return new Promise<void>(done => {
let timer;
let timer: Timer | null = null;
const onBlur = () => {
if (timer !== null) {
clearTimeout(timer);
timer = null;
}
};
const onTimeout = () => {
if (timer !== null) clearTimeout(timer);
document.removeEventListener("focus", onTimeout);
window.removeEventListener("focus", onTimeout);
window.removeEventListener("blur", onBlur);
done();
};
document.addEventListener("focus", onTimeout);
timer = setTimeout(
() => {
timer = null;
onTimeout();
},
isLocal ? 2_500 : 30_000,
);
window.addEventListener("focus", onTimeout);
if (document.hasFocus()) {
timer = setTimeout(
() => {
timer = null;
onTimeout();
},
isLocal ? 2_500 : 2_500,
);
window.addEventListener("blur", onBlur);
}
});
}

View File

@@ -1,22 +1,3 @@
// TODO: generate this using information in DevServer.zig
export const enum MessageId {
/// Version packet
version = 86,
/// When visualization mode is enabled, this packet contains
/// the entire serialized IncrementalGraph state.
visualizer = 118,
/// Sent on a successful bundle, containing client code.
hot_update = 40,
/// Sent on a successful bundle, containing a list of
/// routes that are updated.
route_update = 82,
/// Sent when the list of errors changes.
errors = 69,
/// Sent when all errors are cleared. Semi-redundant
errors_cleared = 99,
}
export const enum BundlerMessageLevel {
err = 0,
warn = 1,

View File

@@ -1,75 +0,0 @@
# Kit's WebSocket Protocol
This format is only intended for communication between the browser build of
`hmr-runtime.ts` and `DevServer.zig`. Server-side HMR is implemented using a
different interface. This document is aimed at contributors to these
two components; any other use case is unsupported.
Every message uses the `.binary`/`ArrayBuffer` transport mode. The first byte
indicates the Message ID; the message length is inferred from the payload size.
All integers are little-endian.
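As a hedged sketch of this framing (the socket URL and handler names are illustrative, not part of the protocol), a client might dispatch on the first byte like so:

```ts
// Minimal dispatch sketch: the first byte selects the message type, the rest
// of the ArrayBuffer is that message's payload.
const ws = new WebSocket(`ws://${location.host}/_bun/hmr`); // endpoint is an assumption
ws.binaryType = "arraybuffer";
ws.onmessage = (event: MessageEvent<ArrayBuffer>) => {
  const bytes = new Uint8Array(event.data);
  const messageId = String.fromCharCode(bytes[0]); // "V", "(", "R", "e", or "v"
  const payload = new DataView(event.data, 1);
  switch (messageId) {
    // case "V": handleVersion(payload); break;
    // case "R": handleRouteReload(payload); break;
    default:
      console.warn("Unknown message id", messageId, payload.byteLength);
  }
};
```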
## Client->Server messages
### `v`
Subscribe to visualizer packets (`v`)
## Server->Client messages
### `V`
Version payload. Sent on connection startup. The client should issue a hard-reload
when it does not match the embedded version.
Example:
```
V1.1.30-canary.37+117e1b388
```
### `(`
Hot-module-reloading patch. The entire payload is UTF-8 encoded JavaScript.
### `R` - Route reload request
Server-side code has reloaded. The client should either refetch the route or perform a hard reload. The payload layout is as follows; a parsing sketch follows the list.
- `u32`: Number of updated routes
- For each route:
- `u32`: Route ID
- `u16`: Length of route name.
- `[n]u8`: Route name in UTF-8 encoded text.
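A hedged parsing sketch for the layout above (little-endian integers per the note earlier; the function name is illustrative):

```ts
// Parse an `R` (route reload) payload. `view` excludes the leading
// message-id byte; all integers are little-endian.
function parseRouteReload(view: DataView): { id: number; name: string }[] {
  const decoder = new TextDecoder();
  const routes: { id: number; name: string }[] = [];
  let offset = 0;
  const count = view.getUint32(offset, true); offset += 4;
  for (let i = 0; i < count; i++) {
    const id = view.getUint32(offset, true); offset += 4;
    const nameLength = view.getUint16(offset, true); offset += 2;
    const nameBytes = new Uint8Array(view.buffer, view.byteOffset + offset, nameLength);
    routes.push({ id, name: decoder.decode(nameBytes) });
    offset += nameLength;
  }
  return routes;
}
```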
### `e` - Error status update
- `u32`: Number of errors removed
- For each removed error:
- `u32` Error owner
- The remainder of the payload repeats for each error object:
- `u32` Error owner
- Error Payload
### `v`
Payload for `incremental_visualizer.html`. This can be accessed via `/_bun/incremental_visualizer`. The layout is as follows; a parsing sketch for the per-file records follows the list.
- `u32`: Number of files in client graph
- For each file in client graph
- `u32`: Length of name. If zero then no other fields are provided.
- `[n]u8`: File path in UTF-8 encoded text
- `u8`: If file is stale, set 1
- `u8`: If file is in server graph, set 1
- `u8`: If file is in ssr graph, set 1
- `u8`: If file is a server-side route root, set 1
- `u8`: If file is a server-side component boundary file, set 1
- `u32`: Number of files in the server graph
- For each file in the server graph, repeat the same parsing as for the client graph
- `u32`: Number of client edges. For each,
- `u32`: File index of the dependency file
- `u32`: File index of the imported file
- `u32`: Number of server edges. For each,
- `u32`: File index of the dependency file
- `u32`: File index of the imported file
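A hedged sketch of parsing just the per-file records described above; the record shape is taken from this list, the type and function names are illustrative, and the server graph and edge lists repeat analogously.

```ts
interface VisualizerFile {
  path: string;
  stale: boolean;
  inServerGraph: boolean;
  inSsrGraph: boolean;
  isRouteRoot: boolean;
  isComponentBoundary: boolean;
}

// Parse the client-graph file records of a `v` payload. `view` excludes the
// leading message-id byte; integers are little-endian.
function parseClientGraphFiles(view: DataView): VisualizerFile[] {
  const decoder = new TextDecoder();
  const files: VisualizerFile[] = [];
  let offset = 0;
  const count = view.getUint32(offset, true); offset += 4;
  for (let i = 0; i < count; i++) {
    const nameLength = view.getUint32(offset, true); offset += 4;
    if (nameLength === 0) {
      // A zero-length name means no other fields follow for this slot.
      continue;
    }
    const path = decoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, nameLength));
    offset += nameLength;
    files.push({
      path,
      stale: view.getUint8(offset++) === 1,
      inServerGraph: view.getUint8(offset++) === 1,
      inSsrGraph: view.getUint8(offset++) === 1,
      isRouteRoot: view.getUint8(offset++) === 1,
      isComponentBoundary: view.getUint8(offset++) === 1,
    });
  }
  return files;
}
```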

View File

@@ -7,7 +7,7 @@ import { td } from "./shared";
import { DataViewReader } from "./client/reader";
import { routeMatch } from "./client/route";
import { initWebSocket } from "./client/websocket";
import { MessageId } from "./enums";
import { MessageId } from "./generated";
if (typeof IS_BUN_DEVELOPMENT !== "boolean") {
throw new Error("DCE is configured incorrectly");
@@ -49,18 +49,49 @@ try {
console.error(e);
}
/**
* Map between CSS identifier and its style tag.
* If a file is not present in this map, it might exist as a link tag in the HTML.
*/
const cssStore = new Map<string, CSSStyleSheet>();
let isFirstRun = true;
initWebSocket({
[MessageId.version](view) {
// TODO: config.version and verify everything is sane
console.log("VERSION: ", td.decode(view.buffer.slice(1)));
if (td.decode(view.buffer.slice(1)) !== config.version) {
console.error("Version mismatch, hard-reloading");
location.reload();
}
if (isFirstRun) {
isFirstRun = false;
return;
}
// It would be possible to use `performRouteReload` to do a hot-reload,
// but the issue lies in possibly outdated client files. For correctness,
// all client files have to be HMR reloaded or proven unchanged.
// Configuration changes are already handled by the `config.version` data.
location.reload();
},
[MessageId.hot_update](view) {
const code = td.decode(view.buffer);
const modules = (0, eval)(code);
replaceModules(modules);
const reader = new DataViewReader(view, 1);
const cssCount = reader.u32();
if (cssCount > 0) {
for (let i = 0; i < cssCount; i++) {
const moduleId = reader.stringWithLength(16);
const content = reader.string32();
reloadCss(moduleId, content);
}
}
if (reader.hasMoreData()) {
const code = td.decode(reader.rest());
const modules = (0, eval)(code);
replaceModules(modules);
}
},
[MessageId.errors]: onErrorMessage,
[MessageId.errors_cleared]: onErrorClearedMessage,
[MessageId.route_update](view) {
const reader = new DataViewReader(view, 1);
let routeCount = reader.u32();
@@ -68,11 +99,32 @@ initWebSocket({
while (routeCount > 0) {
routeCount -= 1;
const routeId = reader.u32();
const routePattern = reader.stringWithLength(reader.u16());
const routePattern = reader.string32();
if (routeMatch(routeId, routePattern)) {
performRouteReload();
break;
}
}
},
[MessageId.errors]: onErrorMessage,
[MessageId.errors_cleared]: onErrorClearedMessage,
});
function reloadCss(id: string, newContent: string) {
console.log(`[Bun] Reloading CSS: ${id}`);
// TODO: can any of the following operations throw?
let sheet = cssStore.get(id);
if (!sheet) {
sheet = new CSSStyleSheet();
sheet.replace(newContent);
document.adoptedStyleSheets.push(sheet);
cssStore.set(id, sheet);
// Delete the link tag if it exists
document.querySelector(`link[href="/_bun/css/${id}.css"]`)?.remove();
return;
}
sheet.replace(newContent);
}

View File

@@ -10,7 +10,7 @@ import { decodeAndAppendError, onErrorMessage, updateErrorOverlay } from "./clie
import { DataViewReader } from "./client/reader";
import { routeMatch } from "./client/route";
import { initWebSocket } from "./client/websocket";
import { MessageId } from "./enums";
import { MessageId } from "./generated";
/** Injected by DevServer */
declare const error: Uint8Array;

View File

@@ -8,9 +8,8 @@ if (typeof IS_BUN_DEVELOPMENT !== "boolean") {
throw new Error("DCE is configured incorrectly");
}
// Server Side
server_exports = {
async handleRequest(req, { clientEntryPoint }, requested_id) {
async handleRequest(req, routeModuleId, clientEntryUrl, styles) {
const serverRenderer = loadModule<Bake.ServerEntryPoint>(config.main, LoadModuleType.AssertPresent).exports.default;
if (!serverRenderer) {
@@ -20,9 +19,10 @@ server_exports = {
throw new Error('Framework server entrypoint\'s "default" export is not a function.');
}
const response = await serverRenderer(req, loadModule(requested_id, LoadModuleType.AssertPresent).exports, {
styles: [],
scripts: [clientEntryPoint],
const response = await serverRenderer(req, loadModule(routeModuleId, LoadModuleType.AssertPresent).exports, {
styles: styles,
scripts: [clientEntryUrl],
devRoutePath: routeModuleId,
});
if (!(response instanceof Response)) {

Some files were not shown because too many files have changed in this diff.