Compare commits

..

7 Commits

Author SHA1 Message Date
Dylan Conway
360925bb37 Merge branch 'main' into dylan/ref-string 2024-10-15 16:44:18 -07:00
Dylan Conway
e678a4220f Merge branch 'main' into dylan/ref-string 2024-10-12 00:49:20 -07:00
Dylan Conway
09c57f5887 Revert "avoid integer overflow with empty string"
This reverts commit 0f2b44a889.
2024-10-12 00:43:19 -07:00
Dylan Conway
091d279b7f Revert "add test"
This reverts commit 83a8b03224.
2024-10-12 00:43:18 -07:00
Dylan Conway
83a8b03224 add test 2024-10-12 00:29:25 -07:00
Dylan Conway
0f2b44a889 avoid integer overflow with empty string 2024-10-12 00:25:24 -07:00
Dylan Conway
2d2643f2f4 make sure ref 2024-10-11 23:38:04 -07:00
355 changed files with 7272 additions and 79885 deletions

View File

@@ -1,406 +0,0 @@
#!/usr/bin/env node
/**
* Build and test Bun on macOS, Linux, and Windows.
* @link https://buildkite.com/docs/pipelines/defining-steps
*/
import { writeFileSync } from "node:fs";
import { join } from "node:path";
function getEnv(name, required = true) {
const value = process.env[name];
if (!value && required) {
throw new Error(`Missing environment variable: ${name}`);
}
return value;
}
/**
 * Derive the "owner/repo" slug for the repository being built.
 * Prefers the pull-request fork repository when present, falling back to the
 * pipeline's own repository URL.
 *
 * FIX: the previous pattern left the dot in `github.com` unescaped (matching
 * any character) and only accepted a `/` after the host, so SSH-style remotes
 * (`git@github.com:owner/repo.git`) were rejected. The separator is now
 * `[/:]` and the dot is escaped.
 * @returns {string} e.g. "oven-sh/bun"
 * @throws {Error} When the URL is not a recognizable GitHub repository.
 */
function getRepository() {
  const url = getEnv("BUILDKITE_PULL_REQUEST_REPO", false) || getEnv("BUILDKITE_REPO");
  const match = url.match(/github\.com[/:]([^/]+)\/([^/]+)\.git$/);
  if (!match) {
    throw new Error(`Unsupported repository: ${url}`);
  }
  const [, owner, repo] = match;
  return `${owner}/${repo}`;
}
/** @returns {string} The commit SHA for this build (required `BUILDKITE_COMMIT`). */
function getCommit() {
  const commit = getEnv("BUILDKITE_COMMIT");
  return commit;
}
/** @returns {string} The branch being built (required `BUILDKITE_BRANCH`). */
function getBranch() {
  const branch = getEnv("BUILDKITE_BRANCH");
  return branch;
}
/** @returns {string} The pipeline's default branch, or "main" when not configured. */
function getMainBranch() {
  const configured = getEnv("BUILDKITE_PIPELINE_DEFAULT_BRANCH", false);
  return configured || "main";
}
/**
 * Whether this build comes from a forked repository: a pull-request repo is
 * set and it differs from the pipeline's own repository URL.
 * @returns {boolean}
 */
function isFork() {
  const pullRequestRepo = getEnv("BUILDKITE_PULL_REQUEST_REPO", false);
  if (!pullRequestRepo) {
    return false;
  }
  return pullRequestRepo !== getEnv("BUILDKITE_REPO");
}
/**
 * Whether this build is on the default branch of the canonical repository.
 * Note: the branch comparison short-circuits before `isFork()` is evaluated,
 * matching the original evaluation order.
 * @returns {boolean}
 */
function isMainBranch() {
  const onDefaultBranch = getBranch() === getMainBranch();
  return onDefaultBranch && !isFork();
}
/**
 * Whether this build was triggered by GitHub's merge queue, which uses
 * temporary branches prefixed with "gh-readonly-queue".
 * @returns {boolean}
 */
function isMergeQueue() {
  const branch = getEnv("BUILDKITE_BRANCH");
  return branch.startsWith("gh-readonly-queue");
}
/**
 * Whether this build is for a pull request.
 *
 * FIX: Buildkite sets `BUILDKITE_PULL_REQUEST` to the pull request *number*
 * (e.g. "123") for PR builds and to the literal string "false" otherwise — it
 * is never "true", so the previous `=== "true"` comparison always returned
 * false. We now treat any non-empty value other than "false" as a PR build.
 * @returns {boolean}
 */
function isPullRequest() {
  const pullRequest = getEnv("BUILDKITE_PULL_REQUEST", false);
  return !!pullRequest && pullRequest !== "false";
}
/**
 * Fetch the list of files changed by this build via the GitHub compare API.
 * On the main branch the commit is compared against its first parent;
 * otherwise it is compared against the default branch.
 * Removed/unchanged files are excluded.
 * @returns {Promise<string[] | undefined>} Filenames, or `undefined` when the
 * request fails (non-OK response or network error, which is logged).
 */
async function getChangedFiles() {
  const repository = getRepository();
  const head = getCommit();
  const base = isMainBranch() ? `${head}^1` : getMainBranch();
  const url = `https://api.github.com/repos/${repository}/compare/${base}...${head}`;
  try {
    const response = await fetch(url);
    if (!response.ok) {
      return;
    }
    const { files } = await response.json();
    return files
      .filter(({ status }) => !/removed|unchanged/i.test(status))
      .map(({ filename }) => filename);
  } catch (error) {
    // Best-effort: a failed lookup just means no skip-optimizations apply.
    console.error(error);
  }
}
/**
 * Whether a changed file is documentation-only: under a docs-like top-level
 * directory, or any Markdown file.
 * @param {string} filename - Repository-relative path.
 * @returns {boolean}
 */
function isDocumentation(filename) {
  const docsDirectory = /^(\.vscode|\.github|bench|docs|examples)/;
  const markdownFile = /\.(md)$/;
  return docsDirectory.test(filename) || markdownFile.test(filename);
}
/**
 * Whether a changed file lives under the top-level `test` directory.
 * @param {string} filename - Repository-relative path.
 * @returns {boolean}
 */
function isTest(filename) {
  return filename.startsWith("test");
}
/**
 * Serialize a plain object to a minimal YAML string.
 * Supports nulls, arrays (of scalars or objects), nested objects, and
 * scalars; strings containing YAML-significant characters (: # ' " or
 * newlines) are double-quoted with embedded quotes backslash-escaped.
 * @param {object} obj - The value to serialize.
 * @param {number} [indent=0] - Current indentation width in spaces.
 * @returns {string} YAML text, each entry terminated by a newline.
 */
function toYaml(obj, indent = 0) {
  const pad = " ".repeat(indent);
  const out = [];
  for (const [key, value] of Object.entries(obj)) {
    if (value === null) {
      out.push(`${pad}${key}: null\n`);
    } else if (Array.isArray(value)) {
      out.push(`${pad}${key}:\n`);
      for (const item of value) {
        if (item !== null && typeof item === "object") {
          // Objects inside arrays are rendered on their own indented lines
          // beneath a bare "- " marker.
          const nested = toYaml(item, indent + 2)
            .split("\n")
            .map(line => `${pad} ${line}`)
            .join("\n");
          out.push(`${pad}- \n${nested}\n`);
        } else {
          out.push(`${pad}- ${item}\n`);
        }
      }
    } else if (typeof value === "object") {
      out.push(`${pad}${key}:\n${toYaml(value, indent + 2)}`);
    } else if (typeof value === "string" && /[:#'"\n]/.test(value)) {
      // Quote strings that would otherwise be ambiguous YAML scalars.
      out.push(`${pad}${key}: "${value.replace(/"/g, '\\"')}"\n`);
    } else {
      out.push(`${pad}${key}: ${value}\n`);
    }
  }
  return out.join("");
}
/**
 * Build the full Buildkite pipeline object: one group per build platform,
 * each containing vendor/cpp/zig/link build steps plus the test steps for
 * every matching test platform. The result is serialized to YAML by main().
 * @returns {object} Pipeline with a top-level `priority` and `steps` array.
 */
function getPipeline() {
  /**
   * Helpers
   */
  // Unique step-key prefix for a platform, e.g. "linux-x64" or "linux-x64-baseline".
  const getKey = platform => {
    const { os, arch, baseline } = platform;
    if (baseline) {
      return `${os}-${arch}-baseline`;
    }
    return `${os}-${arch}`;
  };
  // Human-readable Buildkite label, using ":os:" emoji shorthand.
  const getLabel = platform => {
    const { os, arch, baseline } = platform;
    if (baseline) {
      return `:${os}: ${arch}-baseline`;
    }
    return `:${os}: ${arch}`;
  };
  // https://buildkite.com/docs/pipelines/command-step#retry-attributes
  // Retry transient failures (agent lost, exit 255, agent_stop); plain
  // exit 1 is retried only once.
  const getRetry = (limit = 3) => {
    return {
      automatic: [
        { exit_status: 1, limit: 1 },
        { exit_status: -1, limit },
        { exit_status: 255, limit },
        { signal_reason: "agent_stop", limit },
      ],
    };
  };
  // https://buildkite.com/docs/pipelines/managing-priorities
  // Main branch > merge queue > default; fork builds are deprioritized.
  const getPriority = () => {
    if (isFork()) {
      return -1;
    }
    if (isMainBranch()) {
      return 2;
    }
    if (isMergeQueue()) {
      return 1;
    }
    return 0;
  };
  /**
   * Steps
   */
  // Builds third-party dependencies for a platform.
  const getBuildVendorStep = platform => {
    const { os, arch, baseline } = platform;
    return {
      key: `${getKey(platform)}-build-vendor`,
      label: `${getLabel(platform)} - build-vendor`,
      agents: {
        os,
        arch,
        queue: `build-${os}`,
      },
      retry: getRetry(),
      cancel_on_build_failing: isMergeQueue(),
      env: {
        ENABLE_BASELINE: baseline ? "ON" : "OFF",
      },
      command: "bun run build:ci --target dependencies",
    };
  };
  // Compiles the C++ half of bun (BUN_CPP_ONLY).
  const getBuildCppStep = platform => {
    const { os, arch, baseline } = platform;
    return {
      key: `${getKey(platform)}-build-cpp`,
      label: `${getLabel(platform)} - build-cpp`,
      agents: {
        os,
        arch,
        queue: `build-${os}`,
      },
      retry: getRetry(),
      cancel_on_build_failing: isMergeQueue(),
      env: {
        BUN_CPP_ONLY: "ON",
        ENABLE_BASELINE: baseline ? "ON" : "OFF",
      },
      command: "bun run build:ci --target bun",
    };
  };
  // Compiles the Zig half of bun; cross-compiled on a shared "build-zig"
  // queue, so the target toolchain is passed explicitly.
  const getBuildZigStep = platform => {
    const { os, arch, baseline } = platform;
    const toolchain = baseline ? `${os}-${arch}-baseline` : `${os}-${arch}`;
    return {
      key: `${getKey(platform)}-build-zig`,
      label: `${getLabel(platform)} - build-zig`,
      agents: {
        queue: "build-zig",
      },
      retry: getRetry(),
      cancel_on_build_failing: isMergeQueue(),
      env: {
        ENABLE_BASELINE: baseline ? "ON" : "OFF",
      },
      command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`,
    };
  };
  // Links the final bun binary (BUN_LINK_ONLY) once all three build halves
  // have completed.
  const getBuildBunStep = platform => {
    const { os, arch, baseline } = platform;
    return {
      key: `${getKey(platform)}-build-bun`,
      label: `${getLabel(platform)} - build-bun`,
      depends_on: [
        `${getKey(platform)}-build-vendor`,
        `${getKey(platform)}-build-cpp`,
        `${getKey(platform)}-build-zig`,
      ],
      agents: {
        os,
        arch,
        queue: `build-${os}`,
      },
      retry: getRetry(),
      cancel_on_build_failing: isMergeQueue(),
      env: {
        BUN_LINK_ONLY: "ON",
        ENABLE_BASELINE: baseline ? "ON" : "OFF",
      },
      command: "bun run build:ci --target bun",
    };
  };
  // Runs the test suite against the linked binary; agents, command syntax,
  // and parallelism vary per OS (robobun provisions Linux/Windows VMs).
  const getTestBunStep = platform => {
    const { os, arch, distro, release } = platform;
    let name;
    if (os === "darwin" || os === "windows") {
      name = getLabel(platform);
    } else {
      // On Linux, label by distro (":debian:", ":ubuntu:") instead of ":linux:".
      name = getLabel({ ...platform, os: distro });
    }
    let agents;
    if (os === "darwin") {
      agents = { os, arch, queue: `test-darwin` };
    } else if (os === "windows") {
      agents = { os, arch, robobun: true };
    } else {
      agents = { os, arch, distro, release, robobun: true };
    }
    let command;
    if (os === "windows") {
      command = `node .\\scripts\\runner.node.mjs --step ${getKey(platform)}-build-bun`;
    } else {
      command = `./scripts/runner.node.mjs --step ${getKey(platform)}-build-bun`;
    }
    let parallelism;
    if (os === "darwin") {
      parallelism = 2;
    } else {
      parallelism = 10;
    }
    return {
      // Dots are stripped from the release so keys stay Buildkite-safe
      // (e.g. "22.04" -> "2204").
      key: `${getKey(platform)}-${distro}-${release.replace(/\./g, "")}-test-bun`,
      label: `${name} - test-bun`,
      depends_on: [`${getKey(platform)}-build-bun`],
      agents,
      retry: getRetry(),
      cancel_on_build_failing: isMergeQueue(),
      soft_fail: isMainBranch(),
      parallelism,
      command,
    };
  };
  /**
   * Config
   */
  const buildPlatforms = [
    { os: "darwin", arch: "aarch64" },
    { os: "darwin", arch: "x64" },
    { os: "linux", arch: "aarch64" },
    { os: "linux", arch: "x64" },
    { os: "linux", arch: "x64", baseline: true },
    { os: "windows", arch: "x64" },
    { os: "windows", arch: "x64", baseline: true },
  ];
  const testPlatforms = [
    { os: "darwin", arch: "aarch64", distro: "sonoma", release: "14" },
    { os: "darwin", arch: "aarch64", distro: "ventura", release: "13" },
    { os: "darwin", arch: "x64", distro: "sonoma", release: "14" },
    { os: "darwin", arch: "x64", distro: "ventura", release: "13" },
    { os: "linux", arch: "aarch64", distro: "debian", release: "12" },
    { os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04" },
    { os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04" },
    { os: "linux", arch: "x64", distro: "debian", release: "12" },
    { os: "linux", arch: "x64", distro: "ubuntu", release: "22.04" },
    { os: "linux", arch: "x64", distro: "ubuntu", release: "20.04" },
    { os: "linux", arch: "x64", distro: "debian", release: "12", baseline: true },
    { os: "linux", arch: "x64", distro: "ubuntu", release: "22.04", baseline: true },
    { os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", baseline: true },
    { os: "windows", arch: "x64", distro: "server", release: "2019" },
    { os: "windows", arch: "x64", distro: "server", release: "2019", baseline: true },
  ];
  return {
    priority: getPriority(),
    steps: [
      ...buildPlatforms.map(platform => {
        const { os, arch, baseline } = platform;
        return {
          key: getKey(platform),
          group: getLabel(platform),
          steps: [
            getBuildVendorStep(platform),
            getBuildCppStep(platform),
            getBuildZigStep(platform),
            getBuildBunStep(platform),
            // NOTE(review): the inner arrow's `platform` shadows the outer
            // one; `os`/`arch`/`baseline` were destructured above precisely
            // so this filter compares build platform vs. test platform.
            // Both `baseline` fields being undefined compares equal, which
            // matches non-baseline builds to non-baseline test platforms.
            ...testPlatforms
              .filter(platform => platform.os === os && platform.arch === arch && baseline === platform.baseline)
              .map(platform => getTestBunStep(platform)),
          ],
        };
      }),
    ],
  };
}
/**
 * Entry point: logs the build context, skips pipeline generation entirely for
 * documentation-only changes, then serializes the pipeline to
 * `.buildkite/ci.yml` for upload.
 */
async function main() {
  console.log("Checking environment...");
  console.log(" - Repository:", getRepository());
  console.log(" - Branch:", getBranch());
  console.log(" - Commit:", getCommit());
  console.log(" - Is Main Branch:", isMainBranch());
  console.log(" - Is Merge Queue:", isMergeQueue());
  console.log(" - Is Pull Request:", isPullRequest());
  const changedFiles = await getChangedFiles();
  if (changedFiles) {
    console.log(
      // FIX: previously printed the literal text "$(unknown)" for every entry
      // (a garbled placeholder); interpolate each filename instead.
      `Found ${changedFiles.length} changed files: \n${changedFiles.map(filename => ` - ${filename}`).join("\n")}`,
    );
    if (changedFiles.every(filename => isDocumentation(filename))) {
      console.log("Since changed files are only documentation, skipping...");
      return;
    }
    if (changedFiles.every(filename => isTest(filename) || isDocumentation(filename))) {
      // TODO: console.log("Since changed files contain tests, skipping build...");
    }
  }
  const pipeline = getPipeline();
  const content = toYaml(pipeline);
  const contentPath = join(process.cwd(), ".buildkite", "ci.yml");
  writeFileSync(contentPath, content);
  console.log("Generated pipeline:");
  console.log(" - Path:", contentPath);
  console.log(" - Size:", (content.length / 1024).toFixed(), "KB");
}
await main();

790
.buildkite/ci.yml Normal file
View File

@@ -0,0 +1,790 @@
# Build and test Bun on macOS, Linux, and Windows.
# https://buildkite.com/docs/pipelines/defining-steps
#
# If a step has the `robobun: true` label, robobun will listen
# to webhooks from Buildkite and provision a VM to run the step.
#
# Changes to this file will be automatically uploaded on the next run
# for a particular commit.
steps:
# macOS aarch64
- key: "darwin-aarch64"
group: ":darwin: aarch64"
steps:
- key: "darwin-aarch64-build-deps"
label: "build-deps"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
command:
- "bun run build:ci --target dependencies"
- key: "darwin-aarch64-build-cpp"
label: "build-cpp"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-aarch64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain darwin-aarch64"
- key: "darwin-aarch64-build-bun"
label: "build-bun"
agents:
queue: "build-darwin"
os: "darwin"
arch: "aarch64"
depends_on:
- "darwin-aarch64-build-deps"
- "darwin-aarch64-build-cpp"
- "darwin-aarch64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-aarch64-test-macos-14"
label: ":darwin: 14 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-aarch64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "aarch64"
release: "14"
command:
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
- key: "darwin-aarch64-test-macos-13"
label: ":darwin: 13 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-aarch64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "aarch64"
release: "13"
command:
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
# macOS x64
- key: "darwin-x64"
group: ":darwin: x64"
steps:
- key: "darwin-x64-build-deps"
label: "build-deps"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
command:
- "bun run build:ci --target dependencies"
- key: "darwin-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain darwin-x64"
- key: "darwin-x64-build-bun"
label: "build-bun"
agents:
queue: "build-darwin"
os: "darwin"
arch: "x64"
depends_on:
- "darwin-x64-build-deps"
- "darwin-x64-build-cpp"
- "darwin-x64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "darwin-x64-test-macos-14"
label: ":darwin: 14 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-x64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "x64"
release: "14"
command:
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
- key: "darwin-x64-test-macos-13"
label: ":darwin: 13 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 3
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "darwin-x64-build-bun"
agents:
queue: "test-darwin"
os: "darwin"
arch: "x64"
release: "13"
command:
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
# Linux x64
- key: "linux-x64"
group: ":linux: x64"
steps:
- key: "linux-x64-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
command:
- "bun run build:ci --target dependencies"
- key: "linux-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain linux-x64"
- key: "linux-x64-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
depends_on:
- "linux-x64-build-deps"
- "linux-x64-build-cpp"
- "linux-x64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-test-debian-12"
label: ":debian: 12 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
- key: "linux-x64-test-ubuntu-2204"
label: ":ubuntu: 22.04 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
- key: "linux-x64-test-ubuntu-2004"
label: ":ubuntu: 20.04 x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
# Linux x64-baseline
- key: "linux-x64-baseline"
group: ":linux: x64-baseline"
steps:
- key: "linux-x64-baseline-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target dependencies"
- key: "linux-x64-baseline-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
env:
ENABLE_BASELINE: "ON"
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-baseline-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target bun-zig --toolchain linux-x64-baseline"
- key: "linux-x64-baseline-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "x64"
depends_on:
- "linux-x64-baseline-build-deps"
- "linux-x64-baseline-build-cpp"
- "linux-x64-baseline-build-zig"
env:
ENABLE_BASELINE: "ON"
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-x64-baseline-test-debian-12"
label: ":debian: 12 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
- key: "linux-x64-baseline-test-ubuntu-2204"
label: ":ubuntu: 22.04 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
- key: "linux-x64-baseline-test-ubuntu-2004"
label: ":ubuntu: 20.04 x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-x64-baseline-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "x64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
# Linux aarch64
- key: "linux-aarch64"
group: ":linux: aarch64"
steps:
- key: "linux-aarch64-build-deps"
label: "build-deps"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
command:
- "bun run build:ci --target dependencies"
- key: "linux-aarch64-build-cpp"
label: "build-cpp"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-aarch64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain linux-aarch64"
- key: "linux-aarch64-build-bun"
label: "build-bun"
agents:
queue: "build-linux"
os: "linux"
arch: "aarch64"
depends_on:
- "linux-aarch64-build-deps"
- "linux-aarch64-build-cpp"
- "linux-aarch64-build-zig"
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "linux-aarch64-test-debian-12"
label: ":debian: 12 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "debian"
release: "12"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
- key: "linux-aarch64-test-ubuntu-2204"
label: ":ubuntu: 22.04 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "ubuntu"
release: "22.04"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
- key: "linux-aarch64-test-ubuntu-2004"
label: ":ubuntu: 20.04 aarch64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 2
retry:
automatic:
- exit_status: 1
limit: 1
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "linux-aarch64-build-bun"
agents:
robobun: "true"
os: "linux"
arch: "aarch64"
distro: "ubuntu"
release: "20.04"
command:
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
# Windows x64
- key: "windows-x64"
group: ":windows: x64"
steps:
- key: "windows-x64-build-deps"
label: "build-deps"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
command:
- "bun run build:ci --target dependencies"
- key: "windows-x64-build-cpp"
label: "build-cpp"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
command:
- "bun run build:ci --target bun-zig --toolchain windows-x64"
- key: "windows-x64-build-bun"
label: "build-bun"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
depends_on:
- "windows-x64-build-deps"
- "windows-x64-build-cpp"
- "windows-x64-build-zig"
retry:
automatic:
- exit_status: 255
limit: 5
env:
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-test-bun"
label: ":windows: x64 - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 1
retry:
automatic:
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "windows-x64-build-bun"
agents:
robobun: "true"
os: "windows"
arch: "x64"
command:
- "node .\\scripts\\runner.node.mjs --step windows-x64-build-bun"
# Windows x64-baseline
- key: "windows-x64-baseline"
group: ":windows: x64-baseline"
steps:
- key: "windows-x64-baseline-build-deps"
label: "build-deps"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target dependencies"
- key: "windows-x64-baseline-build-cpp"
label: "build-cpp"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
BUN_CPP_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-baseline-build-zig"
label: "build-zig"
agents:
queue: "build-zig"
env:
ENABLE_BASELINE: "ON"
command:
- "bun run build:ci --target bun-zig --toolchain windows-x64-baseline"
- key: "windows-x64-baseline-build-bun"
label: "build-bun"
agents:
queue: "build-windows"
os: "windows"
arch: "x64"
depends_on:
- "windows-x64-baseline-build-deps"
- "windows-x64-baseline-build-cpp"
- "windows-x64-baseline-build-zig"
retry:
automatic:
- exit_status: 255
limit: 5
env:
ENABLE_BASELINE: "ON"
BUN_LINK_ONLY: "ON"
command:
- "bun run build:ci --target bun"
- key: "windows-x64-baseline-test-bun"
label: ":windows: x64-baseline - test-bun"
if: "build.branch != 'main'"
parallelism: 10
soft_fail:
- exit_status: 1
retry:
automatic:
- exit_status: -1
limit: 3
- exit_status: 255
limit: 3
- signal_reason: agent_stop
limit: 3
- signal: SIGTERM
limit: 3
depends_on:
- "windows-x64-baseline-build-bun"
agents:
robobun: "true"
os: "windows"
arch: "x64"
command:
- "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun"

View File

@@ -29,10 +29,6 @@ function assert_curl() {
assert_command "curl" "curl" "https://curl.se/download.html"
}
function assert_node() {
assert_command "node" "node" "https://nodejs.org/en/download/"
}
function assert_command() {
local command="$1"
local package="$2"
@@ -96,15 +92,6 @@ assert_build
assert_buildkite_agent
assert_jq
assert_curl
assert_node
assert_release
assert_canary
# If is Windows x64, run scripts/disk-space.ps1
if [ -n "$BUILDKITE_AGENT_META_DATA_OS" ] && [ "$BUILDKITE_AGENT_META_DATA_OS" == "windows" ]; then
run_command powershell ".buildkite/scripts/disk-space.ps1"
fi
run_command node ".buildkite/ci.mjs"
if [ -f ".buildkite/ci.yml" ]; then
upload_buildkite_pipeline ".buildkite/ci.yml"
fi
upload_buildkite_pipeline ".buildkite/ci.yml"

2
.gitattributes vendored
View File

@@ -49,5 +49,3 @@ vendor/brotli/** linguist-vendored
test/js/node/test/fixtures linguist-vendored
test/js/node/test/common linguist-vendored
test/js/bun/css/files linguist-vendored

View File

@@ -7,7 +7,6 @@ on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.27"

View File

@@ -7,7 +7,6 @@ on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.27"

View File

@@ -83,26 +83,6 @@ jobs:
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
rm -rf is-outdated.txt outdated.txt latest.txt
- name: Generate comment text with Sentry Link
if: github.event.label.name == 'crash'
# ignore if fail
continue-on-error: true
id: generate-comment-text
env:
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_EVENTS_SECRET }}
shell: bash
run: |
bun scripts/associate-issue-with-sentry.ts
if [[ -f "sentry-link.txt" ]]; then
echo "sentry-link=$(cat sentry-link.txt)" >> $GITHUB_OUTPUT
fi
if [[ -f "sentry-id.txt" ]]; then
echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT
fi
- name: Add labels
uses: actions-cool/issues-helper@v3
if: github.event.label.name == 'crash'
@@ -112,7 +92,7 @@ jobs:
issue-number: ${{ github.event.issue.number }}
labels: ${{ steps.add-labels.outputs.labels }}
- name: Comment outdated
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
@@ -126,40 +106,6 @@ jobs:
```sh
bun upgrade
```
- name: Comment with Sentry Link and outdated version
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.oudated }}.
Are you able to reproduce this crash on the latest version of Bun?
```sh
bun upgrade
```
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment with Sentry Link
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated != 'true'
uses: actions-cool/issues-helper@v3
with:
actions: "create-comment"
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Thank you for reporting this crash.
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
- name: Comment needs repro
if: github.event.label.name == 'needs repro'
uses: actions-cool/issues-helper@v3

View File

@@ -7,7 +7,6 @@ on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.27"

View File

@@ -7,7 +7,6 @@ on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:
env:
BUN_VERSION: "1.1.27"

5
.gitignore vendored
View File

@@ -141,7 +141,6 @@ test/node.js/upstream
.zig-cache
scripts/env.local
*.generated.ts
src/bake/generated.ts
# Dependencies
/vendor
@@ -164,7 +163,3 @@ src/bake/generated.ts
/src/deps/zstd
/src/deps/zlib
/src/deps/zig
# Generated files
.buildkite/ci.yml

70
.vscode/launch.json generated vendored
View File

@@ -14,7 +14,7 @@
"name": "bun test [file]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -29,7 +29,7 @@
"name": "bun test [file] --only",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -50,7 +50,7 @@
"name": "bun test [file] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -65,7 +65,7 @@
"name": "bun test [file] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
@@ -80,7 +80,7 @@
"name": "bun test [file] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--watch", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -95,7 +95,7 @@
"name": "bun test [file] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--hot", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -110,7 +110,7 @@
"name": "bun test [file] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -131,7 +131,7 @@
"name": "bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -268,7 +268,7 @@
"name": "bun test [...]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -283,7 +283,7 @@
"name": "bun test [...] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -298,7 +298,7 @@
"name": "bun test [...] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -313,7 +313,7 @@
"name": "bun test [...] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -328,7 +328,7 @@
"name": "bun test [...] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -343,7 +343,7 @@
"name": "bun test [...] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -364,7 +364,7 @@
"name": "bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -401,7 +401,7 @@
"name": "bun test [*]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -415,7 +415,7 @@
"name": "bun test [*] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -429,7 +429,7 @@
"name": "bun test [*] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["test"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -481,7 +481,7 @@
"name": "Windows: bun test [file]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -510,7 +510,7 @@
"name": "Windows: bun test --only [file]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -539,7 +539,7 @@
"name": "Windows: bun test [file] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -568,7 +568,7 @@
"name": "Windows: bun test [file] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -597,7 +597,7 @@
"name": "Windows: bun test [file] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -635,7 +635,7 @@
"name": "Windows: bun test [file] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -822,7 +822,7 @@
"name": "Windows: bun test [...]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -851,7 +851,7 @@
"name": "Windows: bun test [...] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -880,7 +880,7 @@
"name": "Windows: bun test [...] (verbose)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -909,7 +909,7 @@
"name": "Windows: bun test [...] --watch",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -938,7 +938,7 @@
"name": "Windows: bun test [...] --hot",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -967,7 +967,7 @@
"name": "Windows: bun test [...] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1005,7 +1005,7 @@
"name": "Windows: bun test [...] --inspect-brk",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1070,7 +1070,7 @@
"name": "Windows: bun test [*]",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1095,7 +1095,7 @@
"name": "Windows: bun test [*] (fast)",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1124,7 +1124,7 @@
"name": "Windows: bun test [*] --inspect",
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
"args": ["test"],
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}/test",
"environment": [
{
"name": "FORCE_COLOR",
@@ -1196,4 +1196,4 @@
"description": "Usage: bun test [...]",
},
],
}
}

View File

@@ -30,7 +30,7 @@ $ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config
```
```bash#openSUSE Tumbleweed
$ sudo zypper install go cmake ninja automake git icu rustup && rustup toolchain install stable
$ sudo zypper install go cmake ninja automake git rustup && rustup toolchain install stable
```
{% /codetabs %}
@@ -77,8 +77,8 @@ $ sudo pacman -S llvm clang lld
```bash#Fedora
$ sudo dnf install 'dnf-command(copr)'
$ sudo dnf copr enable -y @fedora-llvm-team/llvm17
$ sudo dnf install llvm16 clang16 lld16-devel
$ sudo dnf copr enable -y @fedora-llvm-team/llvm-snapshots
$ sudo dnf install llvm clang lld
```
```bash#openSUSE Tumbleweed

2
LATEST
View File

@@ -1 +1 @@
1.1.33
1.1.30

View File

@@ -3,9 +3,9 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench:deno": "deno run -A --unstable deno.js",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}

View File

@@ -1,11 +1,11 @@
{
"name": "bench",
"scripts": {
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"deps": "cd src && bun run deps",
"build": "cd src && bun run build",
"bench:deno": "deno run -A --unstable deno.js",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}

View File

@@ -1,14 +0,0 @@
syntax = "proto3";
package benchmark;
service BenchmarkService {
rpc Ping(Request) returns (Response);
}
message Request {
string message = 1;
}
message Response {
string message = 1;
}

View File

@@ -1,33 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFxjCCA66gAwIBAgIUUaQCzOcxcFBP0KwoQfNqD/FoI44wDQYJKoZIhvcNAQEL
BQAwYjELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJh
bmNpc2NvMQwwCgYDVQQKDANCdW4xDDAKBgNVBAsMA0J1bjESMBAGA1UEAwwJbG9j
YWxob3N0MB4XDTI0MTAxNjAwMDExNloXDTM0MTAxNDAwMDExNlowYjELMAkGA1UE
BhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJhbmNpc2NvMQwwCgYD
VQQKDANCdW4xDDAKBgNVBAsMA0J1bjESMBAGA1UEAwwJbG9jYWxob3N0MIICIjAN
BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp2s1CWRRV3bkjUxyBefcRCiZj8v6
LIIWOb/kFJOo1PQsmQtOOWfY/kNEATPhLtEVolMzsQtaKV+u/Jnp6vU6cCU0qfQ/
cha/s0XaSn9zkJSXjmNOPDOXoeJ5wmSUvWETRvDgeYXCg84zTwRnD1pXIsKxHtia
SYkTC29skSn0+63GW2Ebzkbn3jcYbk3gfkRO/qw8EDh/4/TcS2SjoHl96E1QcfBX
InXrPGoHQhuqJV60rmmkVws0lTIZIq0g2p7iFDCg5TG1asakX7+CrEM/q+oyo3e8
RwMfc+9pqFEqyvXGIQSulS+CVKKbpAFMg07UGYe1t0s5iCwfLQ9apaKL31t/3Vkr
uVKgy5FrPLnRXkFXDZ1v+43AZBmdLrKODzsqHEbt2JmV0V6JVUkE4kbeJr/nlkhQ
x6yXloYY3VKbnCb1L3HmMInrK1QSpxlOb8RllTd33oBwd1FKEvH2gza0j9hqq8uQ
hWVN7tlamkgtBteZ8Y9fd3MdxD9iZOx4dVtCX1+sgJFdaL2ZgE0asojn46yT8Uqw
5d0M9vqmWc5AqG7c4UWWRrfB1MfOq/X8GtImmKyhEgizIPdWFeF1cNjhPffJv4yR
Y4Rj33OBTCM+9h8ZSw/fKo55yRXyz3bjrW2Mg8Dtq+6TcRd5gSLCaTN6jX8E9y7G
TobnA9MnKHhSIhsCAwEAAaN0MHIwHQYDVR0OBBYEFEJU6/9ELCp1CAxYJ5FJJxpV
FSRmMB8GA1UdIwQYMBaAFEJU6/9ELCp1CAxYJ5FJJxpVFSRmMA8GA1UdEwEB/wQF
MAMBAf8wHwYDVR0RBBgwFoIJbG9jYWxob3N0ggkxMjcuMC4wLjEwDQYJKoZIhvcN
AQELBQADggIBACyOPdVwfJg1aUNANy78+cm6eoInM9NDdXGWHMqCJwYF6qJTQV11
jYwYrl+OWOi3CEC+ogXl+uJX4tSS5d+rBTXEb73cLpogxP+xuxr4cBHhtgpGRpY0
GqWCFUTexHxXMrYhHQxf3uv79PNauw/dd1Baby1OjF3zSKRzFsv4KId97cAgT/9H
HfUo2ym5jmhNFj5rhUavO3Pw1++1eeDeDAkS6T59buzx0h9760WD20oBdgjt42cb
P6xg9OwV7ALQSwJ8YPEXpkl7u+6jy0j5ceYmXh76tAyA+hDYOJrY0opBjSPmXH99
p3W63gvk/AdfeAdbFHp6en0b04x4EIogOGZxBP35rzBvsQpqavBE3PBpUIyrQs5p
OBUncRrcjEDL6WKh6RJIjZnvpHPrEqOqyxaeWRc4+85ZrVArJHGMc8I+zs9uCFjo
Cjfde3d317kCszUTxo0l3azyBpr007PMIUoBF2VJEAyQp2Tz/yu0CbEscNJO/wCn
Sb1A6ojaQcgQe2hsaJz/mS+OOjHHaDbCp9iltP2CS63PYleEx4q1Bn8KVRy2zYTB
n74y4YaD8Q+hSA6zU741pzqK2SFCpBQnSz757ocr6WspQ47iOonX2giGZS/3KVeK
qNzU14+h0b8HaBqZmOvjF+S4G0HDpRwxPzDWgc7dEIWlzHH+ZCqjBFwL
-----END CERTIFICATE-----

View File

@@ -1,31 +0,0 @@
const grpc = require("@grpc/grpc-js");
const protoLoader = require("@grpc/proto-loader");
const packageDefinition = protoLoader.loadSync("benchmark.proto", {});
const proto = grpc.loadPackageDefinition(packageDefinition).benchmark;
const fs = require("fs");
function ping(call, callback) {
callback(null, { message: "Hello, World" });
}
function main() {
const server = new grpc.Server();
server.addService(proto.BenchmarkService.service, { ping: ping });
const tls = !!process.env.TLS && (process.env.TLS === "1" || process.env.TLS === "true");
const port = process.env.PORT || 50051;
const host = process.env.HOST || "localhost";
let credentials;
if (tls) {
const ca = fs.readFileSync("./cert.pem");
const key = fs.readFileSync("./key.pem");
const cert = fs.readFileSync("./cert.pem");
credentials = grpc.ServerCredentials.createSsl(ca, [{ private_key: key, cert_chain: cert }]);
} else {
credentials = grpc.ServerCredentials.createInsecure();
}
server.bindAsync(`${host}:${port}`, credentials, () => {
console.log(`Server running at ${tls ? "https" : "http"}://${host}:${port}`);
});
}
main();

View File

@@ -1,52 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCnazUJZFFXduSN
THIF59xEKJmPy/osghY5v+QUk6jU9CyZC045Z9j+Q0QBM+Eu0RWiUzOxC1opX678
menq9TpwJTSp9D9yFr+zRdpKf3OQlJeOY048M5eh4nnCZJS9YRNG8OB5hcKDzjNP
BGcPWlciwrEe2JpJiRMLb2yRKfT7rcZbYRvORufeNxhuTeB+RE7+rDwQOH/j9NxL
ZKOgeX3oTVBx8Fcides8agdCG6olXrSuaaRXCzSVMhkirSDanuIUMKDlMbVqxqRf
v4KsQz+r6jKjd7xHAx9z72moUSrK9cYhBK6VL4JUopukAUyDTtQZh7W3SzmILB8t
D1qloovfW3/dWSu5UqDLkWs8udFeQVcNnW/7jcBkGZ0uso4POyocRu3YmZXRXolV
SQTiRt4mv+eWSFDHrJeWhhjdUpucJvUvceYwiesrVBKnGU5vxGWVN3fegHB3UUoS
8faDNrSP2Gqry5CFZU3u2VqaSC0G15nxj193cx3EP2Jk7Hh1W0JfX6yAkV1ovZmA
TRqyiOfjrJPxSrDl3Qz2+qZZzkCobtzhRZZGt8HUx86r9fwa0iaYrKESCLMg91YV
4XVw2OE998m/jJFjhGPfc4FMIz72HxlLD98qjnnJFfLPduOtbYyDwO2r7pNxF3mB
IsJpM3qNfwT3LsZOhucD0ycoeFIiGwIDAQABAoICAE+YYrDCZwHEXsjmzVcNcuVc
wBVjjt9WQabXGmLGCQClzgY9H8WfH8VSyaQgvDB762MvV2YW1ZjSCunBazrvuAbV
SYJ7wyZEtoNO9IdyrMjSPHPPtsRcavzmJalMFIMtAfM6Vh6wf1gW0sIAf9cGxmKa
WYcmx8OqTcmkAePKJNT7O1D6jDO39kjpvM3EbLTbWQsva6bylasVIR8fC8QhvsCQ
8WwaLfMOSPaCGk1Nxcjai+BYDW/sveUo2lZoJTSLUUT0EaqlxXCsXD3BWSj5F+5t
/AFHzdWdIHkIHB2P6V5xFu9fwHjhC3+dh42jqHLNKX2xza0FMKcTAwdzQ094RjL3
cOGIsa0Vdt7Mks5eLCRxz0xI3kyrbF0/CopxT0pVWZwUzPk1G+Z3HesWkVtQpg7u
RYzsoNKKc5mhc/V+vG290WAcNB4E3m85DgKQr4ib+J/rCy5/SnJYgg4QXsEyNlQ5
ESBtRmuPfnrPIxqrDKZ7ZsJv8XFWydXTOfJxeKR1T1S02iYna+z1FnNu+t0ELTr9
uhmkuqmV8RJVTub1P2EJPdiku/61UwNLyyZMgFjATDxB0hHIj1FP1HbfhEYbkYNc
Dl7a7egJ4KFYWpQ+7MzOmc0OKq1HuJ9H4FhoYpbVq1OQosZ6G3d9afKSZa6dFdK0
8ujvdQBR0NlAhc/LAr6BAoIBAQDfD3h9P4i5L8NCdocovCi3Eo0kcNQ3QuvnWrrs
B/9CLoWhJrcLV85d0dEX6lSYl9BWW02ilVB+Qvom2wS2td1CBUgDxovX4tCZCuXt
otYL/yWWOA7IG0Fjt6YEERQD/tRfKnn8hVBlk5cDTXXxHRGVMku4CHsN3ILtITQS
VnVsTrGoWd6mFFA9X9Qu4zR9wKtjGEuL7BT8ixxtXLa2tMjdc4UL140yAgmMemJS
TzC6EURe2OnhIzVe9yyLKcqw0prkGHg/Lau5lA1CAh67ZMY4EjO3cuda8R+O7vyO
z2afeaTORzzdEbSZPG+8oqIN1/RjRCbl3RXYN8ibSwOzp6X7AoIBAQDAJEVta98J
P2/36rXrkl6WrRfYqUPy6vgo/lPuRpp+BQ7ldgmH4+ZrJW5Mxa5hktVujk/C2kAO
auzhzNlsxR+c/KwtsL1JXwBn8CT1bR0qvi+URmvGQn9GOKrLLy+6cfphuZWuc4/r
hAgXzEjzPcJJJfxA1i2soKPbiFiCGHxot68P4uJSM2sU6QjNIxEjPbTJjEg894pD
GJoiRRVHgnzzxL3cqrK90Zn6MAl9f2tYihfddsENeZb5t84LBppxBSGouE3ZH8uD
Sufs4DSj1ptocbDbX+0kRNqfjTI5ivDxlS+ZKBe05PVTUmGBAWLamfCe89IW3/z+
Rfkh4ZBPtlphAoIBADwjSqPR7kWnN+iCVjxIRl3dNYpelQh1FW7hikW6fjpUmphw
/KalPLEUsV/WQIqHW5b8tLihsvrnidPR9rpf29BB5kGGVQuWThEE3CquXTEM0BBo
+qs+lemRiMPN6uyM1qr1o7/OHXfVS8CLMMIZyTTFQ57RQoPhMLdH3WcYQj46FTHD
UQDLtzpkzKr7fJpuyIZF9ZA6zQmtY7OkbGpj4Ue7LmKb8ahK3lIuaLWyPfvcTeeY
aa3WNTxuPWcjlE8J6NKYOksmQAcfgFeMhMaXC83wMltCMlfVbGG30wWZqxxRynoG
wMUFUgCCR8m+uxwqXewpYqdUbOBHYeFkXxIfn+MCggEAR5p8wQ1NHd4lNOekCfkP
BOnWlChoKRPFjUlSL97h3gq2hW6amKimitF1LGkS1kvo+/1O3heFfZn9UxyK/kzr
vg4vgAt4Tup3dUR6EXgrQW2Ev6YKreTEF4Awre2UxM+K9nY5wLxSKvuWJIA9w2AF
kkr0mZj3hniK99n02e6UFlY1iB8OJoIA6tb5L7FcxpxNTjrYBNhfDygQ8Kp8Bp0r
QZDVDHIUkEaXMjRKpRkiAOndgOurgAEK8V69C0DXtzypUX31jO+bYP8+NPlMxK3K
Vn7f4LD75+M88e6lg+oyZmUpStM1GnWksvtlWLUSiNKLaEEGzv2EA6JB+I1dwUb8
oQKCAQEAlmisUyn1/lpNnEzKsfUnRs53WxS2e1br5vJ5+pet3cjXT2btfp6J5/mf
Tfqv5mZfTjYxydG0Kl3afI/SnhTcRS2/s4svrktZYLOLM2PAGYdCV6j1stXl4ObO
eIfjzB3y1Zc2dEcWTylJ/lABoNGMPWFJQ67q8WS37pUHQPseJ++LmZFvlRyBgZBl
VLqiHHiZ2ax+yC1ZxY4RECtEiYFplspNldNe+bP/lzTJftsUDe1FqRT/SvEam+1f
kb//sbHkJ+l4BEv0Us3SIGwJ0BblhxLYO34IFVpheY4UQBy/nRaeUUdVR9r8JtYD
z/cCLOrUJfealezimyd8SKPWPeHhrA==
-----END PRIVATE KEY-----

View File

@@ -1,15 +0,0 @@
{
"name": "bench",
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bun:server": "TLS=1 PORT=50051 bun ./index.js",
"node:server": "TLS=1 PORT=50051 node ./index.js",
"bench": "ghz --cacert ./cert.pem --proto ./benchmark.proto --call benchmark.BenchmarkService.Ping -d '{\"message\": \"Hello\"}' --total=100000 localhost:50051",
"bench:insecure": "ghz --insecure --proto ./benchmark.proto --call benchmark.BenchmarkService.Ping -d '{\"message\": \"Hello\"}' --total=100000 localhost:50051"
},
"dependencies": {
"@grpc/grpc-js": "1.12.0",
"@grpc/proto-loader": "0.7.10"
}
}

View File

@@ -3,9 +3,9 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench:deno": "deno run -A --unstable deno.js",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
},
"dependencies": {

View File

@@ -3,9 +3,9 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "bun bun.js | grep iter",
"bench:node": "node node.mjs | grep iter",
"bench:deno": "deno run -A --unstable deno.mjs | grep iter",
"bench:bun": "$BUN bun.js | grep iter",
"bench:node": "$NODE node.mjs | grep iter",
"bench:deno": "$DENO run -A --unstable deno.mjs | grep iter",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}

View File

@@ -3,8 +3,8 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench": "bun run bench:bun && bun run bench:node"
}
}

View File

@@ -1,15 +0,0 @@
import { bench, run } from "./runner.mjs";
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
for (let fillSize of [4, 8, 16, 11]) {
const buffer = Buffer.allocUnsafe(size);
const pattern = "x".repeat(fillSize);
bench(`Buffer.fill ${size} bytes with ${fillSize} byte value`, () => {
buffer.fill(pattern);
});
}
}
await run();

View File

@@ -5,10 +5,10 @@
},
"scripts": {
"build": "exit 0",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"deps": "npm install && bash src/download.sh",
"bench:deno": "deno run -A --unstable-ffi deno.js",
"bench:deno": "$DENO run -A --unstable-ffi deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}

View File

@@ -1,84 +0,0 @@
# CI
This directory contains scripts for building CI images for Bun.
## Building
### `macOS`
On macOS, images are built using [`tart`](https://tart.run/), a tool that abstracts over the [`Virtualization.Framework`](https://developer.apple.com/documentation/virtualization) APIs, to run macOS VMs.
To install the dependencies required, run:
```sh
$ cd ci
$ bun run bootstrap
```
To build a vanilla macOS VM, run:
```sh
$ bun run build:darwin-aarch64-vanilla
```
This builds a vanilla macOS VM with the current macOS release on your machine. It runs scripts to disable things like spotlight and siri, but it does not install any software.
> Note: The image size is 50GB, so make sure you have enough disk space.
If you want to build a specific macOS release, you can run:
```sh
$ bun run build:darwin-aarch64-vanilla-15
```
> Note: You cannot build a newer release of macOS on an older macOS machine.
To build a macOS VM with software installed to build and test Bun, run:
```sh
$ bun run build:darwin-aarch64
```
## Running
### `macOS`
## How To
### Support a new macOS release
1. Visit [`ipsw.me`](https://ipsw.me/VirtualMac2,1) and find the IPSW of the macOS release you want to build.
2. Add an entry to [`ci/darwin/variables.pkr.hcl`](/ci/darwin/variables.pkr.hcl) with the following format:
```hcl
sonoma = {
distro = "sonoma"
release = "15"
ipsw = "https://updates.cdn-apple.com/..."
}
```
3. Add matching scripts to [`ci/package.json`](/ci/package.json) to build the image, then test it:
```sh
$ bun run build:darwin-aarch64-vanilla-15
```
> Note: If you need to troubleshoot the build, you can remove the `headless = true` property from [`ci/darwin/image-vanilla.pkr.hcl`](/ci/darwin/image-vanilla.pkr.hcl) and the VM's screen will be displayed.
4. Test and build the non-vanilla image:
```sh
$ bun run build:darwin-aarch64-15
```
This will use the vanilla image and run the [`scripts/bootstrap.sh`](/scripts/bootstrap.sh) script to install the required software to build and test Bun.
5. Publish the images:
```sh
$ bun run login
$ bun run publish:darwin-aarch64-vanilla-15
$ bun run publish:darwin-aarch64-15
```

View File

@@ -1,46 +0,0 @@
# Generates a vanilla macOS VM with optimized settings for virtualized environments.
# See login.sh and optimize.sh for details.
data "external-raw" "boot-script" {
program = ["sh", "-c", templatefile("scripts/boot-image.sh", var)]
}
source "tart-cli" "bun-darwin-aarch64-vanilla" {
vm_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
from_ipsw = local.release.ipsw
cpu_count = local.cpu_count
memory_gb = local.memory_gb
disk_size_gb = local.disk_size_gb
ssh_username = local.username
ssh_password = local.password
ssh_timeout = "120s"
create_grace_time = "30s"
boot_command = split("\n", data.external-raw.boot-script.result)
headless = true # Disable if you need to debug why the boot_command is not working
}
build {
sources = ["source.tart-cli.bun-darwin-aarch64-vanilla"]
provisioner "file" {
content = file("scripts/setup-login.sh")
destination = "/tmp/setup-login.sh"
}
provisioner "shell" {
inline = ["echo \"${local.password}\" | sudo -S sh -c 'sh /tmp/setup-login.sh \"${local.username}\" \"${local.password}\"'"]
}
provisioner "file" {
content = file("scripts/optimize-machine.sh")
destination = "/tmp/optimize-machine.sh"
}
provisioner "shell" {
inline = ["sudo sh /tmp/optimize-machine.sh"]
}
provisioner "shell" {
inline = ["sudo rm -rf /tmp/*"]
}
}

View File

@@ -1,44 +0,0 @@
# Generates a macOS VM with software installed to build and test Bun.
source "tart-cli" "bun-darwin-aarch64" {
vm_name = "bun-darwin-aarch64-${local.release.distro}-${local.release.release}"
vm_base_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
cpu_count = local.cpu_count
memory_gb = local.memory_gb
disk_size_gb = local.disk_size_gb
ssh_username = local.username
ssh_password = local.password
ssh_timeout = "120s"
headless = true
}
build {
sources = ["source.tart-cli.bun-darwin-aarch64"]
provisioner "file" {
content = file("../../scripts/bootstrap.sh")
destination = "/tmp/bootstrap.sh"
}
provisioner "shell" {
inline = ["CI=true sh /tmp/bootstrap.sh"]
}
provisioner "file" {
source = "darwin/plists/"
destination = "/tmp/"
}
provisioner "shell" {
inline = [
"sudo ls /tmp/",
"sudo mv /tmp/*.plist /Library/LaunchDaemons/",
"sudo chown root:wheel /Library/LaunchDaemons/*.plist",
"sudo chmod 644 /Library/LaunchDaemons/*.plist",
]
}
provisioner "shell" {
inline = ["sudo rm -rf /tmp/*"]
}
}

View File

@@ -1,44 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.buildkite.buildkite-agent</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/buildkite-agent</string>
<string>start</string>
</array>
<key>KeepAlive</key>
<dict>
<key>SuccessfulExit</key>
<false />
</dict>
<key>RunAtLoad</key>
<true />
<key>StandardOutPath</key>
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
<key>StandardErrorPath</key>
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
<key>EnvironmentVariables</key>
<dict>
<key>BUILDKITE_AGENT_CONFIG</key>
<string>/etc/buildkite-agent/buildkite-agent.cfg</string>
</dict>
<key>LimitLoadToSessionType</key>
<array>
<string>Aqua</string>
<string>LoginWindow</string>
<string>Background</string>
<string>StandardIO</string>
<string>System</string>
</array>
</dict>
</plist>

View File

@@ -1,20 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.tailscale.tailscaled</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/tailscale</string>
<string>up</string>
<string>--ssh</string>
<string>--authkey</string>
<string>${TAILSCALE_AUTHKEY}</string>
</array>
<key>RunAtLoad</key>
<true />
</dict>
</plist>

View File

@@ -1,16 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>com.tailscale.tailscaled</string>
<key>ProgramArguments</key>
<array>
<string>/usr/local/bin/tailscaled</string>
</array>
<key>RunAtLoad</key>
<true />
</dict>
</plist>

View File

@@ -1,124 +0,0 @@
#!/bin/sh
# This script generates the boot commands for the macOS installer GUI.
# It is run on your local machine, not inside the VM.
# Sources:
# - https://github.com/cirruslabs/macos-image-templates/blob/master/templates/vanilla-sequoia.pkr.hcl
if ! [ "${release}" ] || ! [ "${username}" ] || ! [ "${password}" ]; then
echo "Script must be run with variables: release, username, and password" >&2
exit 1
fi
# Hello, hola, bonjour, etc.
echo "<wait120s><spacebar>"
# Select Your Country and Region
echo "<wait30s>italiano<esc>english<enter>"
echo "<wait30s>united states<leftShiftOn><tab><leftShiftOff><spacebar>"
# Written and Spoken Languages
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Accessibility
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Data & Privacy
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Migration Assistant
echo "<wait30s><tab><tab><tab><spacebar>"
# Sign In with Your Apple ID
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
# Are you sure you want to skip signing in with an Apple ID?
echo "<wait30s><tab><spacebar>"
# Terms and Conditions
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# I have read and agree to the macOS Software License Agreement
echo "<wait30s><tab><spacebar>"
# Create a Computer Account
echo "<wait30s>${username}<tab><tab>${password}<tab>${password}<tab><tab><tab><spacebar>"
# Enable Location Services
echo "<wait60s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Are you sure you don't want to use Location Services?
echo "<wait30s><tab><spacebar>"
# Select Your Time Zone
echo "<wait30s><tab>UTC<enter><leftShiftOn><tab><leftShiftOff><spacebar>"
# Analytics
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
# Screen Time
echo "<wait30s><tab><spacebar>"
# Siri
echo "<wait30s><tab><spacebar><leftShiftOn><tab><leftShiftOff><spacebar>"
# Choose Your Look
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
if [ "${release}" = "13" ] || [ "${release}" = "14" ]; then
# Enable Voice Over
echo "<wait30s><leftAltOn><f5><leftAltOff><wait5s>v"
else
# Welcome to Mac
echo "<wait30s><spacebar>"
# Enable Keyboard navigation
echo "<wait30s><leftAltOn><spacebar><leftAltOff>Terminal<enter>"
echo "<wait30s>defaults write NSGlobalDomain AppleKeyboardUIMode -int 3<enter>"
echo "<wait30s><leftAltOn>q<leftAltOff>"
fi
# Now that the installation is done, open "System Settings"
echo "<wait30s><leftAltOn><spacebar><leftAltOff>System Settings<enter>"
# Navigate to "Sharing"
echo "<wait30s><leftAltOn>f<leftAltOff>sharing<enter>"
if [ "${release}" = "13" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><down><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><spacebar>"
# Open "Remote Login" details
echo "<wait30s><tab><spacebar>"
# Enable "Full Disk Access"
echo "<wait30s><tab><spacebar>"
# Click "Done"
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
# Disable Voice Over
echo "<leftAltOn><f5><leftAltOff>"
elif [ "${release}" = "14" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
# Disable Voice Over
echo "<wait30s><leftAltOn><f5><leftAltOff>"
elif [ "${release}" = "15" ]; then
# Navigate to "Screen Sharing" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
# Navigate to "Remote Login" and enable it
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
fi
# Quit System Settings
echo "<wait30s><leftAltOn>q<leftAltOff>"

View File

@@ -1,122 +0,0 @@
#!/bin/sh
# This script optimizes macOS for virtualized environments.
# It disables things like spotlight, screen saver, and sleep.
# Sources:
# - https://github.com/sickcodes/osx-optimizer
# - https://github.com/koding88/MacBook-Optimization-Script
# - https://www.macstadium.com/blog/simple-optimizations-for-macos-and-ios-build-agents
if [ "$(id -u)" != "0" ]; then
echo "This script must be run using sudo." >&2
exit 1
fi
execute() {
echo "$ $@" >&2
if ! "$@"; then
echo "Command failed: $@" >&2
exit 1
fi
}
disable_software_update() {
execute softwareupdate --schedule off
execute defaults write com.apple.SoftwareUpdate AutomaticDownload -bool false
execute defaults write com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false
execute defaults write com.apple.SoftwareUpdate ConfigDataInstall -int 0
execute defaults write com.apple.SoftwareUpdate CriticalUpdateInstall -int 0
execute defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 0
execute defaults write com.apple.SoftwareUpdate AutomaticDownload -int 0
execute defaults write com.apple.commerce AutoUpdate -bool false
execute defaults write com.apple.commerce AutoUpdateRestartRequired -bool false
}
disable_spotlight() {
execute mdutil -i off -a
execute mdutil -E /
}
disable_siri() {
execute launchctl unload -w /System/Library/LaunchAgents/com.apple.Siri.agent.plist
execute defaults write com.apple.Siri StatusMenuVisible -bool false
execute defaults write com.apple.Siri UserHasDeclinedEnable -bool true
execute defaults write com.apple.assistant.support "Assistant Enabled" 0
}
disable_sleep() {
execute systemsetup -setsleep Never
execute systemsetup -setcomputersleep Never
execute systemsetup -setdisplaysleep Never
execute systemsetup -setharddisksleep Never
}
disable_screen_saver() {
execute defaults write com.apple.screensaver loginWindowIdleTime 0
execute defaults write com.apple.screensaver idleTime 0
}
disable_screen_lock() {
execute defaults write com.apple.loginwindow DisableScreenLock -bool true
}
disable_wallpaper() {
execute defaults write com.apple.loginwindow DesktopPicture ""
}
disable_application_state() {
execute defaults write com.apple.loginwindow TALLogoutSavesState -bool false
}
disable_accessibility() {
execute defaults write com.apple.Accessibility DifferentiateWithoutColor -int 1
execute defaults write com.apple.Accessibility ReduceMotionEnabled -int 1
execute defaults write com.apple.universalaccess reduceMotion -int 1
execute defaults write com.apple.universalaccess reduceTransparency -int 1
}
disable_dashboard() {
execute defaults write com.apple.dashboard mcx-disabled -boolean YES
execute killall Dock
}
disable_animations() {
execute defaults write NSGlobalDomain NSAutomaticWindowAnimationsEnabled -bool false
execute defaults write -g QLPanelAnimationDuration -float 0
execute defaults write com.apple.finder DisableAllAnimations -bool true
}
disable_time_machine() {
execute tmutil disable
}
enable_performance_mode() {
# https://support.apple.com/en-us/101992
if ! [ $(nvram boot-args 2>/dev/null | grep -q serverperfmode) ]; then
execute nvram boot-args="serverperfmode=1 $(nvram boot-args 2>/dev/null | cut -f 2-)"
fi
}
add_terminal_to_desktop() {
execute ln -sf /System/Applications/Utilities/Terminal.app ~/Desktop/Terminal
}
main() {
disable_software_update
disable_spotlight
disable_siri
disable_sleep
disable_screen_saver
disable_screen_lock
disable_wallpaper
disable_application_state
disable_accessibility
disable_dashboard
disable_animations
disable_time_machine
enable_performance_mode
add_terminal_to_desktop
}
main

View File

@@ -1,78 +0,0 @@
#!/bin/sh
# This script generates a /etc/kcpassword file to enable auto-login on macOS.
# Yes, this stores your password in plain text. Do NOT do this on your local machine.
# Sources:
# - https://github.com/xfreebird/kcpassword/blob/master/kcpassword
if [ "$(id -u)" != "0" ]; then
echo "This script must be run using sudo." >&2
exit 1
fi
execute() {
echo "$ $@" >&2
if ! "$@"; then
echo "Command failed: $@" >&2
exit 1
fi
}
kcpassword() {
passwd="$1"
key="7d 89 52 23 d2 bc dd ea a3 b9 1f"
passwd_hex=$(printf "%s" "$passwd" | xxd -p | tr -d '\n')
key_len=33
passwd_len=${#passwd_hex}
remainder=$((passwd_len % key_len))
if [ $remainder -ne 0 ]; then
padding=$((key_len - remainder))
passwd_hex="${passwd_hex}$(printf '%0*x' $((padding / 2)) 0)"
fi
result=""
i=0
while [ $i -lt ${#passwd_hex} ]; do
for byte in $key; do
[ $i -ge ${#passwd_hex} ] && break
p="${passwd_hex:$i:2}"
r=$(printf '%02x' $((0x$p ^ 0x$byte)))
result="${result}${r}"
i=$((i + 2))
done
done
echo "$result"
}
login() {
username="$1"
password="$2"
enable_passwordless_sudo() {
execute mkdir -p /etc/sudoers.d/
echo "${username} ALL=(ALL) NOPASSWD: ALL" | EDITOR=tee execute visudo "/etc/sudoers.d/${username}-nopasswd"
}
enable_auto_login() {
echo "00000000: 1ced 3f4a bcbc ba2c caca 4e82" | execute xxd -r - /etc/kcpassword
execute defaults write /Library/Preferences/com.apple.loginwindow autoLoginUser "${username}"
}
disable_screen_lock() {
execute sysadminctl -screenLock off -password "${password}"
}
enable_passwordless_sudo
enable_auto_login
disable_screen_lock
}
if [ $# -ne 2 ]; then
echo "Usage: $0 <username> <password>" >&2
exit 1
fi
login "$@"

View File

@@ -1,78 +0,0 @@
# Packer plugins: Tart builds/manages macOS VMs; external allows running
# local commands during the build.
packer {
  required_plugins {
    tart = {
      version = ">= 1.12.0"
      source = "github.com/cirruslabs/tart"
    }
    external = {
      version = ">= 0.0.2"
      source = "github.com/joomcode/external"
    }
  }
}

# Major macOS release to build (13 = Ventura, 14 = Sonoma, 15 = Sequoia).
variable "release" {
  type = number
  default = 13
}

# Account created inside the VM image.
variable "username" {
  type = string
  default = "admin"
}

variable "password" {
  type = string
  default = "admin"
}

# VM hardware sizing.
variable "cpu_count" {
  type = number
  default = 2
}

variable "memory_gb" {
  type = number
  default = 4
}

variable "disk_size_gb" {
  type = number
  default = 50
}

locals {
  # Per-release metadata: support tier, distro codename, and the Apple
  # IPSW restore image used to install that macOS version.
  sequoia = {
    tier = 1
    distro = "sequoia"
    release = "15"
    ipsw = "https://updates.cdn-apple.com/2024FallFCS/fullrestores/062-78489/BDA44327-C79E-4608-A7E0-455A7E91911F/UniversalMac_15.0_24A335_Restore.ipsw"
  }
  sonoma = {
    tier = 2
    distro = "sonoma"
    release = "14"
    ipsw = "https://updates.cdn-apple.com/2023FallFCS/fullrestores/042-54934/0E101AD6-3117-4B63-9BF1-143B6DB9270A/UniversalMac_14.0_23A344_Restore.ipsw"
  }
  ventura = {
    tier = 2
    distro = "ventura"
    release = "13"
    ipsw = "https://updates.cdn-apple.com/2022FallFCS/fullrestores/012-92188/2C38BCD1-2BFF-4A10-B358-94E8E28BE805/UniversalMac_13.0_22A380_Restore.ipsw"
  }
  # Map the numeric release variable to its metadata record.
  releases = {
    15 = local.sequoia
    14 = local.sonoma
    13 = local.ventura
  }
  release = local.releases[var.release]
  username = var.username
  password = var.password
  cpu_count = var.cpu_count
  memory_gb = var.memory_gb
  disk_size_gb = var.disk_size_gb
}

View File

@@ -1,27 +0,0 @@
{
"private": true,
"scripts": {
"bootstrap": "brew install gh jq cirruslabs/cli/tart cirruslabs/cli/sshpass hashicorp/tap/packer && packer init darwin",
"login": "gh auth token | tart login ghcr.io --username $(gh api user --jq .login) --password-stdin",
"fetch:image-name": "echo ghcr.io/oven-sh/bun-vm",
"fetch:darwin-version": "echo 1",
"fetch:macos-version": "sw_vers -productVersion | cut -d. -f1",
"fetch:script-version": "cat ../scripts/bootstrap.sh | grep 'v=' | sed 's/v=\"//;s/\"//' | head -n 1",
"build:darwin-aarch64-vanilla": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=$(bun fetch:macos-version) darwin/",
"build:darwin-aarch64-vanilla-15": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=15 darwin/",
"build:darwin-aarch64-vanilla-14": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=14 darwin/",
"build:darwin-aarch64-vanilla-13": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=13 darwin/",
"build:darwin-aarch64": "packer build '-only=*.bun-darwin-aarch64' -var release=$(bun fetch:macos-version) darwin/",
"build:darwin-aarch64-15": "packer build '-only=*.bun-darwin-aarch64' -var release=15 darwin/",
"build:darwin-aarch64-14": "packer build '-only=*.bun-darwin-aarch64' -var release=14 darwin/",
"build:darwin-aarch64-13": "packer build '-only=*.bun-darwin-aarch64' -var release=13 darwin/",
"publish:darwin-aarch64-vanilla": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-vanilla-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-15": "tart push bun-darwin-aarch64-vanilla-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sequoia-15-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-14": "tart push bun-darwin-aarch64-vanilla-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sonoma-14-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64-vanilla-13": "tart push bun-darwin-aarch64-vanilla-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-vanilla-ventura-13-v$(bun fetch:darwin-version)\"",
"publish:darwin-aarch64": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-15": "tart push bun-darwin-aarch64-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-sequoia-15-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-14": "tart push bun-darwin-aarch64-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-sonoma-14-v$(bun fetch:script-version)\"",
"publish:darwin-aarch64-13": "tart push bun-darwin-aarch64-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-ventura-13-v$(bun fetch:script-version)\""
}
}

View File

@@ -79,7 +79,7 @@ endif()
optionx(CANARY_REVISION STRING "The canary revision of the build" DEFAULT ${DEFAULT_CANARY_REVISION})
if(RELEASE AND LINUX AND CI)
if(RELEASE AND LINUX)
set(DEFAULT_LTO ON)
else()
set(DEFAULT_LTO OFF)

View File

@@ -1,11 +1,6 @@
# https://clang.llvm.org/docs/ClangFormat.html
file(GLOB BUN_H_SOURCES LIST_DIRECTORIES false ${CONFIGURE_DEPENDS}
${CWD}/src/bun.js/bindings/*.h
${CWD}/src/bun.js/modules/*.h
)
set(CLANG_FORMAT_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES} ${BUN_H_SOURCES})
set(CLANG_FORMAT_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES})
register_command(
TARGET

View File

@@ -18,7 +18,6 @@ register_cmake_command(
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
-DCARES_SHARED=OFF
-DCARES_BUILD_TOOLS=OFF # this was set to ON?
-DCMAKE_INSTALL_LIBDIR=lib
LIB_PATH
lib
LIBRARIES

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/mimalloc
COMMIT
82b2c2277a4d570187c07b376557dc5bde81d848
4c283af60cdae205df5a872530c77e2a6a307d43
)
set(MIMALLOC_CMAKE_ARGS

View File

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION 9b84f43643eff64ab46daec9b860de262c80f5e2)
set(WEBKIT_VERSION 12e2f46fb01f7c5cf5a992b9414ddfaab32b7110)
endif()
if(WEBKIT_LOCAL)

View File

@@ -402,7 +402,7 @@ Bun.serve({
});
```
### Server name indication (SNI)
### Sever name indication (SNI)
To configure the server name indication (SNI) for the server, set the `serverName` field in the `tls` object.

View File

@@ -179,7 +179,7 @@ proc.kill(); // specify an exit code
The parent `bun` process will not terminate until all child processes have exited. Use `proc.unref()` to detach the child process from the parent.
```ts
```
const proc = Bun.spawn(["bun", "--version"]);
proc.unref();
```

View File

@@ -2,7 +2,7 @@
name: Build an app with Next.js and Bun
---
Initialize a Next.js app with `create-next-app`. This will scaffold a new Next.js project and automatically install dependencies.
Initialize a Next.js app with `create-next-app`. This automatically installs dependencies using `npm`.
```sh
$ bun create next-app

View File

@@ -37,10 +37,7 @@ Alternatively, you can create a PM2 configuration file. Create a file named `pm2
module.exports = {
name: "app", // Name of your application
script: "index.ts", // Entry point of your application
interpreter: "bun", // Bun interpreter
env: {
PATH: `${process.env.HOME}/.bun/bin:${process.env.PATH}`, // Add "~/.bun/bin/bun" to PATH
}
interpreter: "~/.bun/bin/bun", // Path to the Bun interpreter
};
```

View File

@@ -2,62 +2,56 @@
name: Build an app with SvelteKit and Bun
---
Use `sv create my-app` to create a SvelteKit project with SvelteKit CLI. Answer the prompts to select a template and set up your development environment.
Use `bun create` to scaffold your app with the `svelte` package. Answer the prompts to select a template and set up your development environment.
```sh
$ bunx sv create my-app
┌ Welcome to the Svelte CLI! (v0.5.7)
$ bun create svelte@latest my-app
┌ Welcome to SvelteKit!
◇ Which template would you like?
│ SvelteKit demo
◇ Which Svelte app template?
│ SvelteKit demo app
◇ Add type checking with Typescript?
│ Yes, using Typescript syntax
◇ Add type checking with TypeScript?
│ Yes, using TypeScript syntax
Project created
Select additional options (use arrow keys/space bar)
│ None
What would you like to add to your project?
│ none
Which package manager do you want to install dependencies with?
│ bun
Successfully installed dependencies
◇ Project next steps ─────────────────────────────────────────────────────╮
│ │
│ 1: cd my-app │
│ 2: git init && git add -A && git commit -m "Initial commit" (optional)
│ 3: bun run dev -- --open │
│ │
│ To close the dev server, hit Ctrl-C │
│ │
│ Stuck? Visit us at https://svelte.dev/chat │
│ │
├──────────────────────────────────────────────────────────────────────────╯
└ You're all set!
Your project is ready!
✔ Typescript
Inside Svelte components, use <script lang="ts">
Install community-maintained integrations:
https://github.com/svelte-add/svelte-add
```
---
Once the project is initialized, `cd` into the new project. You don't need to run 'bun install' since the dependencies are already installed.
Once the project is initialized, `cd` into the new project and install dependencies.
```sh
$ cd my-app
$ bun install
```
---
Then start the development server with `bun --bun run dev`.
To run the dev server with Node.js instead of Bun, you can omit the `--bun` flag.
```sh
$ cd my-app
$ bun --bun run dev
$ vite dev
Forced re-optimization of dependencies
VITE v5.4.10 ready in 424 ms
VITE v4.4.9 ready in 895 ms
➜ Local: http://localhost:5173/
➜ Network: use --host to expose
➜ press h + enter to show help
➜ press h to show help
```
---
@@ -81,22 +75,16 @@ Now, make the following changes to your `svelte.config.js`.
```ts-diff
- import adapter from "@sveltejs/adapter-auto";
+ import adapter from "svelte-adapter-bun";
import { vitePreprocess } from '@sveltejs/vite-plugin-svelte';
import { vitePreprocess } from "@sveltejs/kit/vite";
/** @type {import('@sveltejs/kit').Config} */
const config = {
// Consult https://svelte.dev/docs/kit/integrations#preprocessors
// for more information about preprocessors
preprocess: vitePreprocess(),
kit: {
// adapter-auto only supports some environments, see https://svelte.dev/docs/kit/adapter-auto for a list.
// If your environment is not supported, or you settled on a specific environment, switch out the adapter.
// See https://svelte.dev/docs/kit/adapters for more information about adapters.
adapter: adapter()
}
kit: {
adapter: adapter(),
},
preprocess: vitePreprocess(),
};
export default config;
```
@@ -105,21 +93,28 @@ Now, make the following changes to your `svelte.config.js`.
To build a production bundle:
```sh
$ bun --bun run build
$ vite build
vite v5.4.10 building SSR bundle for production...
"confetti" is imported from external module "@neoconfetti/svelte" but never used in "src/routes/sverdle/+page.svelte".
✓ 130 modules transformed.
vite v5.4.10 building for production...
✓ 148 modules transformed.
...
✓ built in 231ms
...
✓ built in 899ms
Run npm run preview to preview your production build locally.
> Using svelte-adapter-bun
✔ Start server with: bun ./build/index.js
✔ done
$ bun run build
$ vite build
vite v4.4.9 building SSR bundle for production...
transforming (60) node_modules/@sveltejs/kit/src/utils/escape.js
✓ 98 modules transformed.
Generated an empty chunk: "entries/endpoints/waitlist/_server.ts".
vite v4.4.9 building for production...
✓ 92 modules transformed.
Generated an empty chunk: "7".
.svelte-kit/output/client/_app/version.json 0.03 kB │ gzip: 0.05 kB
...
.svelte-kit/output/server/index.js 86.47 kB
Run npm run preview to preview your production build locally.
> Using svelte-adapter-bun
✔ Start server with: bun ./build/index.js
✔ done
✓ built in 7.81s
```

View File

@@ -65,7 +65,7 @@ Some methods are not optimized yet.
### [`node:http2`](https://nodejs.org/api/http2.html)
🟡 Client & server are implemented (95.25% of gRPC's test suite passes). Missing `options.allowHTTP1`, `options.enableConnectProtocol`, ALTSVC extension, and `http2stream.pushStream`.
🟡 Client is supported, but server isn't yet.
### [`node:https`](https://nodejs.org/api/https.html)

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.1.34",
"version": "1.1.31",
"workspaces": [
"./packages/bun-types"
],

View File

@@ -3113,50 +3113,32 @@ declare module "bun" {
* @example \x1b[38;2;100;200;200m
*/
| "ansi"
| "ansi-16"
| "ansi-16m"
/**
* 256 color ANSI color string, for use in terminals which don't support true color
*
* Tries to match closest 24-bit color to 256 color palette
*/
| "ansi-256"
/**
* Picks the format that produces the shortest output
*/
| "css"
| "ansi256"
/**
* Lowercase hex color string without alpha
* @example #ff9800
* @example #aabb11
*/
| "hex"
/**
* Uppercase hex color string without alpha
* @example #FF9800
*/
| "HEX"
/**
* @example hsl(35.764706, 1, 0.5)
*/
| "hsl"
/**
* @example lab(0.72732764, 33.938198, -25.311619)
*/
| "lab"
/**
* @example 16750592
*/
| "number"
/**
* RGB color string without alpha
* @example rgb(255, 152, 0)
* rgb(100, 200, 200)
*/
| "rgb"
/**
* RGB color string with alpha
* @example rgba(255, 152, 0, 1)
* rgba(100, 200, 200, 0.5)
*/
| "rgba",
| "rgba"
| "hsl"
| "lab"
| "css"
| "lab"
| "HEX",
): string | null;
function color(
@@ -3249,7 +3231,7 @@ declare module "bun" {
}
const unsafe: Unsafe;
type DigestEncoding = "utf8" | "ucs2" | "utf16le" | "latin1" | "ascii" | "base64" | "base64url" | "hex";
type DigestEncoding = "hex" | "base64";
/**
* Are ANSI colors enabled for stdin and stdout?

View File

@@ -1673,36 +1673,7 @@ declare global {
groupEnd(): void;
info(...data: any[]): void;
log(...data: any[]): void;
/**
* Try to construct a table with the columns of the properties of `tabularData` (or use `properties`) and rows of `tabularData` and log it. Falls back to just
* logging the argument if it can't be parsed as tabular.
*
* ```js
* // These can't be parsed as tabular data
* console.table(Symbol());
* // Symbol()
*
* console.table(undefined);
* // undefined
*
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]);
* // ┌────┬─────┬─────┐
* // │ │ a │ b │
* // ├────┼─────┼─────┤
* // │ 0 │ 1 │ 'Y' │
* // │ 1 │ 'Z' │ 2 │
* // └────┴─────┴─────┘
*
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']);
* // ┌────┬─────┐
* // │ │ a │
* // ├────┼─────┤
* // │ 0 │ 1 │
* // │ 1 │ 'Z' │
* // └────┴─────┘
* ```
* @param properties Alternate properties for constructing the table.
*/
/** Does nothing currently */
table(tabularData?: any, properties?: string[]): void;
/**
* Begin a timer to log with {@link console.timeEnd}

View File

@@ -212,13 +212,12 @@ void us_socket_context_add_server_name(int ssl, struct us_socket_context_t *cont
}
#endif
}
int us_bun_socket_context_add_server_name(int ssl, struct us_socket_context_t *context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user) {
void us_bun_socket_context_add_server_name(int ssl, struct us_socket_context_t *context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user) {
#ifndef LIBUS_NO_SSL
if (ssl) {
return us_bun_internal_ssl_socket_context_add_server_name((struct us_internal_ssl_socket_context_t *) context, hostname_pattern, options, user);
us_bun_internal_ssl_socket_context_add_server_name((struct us_internal_ssl_socket_context_t *) context, hostname_pattern, options, user);
}
#endif
return 0;
}
/* Remove SNI context */

View File

@@ -855,11 +855,6 @@ create_ssl_context_from_options(struct us_socket_context_options_t options) {
}
}
if (ERR_peek_error() != 0) {
free_ssl_context(ssl_context);
return NULL;
}
/* This must be free'd with free_ssl_context, not SSL_CTX_free */
return ssl_context;
}
@@ -1111,8 +1106,6 @@ int us_verify_callback(int preverify_ok, X509_STORE_CTX *ctx) {
SSL_CTX *create_ssl_context_from_bun_options(
struct us_bun_socket_context_options_t options,
enum create_bun_socket_error_t *err) {
ERR_clear_error();
/* Create the context */
SSL_CTX *ssl_context = SSL_CTX_new(TLS_method());
@@ -1218,9 +1211,6 @@ SSL_CTX *create_ssl_context_from_bun_options(
return NULL;
}
// It may return spurious errors here.
ERR_clear_error();
if (options.reject_unauthorized) {
SSL_CTX_set_verify(ssl_context,
SSL_VERIFY_PEER | SSL_VERIFY_FAIL_IF_NO_PEER_CERT,
@@ -1346,7 +1336,7 @@ void us_internal_ssl_socket_context_add_server_name(
}
}
int us_bun_internal_ssl_socket_context_add_server_name(
void us_bun_internal_ssl_socket_context_add_server_name(
struct us_internal_ssl_socket_context_t *context,
const char *hostname_pattern,
struct us_bun_socket_context_options_t options, void *user) {
@@ -1354,9 +1344,6 @@ int us_bun_internal_ssl_socket_context_add_server_name(
/* Try and construct an SSL_CTX from options */
enum create_bun_socket_error_t err = CREATE_BUN_SOCKET_ERROR_NONE;
SSL_CTX *ssl_context = create_ssl_context_from_bun_options(options, &err);
if (ssl_context == NULL) {
return -1;
}
/* Attach the user data to this context */
if (1 != SSL_CTX_set_ex_data(ssl_context, 0, user)) {
@@ -1364,15 +1351,15 @@ int us_bun_internal_ssl_socket_context_add_server_name(
printf("CANNOT SET EX DATA!\n");
abort();
#endif
return -1;
}
if (sni_add(context->sni, hostname_pattern, ssl_context)) {
/* If we already had that name, ignore */
free_ssl_context(ssl_context);
/* We do not want to hold any nullptr's in our SNI tree */
if (ssl_context) {
if (sni_add(context->sni, hostname_pattern, ssl_context)) {
/* If we already had that name, ignore */
free_ssl_context(ssl_context);
}
}
return 0;
}
void us_internal_ssl_socket_context_on_server_name(

View File

@@ -302,7 +302,7 @@ void us_internal_ssl_socket_context_add_server_name(
us_internal_ssl_socket_context_r context,
const char *hostname_pattern, struct us_socket_context_options_t options,
void *user);
int us_bun_internal_ssl_socket_context_add_server_name(
void us_bun_internal_ssl_socket_context_add_server_name(
us_internal_ssl_socket_context_r context,
const char *hostname_pattern,
struct us_bun_socket_context_options_t options, void *user);

View File

@@ -234,7 +234,7 @@ unsigned short us_socket_context_timestamp(int ssl, us_socket_context_r context)
/* Adds SNI domain and cert in asn1 format */
void us_socket_context_add_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern, struct us_socket_context_options_t options, void *user);
int us_bun_socket_context_add_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user);
void us_bun_socket_context_add_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user);
void us_socket_context_remove_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern);
void us_socket_context_on_server_name(int ssl, us_socket_context_r context, void (*cb)(us_socket_context_r context, const char *hostname));
void *us_socket_server_name_userdata(int ssl, us_socket_r s);

View File

@@ -16,7 +16,8 @@
* limitations under the License.
*/
// clang-format off
#ifndef UWS_APP_H
#define UWS_APP_H
#include <string>
#include <charconv>
@@ -105,17 +106,14 @@ public:
/* Server name */
TemplatedApp &&addServerName(std::string hostname_pattern, SocketContextOptions options = {}, bool *success = nullptr) {
TemplatedApp &&addServerName(std::string hostname_pattern, SocketContextOptions options = {}) {
/* Do nothing if not even on SSL */
if constexpr (SSL) {
/* First we create a new router for this domain */
auto *domainRouter = new HttpRouter<typename HttpContextData<SSL>::RouterData>();
int result = us_bun_socket_context_add_server_name(SSL, (struct us_socket_context_t *) httpContext, hostname_pattern.c_str(), options, domainRouter);
if (success) {
*success = result == 0;
}
us_bun_socket_context_add_server_name(SSL, (struct us_socket_context_t *) httpContext, hostname_pattern.c_str(), options, domainRouter);
}
return std::move(*this);
@@ -240,18 +238,6 @@ public:
httpContext = HttpContext<SSL>::create(Loop::get(), options);
}
TemplatedApp(HttpContext<SSL> &context) {
httpContext = &context;
}
static TemplatedApp<SSL>* create(SocketContextOptions options = {}) {
auto* httpContext = HttpContext<SSL>::create(Loop::get(), options);
if (!httpContext) {
return nullptr;
}
return new TemplatedApp<SSL>(*httpContext);
}
bool constructorFailed() {
return !httpContext;
}
@@ -618,3 +604,4 @@ typedef TemplatedApp<true> SSLApp;
}
#endif // UWS_APP_H

View File

@@ -16,7 +16,8 @@
* limitations under the License.
*/
#pragma once
#ifndef UWS_HTTPCONTEXT_H
#define UWS_HTTPCONTEXT_H
/* This class defines the main behavior of HTTP and emits various events */
@@ -26,8 +27,6 @@
#include "AsyncSocket.h"
#include "WebSocketData.h"
#include <string>
#include <map>
#include <string_view>
#include <iostream>
#include "MoveOnlyFunction.h"
@@ -172,7 +171,7 @@ private:
#endif
/* The return value is entirely up to us to interpret. The HttpParser only care for whether the returned value is DIFFERENT or not from passed user */
auto [err, returnedSocket] = httpResponseData->consumePostPadded(data, (unsigned int) length, s, proxyParser, [httpContextData](void *s, HttpRequest *httpRequest) -> void * {
void *returnedSocket = httpResponseData->consumePostPadded(data, (unsigned int) length, s, proxyParser, [httpContextData](void *s, HttpRequest *httpRequest) -> void * {
/* For every request we reset the timeout and hang until user makes action */
/* Warning: if we are in shutdown state, resetting the timer is a security issue! */
us_socket_timeout(SSL, (us_socket_t *) s, 0);
@@ -181,9 +180,7 @@ private:
HttpResponseData<SSL> *httpResponseData = (HttpResponseData<SSL> *) us_socket_ext(SSL, (us_socket_t *) s);
httpResponseData->offset = 0;
/* Are we not ready for another request yet? Terminate the connection.
* Important for denying async pipelining until, if ever, we want to support it.
* Otherwise requests can get mixed up on the same connection. We still support sync pipelining. */
/* Are we not ready for another request yet? Terminate the connection. */
if (httpResponseData->state & HttpResponseData<SSL>::HTTP_RESPONSE_PENDING) {
us_socket_close(SSL, (us_socket_t *) s, 0, nullptr);
return nullptr;
@@ -283,6 +280,10 @@ private:
}
}
return user;
}, [](void *user) {
/* Close any socket on HTTP errors */
us_socket_close(SSL, (us_socket_t *) user, 0, nullptr);
return nullptr;
});
/* Mark that we are no longer parsing Http */
@@ -290,9 +291,6 @@ private:
/* If we got fullptr that means the parser wants us to close the socket from error (same as calling the errorHandler) */
if (returnedSocket == FULLPTR) {
/* For errors, we only deliver them "at most once". We don't care if they get halfways delivered or not. */
us_socket_write(SSL, s, httpErrorResponses[err].data(), (int) httpErrorResponses[err].length(), false);
us_socket_shutdown(SSL, s);
/* Close any socket on HTTP errors */
us_socket_close(SSL, s, 0, nullptr);
/* This just makes the following code act as if the socket was closed from error inside the parser. */
@@ -301,8 +299,9 @@ private:
/* We need to uncork in all cases, except for nullptr (closed socket, or upgraded socket) */
if (returnedSocket != nullptr) {
us_socket_t* returnedSocketPtr = (us_socket_t*) returnedSocket;
/* We don't want open sockets to keep the event loop alive between HTTP requests */
us_socket_unref((us_socket_t *) returnedSocket);
us_socket_unref(returnedSocketPtr);
/* Timeout on uncork failure */
auto [written, failed] = ((AsyncSocket<SSL> *) returnedSocket)->uncork();
@@ -322,7 +321,7 @@ private:
}
}
}
return (us_socket_t *) returnedSocket;
return returnedSocketPtr;
}
/* If we upgraded, check here (differ between nullptr close and nullptr upgrade) */
@@ -484,27 +483,10 @@ public:
return;
}
/* Record this route's parameter offsets */
std::map<std::string, unsigned short, std::less<>> parameterOffsets;
unsigned short offset = 0;
for (unsigned int i = 0; i < pattern.length(); i++) {
if (pattern[i] == ':') {
i++;
unsigned int start = i;
while (i < pattern.length() && pattern[i] != '/') {
i++;
}
parameterOffsets[std::string(pattern.data() + start, i - start)] = offset;
//std::cout << "<" << std::string(pattern.data() + start, i - start) << "> is offset " << offset;
offset++;
}
}
httpContextData->currentRouter->add(methods, pattern, [handler = std::move(handler), parameterOffsets = std::move(parameterOffsets)](auto *r) mutable {
httpContextData->currentRouter->add(methods, pattern, [handler = std::move(handler)](auto *r) mutable {
auto user = r->getUserData();
user.httpRequest->setYield(false);
user.httpRequest->setParameters(r->getParameters());
user.httpRequest->setParameterOffsets(&parameterOffsets);
/* Middleware? Automatically respond to expectations */
std::string_view expect = user.httpRequest->getHeader("expect");
@@ -546,4 +528,4 @@ public:
}
#endif // UWS_HTTPCONTEXT_H

View File

@@ -1,53 +0,0 @@
/*
 * Authored by Alex Hultman, 2018-2023.
 * Intellectual property of third-party.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef UWS_HTTP_ERRORS
#define UWS_HTTP_ERRORS

#include <string_view>

namespace uWS {

/* Possible errors from http parsing */
// Values start at 1 so that 0 can mean "no error"; each value indexes
// the matching entry in httpErrorResponses below.
enum HttpError {
    HTTP_ERROR_505_HTTP_VERSION_NOT_SUPPORTED = 1,
    HTTP_ERROR_431_REQUEST_HEADER_FIELDS_TOO_LARGE = 2,
    HTTP_ERROR_400_BAD_REQUEST = 3
};

#ifndef UWS_HTTPRESPONSE_NO_WRITEMARK
/* Returned parser errors match this LUT. */
// Full responses including an HTML body and the uWebSockets banner.
static const std::string_view httpErrorResponses[] = {
    "", /* Zeroth place is no error so don't use it */
    "HTTP/1.1 505 HTTP Version Not Supported\r\nConnection: close\r\n\r\n<h1>HTTP Version Not Supported</h1><p>This server does not support HTTP/1.0.</p><hr><i>uWebSockets/20 Server</i>",
    "HTTP/1.1 431 Request Header Fields Too Large\r\nConnection: close\r\n\r\n<h1>Request Header Fields Too Large</h1><hr><i>uWebSockets/20 Server</i>",
    "HTTP/1.1 400 Bad Request\r\nConnection: close\r\n\r\n<h1>Bad Request</h1><hr><i>uWebSockets/20 Server</i>",
};
#else
/* Anonymized pages */
// Status-line-only responses that avoid identifying the server software.
static const std::string_view httpErrorResponses[] = {
    "", /* Zeroth place is no error so don't use it */
    "HTTP/1.1 505 HTTP Version Not Supported\r\nConnection: close\r\n\r\n",
    "HTTP/1.1 431 Request Header Fields Too Large\r\nConnection: close\r\n\r\n",
    "HTTP/1.1 400 Bad Request\r\nConnection: close\r\n\r\n"
};
#endif

}

#endif

View File

@@ -1,42 +0,0 @@
/*
 * Authored by Alex Hultman, 2018-2023.
 * Intellectual property of third-party.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include <string_view>

namespace uWS {

/* Possible errors from http parsing */
// Values start at 1 so that 0 can mean "no error"; each value indexes
// the matching entry in httpErrorResponses below.
enum HttpError {
    HTTP_ERROR_505_HTTP_VERSION_NOT_SUPPORTED = 1,
    HTTP_ERROR_431_REQUEST_HEADER_FIELDS_TOO_LARGE = 2,
    HTTP_ERROR_400_BAD_REQUEST = 3
};

/* Anonymized pages */
// Status-line-only responses that avoid identifying the server software.
static const std::string_view httpErrorResponses[] = {
    "", /* Zeroth place is no error so don't use it */
    "HTTP/1.1 505 HTTP Version Not Supported\r\nConnection: close\r\n\r\n",
    "HTTP/1.1 431 Request Header Fields Too Large\r\nConnection: close\r\n\r\n",
    "HTTP/1.1 400 Bad Request\r\nConnection: close\r\n\r\n"
};

}

File diff suppressed because it is too large Load Diff

View File

@@ -15,7 +15,8 @@
* limitations under the License.
*/
// clang-format off
#pragma once
#ifndef UWS_HTTPRESPONSEDATA_H
#define UWS_HTTPRESPONSEDATA_H
/* This data belongs to the HttpResponse */
@@ -105,4 +106,4 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
}
#endif // UWS_HTTPRESPONSEDATA_H

View File

@@ -15,7 +15,9 @@
* limitations under the License.
*/
#pragma once
#ifndef UWS_TOPICTREE_H
#define UWS_TOPICTREE_H
#include <map>
#include <list>
#include <iostream>
@@ -364,4 +366,4 @@ public:
}
#endif

View File

@@ -1,51 +0,0 @@
// Resolve the Sentry event ID embedded in a GitHub issue body into the
// Sentry issue's short ID and permalink, then persist both to text files
// for later workflow steps to read.
const issueBody = process.env.GITHUB_ISSUE_BODY;
const authToken = process.env.SENTRY_AUTH_TOKEN;
if (!issueBody || !authToken) {
  throw new Error("Missing environment variables");
}

// Pull the event ID out of the hidden `<!-- sentry_id: ... -->` marker.
const marker = "<!-- sentry_id: ";
const markerStart = issueBody.indexOf(marker);
const markerEnd = issueBody.indexOf(" -->", markerStart + 1);
if (!(markerStart > -1 && markerEnd > -1)) {
  throw new Error("Missing sentry_id");
}
const sentryId = issueBody.slice(markerStart + marker.length, markerEnd).trim();
if (!sentryId) {
  throw new Error("Missing sentry_id");
}

const headers = { Authorization: `Bearer ${authToken}` };

// Map the event ID to its owning issue (group) via the Sentry API.
const eventResponse = await fetch(
  `https://sentry.io/api/0/organizations/4507155222364160/eventids/${sentryId}/`,
  { headers },
);
if (!eventResponse.ok) {
  throw new Error(`Failed to fetch Sentry event: ${eventResponse.statusText}`);
}
const eventJson = await eventResponse.json();
const groupId = eventJson?.groupId;
if (!groupId) {
  throw new Error("Missing groupId");
}

// Fetch the issue itself for its human-readable ID and permalink.
const issueResponse = await fetch(`https://sentry.io/api/0/issues/${groupId}/`, {
  headers,
});
if (!issueResponse.ok) {
  throw new Error(`Failed to fetch Sentry issue: ${issueResponse.statusText}`);
}
const { shortId, permalink } = await issueResponse.json();
if (!shortId || !permalink) {
  throw new Error("Missing shortId or permalink");
}

console.log(`Sentry ID: ${shortId}`);
console.log(`Sentry permalink: ${permalink}`);

await Bun.write("sentry-id.txt", shortId);
await Bun.write("sentry-link.txt", permalink);

export {};

View File

@@ -1,714 +0,0 @@
#!/bin/sh
# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.
# If this script does not work on your machine, please open an issue:
# https://github.com/oven-sh/bun/issues
# If you need to make a change to this script, such as upgrading a dependency,
# increment the version number to indicate that a new image should be built.
# Otherwise, the existing image will be retroactively updated.
v="3"
pid=$$
script="$(realpath "$0")"
print() {
echo "$@"
}
error() {
echo "error: $@" >&2
kill -s TERM "$pid"
exit 1
}
execute() {
print "$ $@" >&2
if ! "$@"; then
error "Command failed: $@"
fi
}
execute_sudo() {
if [ "$sudo" = "1" ]; then
execute "$@"
else
execute sudo "$@"
fi
}
execute_non_root() {
if [ "$sudo" = "1" ]; then
execute sudo -u "$user" "$@"
else
execute "$@"
fi
}
which() {
command -v "$1"
}
require() {
path="$(which "$1")"
if ! [ -f "$path" ]; then
error "Command \"$1\" is required, but is not installed."
fi
echo "$path"
}
fetch() {
curl=$(which curl)
if [ -f "$curl" ]; then
execute "$curl" -fsSL "$1"
else
wget=$(which wget)
if [ -f "$wget" ]; then
execute "$wget" -qO- "$1"
else
error "Command \"curl\" or \"wget\" is required, but is not installed."
fi
fi
}
download_file() {
url="$1"
filename="${2:-$(basename "$url")}"
path="$(mktemp -d)/$filename"
fetch "$url" > "$path"
print "$path"
}
# Compare two dotted version strings using version-aware ordering.
# Prints "0" if $1 = $2, "-1" if $1 < $2, and "1" if $1 > $2.
compare_version() {
  if [ "$1" = "$2" ]; then
    echo "0"
  # printf replaces `echo -e "$1\n$2"`: under POSIX sh (e.g. dash, which
  # /bin/sh commonly is), echo prints "-e" literally and does not expand
  # "\n", so the sort received one garbled line and the comparison broke.
  elif [ "$1" = "$(printf '%s\n%s\n' "$1" "$2" | sort -V | head -n1)" ]; then
    echo "-1"
  else
    echo "1"
  fi
}
# Append each line of $2 to file $1, creating the file (and its parent
# directory) if needed, and skipping lines that are already present.
append_to_file() {
  file="$1"
  content="$2"
  if ! [ -f "$file" ]; then
    execute mkdir -p "$(dirname "$file")"
    execute touch "$file"
  fi
  echo "$content" | while read -r line; do
    # -F matches the line as a fixed string: profile snippets contain
    # characters like `$` and `"` that plain grep would parse as a regex,
    # causing false matches; `--` guards against lines starting with "-".
    if ! grep -qF -- "$line" "$file"; then
      echo "$line" >> "$file"
    fi
  done
}
# Append a snippet to the common shell profile files. In CI every profile
# is created if missing; locally only already-existing profiles are touched.
append_to_profile() {
  content="$1"
  profiles=".profile .zprofile .bash_profile .bashrc .zshrc"
  for profile in $profiles; do
    file="$HOME/$profile"
    # $ci is set elsewhere in this script when running under CI.
    if [ "$ci" = "1" ] || [ -f "$file" ]; then
      append_to_file "$file" "$content"
    fi
  done
}

# Prepend an existing directory to PATH, both persistently (via the shell
# profiles) and for the current process.
append_to_path() {
  path="$1"
  if ! [ -d "$path" ]; then
    error "Could not find directory: \"$path\""
  fi
  append_to_profile "export PATH=\"$path:\$PATH\""
  export PATH="$path:$PATH"
}
# Detect the host and populate the globals used by the rest of the script:
# $os, $arch, $kernel, $distro, $release, $glibc, $pm, $user, $sudo, $ci
# (and $rosetta when running under emulation on Apple Silicon).
check_system() {
  uname="$(require uname)"
  os="$($uname -s)"
  case "$os" in
  Linux*) os="linux" ;;
  Darwin*) os="darwin" ;;
  *) error "Unsupported operating system: $os" ;;
  esac
  # Normalize architecture names to "x64" / "aarch64".
  arch="$($uname -m)"
  case "$arch" in
  x86_64 | x64 | amd64) arch="x64" ;;
  aarch64 | arm64) arch="aarch64" ;;
  *) error "Unsupported architecture: $arch" ;;
  esac
  kernel="$(uname -r)"
  if [ "$os" = "darwin" ]; then
    sw_vers="$(which sw_vers)"
    if [ -f "$sw_vers" ]; then
      distro="$($sw_vers -productName)"
      release="$($sw_vers -productVersion)"
    fi
    # sysctl.proc_translated = 1 means this process is an x64 binary being
    # translated by Rosetta 2, i.e. the real hardware is Apple Silicon.
    if [ "$arch" = "x64" ]; then
      sysctl="$(which sysctl)"
      if [ -f "$sysctl" ] && [ "$($sysctl -n sysctl.proc_translated 2>/dev/null)" = "1" ]; then
        arch="aarch64"
        rosetta="1"
      fi
    fi
  fi
  # Linux distro name and version from os-release.
  if [ "$os" = "linux" ] && [ -f /etc/os-release ]; then
    . /etc/os-release
    if [ -n "$ID" ]; then
      distro="$ID"
    fi
    if [ -n "$VERSION_ID" ]; then
      release="$VERSION_ID"
    fi
  fi
  # glibc version; used by install_nodejs to pick a compatible Node.js.
  if [ "$os" = "linux" ]; then
    rpm="$(which rpm)"
    if [ -f "$rpm" ]; then
      glibc="$($rpm -q glibc --queryformat '%{VERSION}\n')"
    else
      ldd="$(which ldd)"
      awk="$(which awk)"
      if [ -f "$ldd" ] && [ -f "$awk" ]; then
        glibc="$($ldd --version | $awk 'NR==1{print $NF}')"
      fi
    fi
  fi
  # Pick the package manager: brew on macOS; apt > dnf > yum on Linux.
  if [ "$os" = "darwin" ]; then
    brew="$(which brew)"
    pm="brew"
  fi
  if [ "$os" = "linux" ]; then
    apt="$(which apt-get)"
    if [ -f "$apt" ]; then
      pm="apt"
    else
      dnf="$(which dnf)"
      if [ -f "$dnf" ]; then
        pm="dnf"
      else
        yum="$(which yum)"
        if [ -f "$yum" ]; then
          pm="yum"
        fi
      fi
    fi
    if [ -z "$pm" ]; then
      error "No package manager found. (apt, dnf, yum)"
    fi
  fi
  # When invoked via sudo, $SUDO_USER is the real (non-root) user.
  if [ -n "$SUDO_USER" ]; then
    user="$SUDO_USER"
  else
    whoami="$(which whoami)"
    if [ -f "$whoami" ]; then
      user="$($whoami)"
    else
      error "Could not determine the current user, set \$USER."
    fi
  fi
  # $sudo = 1 means we already run as root, so execute_sudo skips sudo.
  id="$(which id)"
  if [ -f "$id" ] && [ "$($id -u)" = "0" ]; then
    sudo=1
  fi
  if [ "$CI" = "true" ]; then
    ci=1
  fi
  print "System information:"
  if [ -n "$distro" ]; then
    print "| Distro: $distro $release"
  fi
  print "| Operating system: $os"
  print "| Architecture: $arch"
  if [ -n "$rosetta" ]; then
    print "| Rosetta: true"
  fi
  if [ -n "$glibc" ]; then
    print "| Glibc: $glibc"
  fi
  print "| Package manager: $pm"
  print "| User: $user"
  if [ -n "$sudo" ]; then
    print "| Sudo: true"
  fi
  if [ -n "$ci" ]; then
    print "| CI: true"
  fi
}
# Dispatch a subcommand to the package manager detected by check_system.
package_manager() {
  case "$pm" in
  apt) DEBIAN_FRONTEND=noninteractive \
    execute "$apt" "$@" ;;
  # Use the resolved path "$dnf" (set by check_system), consistent with
  # the apt and yum arms, which already use their resolved paths.
  dnf) execute "$dnf" "$@" ;;
  yum) execute "$yum" "$@" ;;
  brew)
    # Homebrew is installed on first use and must not run as root.
    if ! [ -f "$(which brew)" ]; then
      install_brew
    fi
    execute_non_root brew "$@"
    ;;
  *) error "Unsupported package manager: $pm" ;;
  esac
}
update_packages() {
  # Refresh package metadata; only apt needs an explicit update step
  # before installing.
  if [ "$pm" = "apt" ]; then
    package_manager update
  fi
}
check_package() {
  # Print the package manager's info for package $1 (empty output when
  # the package is unknown).
  case "$pm" in
  apt) apt-cache policy "$1" ;;
  dnf | yum | brew) package_manager info "$1" ;;
  *) error "Unsupported package manager: $pm" ;;
  esac
}
install_packages() {
  # Install one or more packages non-interactively.
  case "$pm" in
  apt) package_manager install --yes --no-install-recommends "$@" ;;
  dnf) package_manager install --assumeyes --nodocs --noautoremove --allowerasing "$@" ;;
  yum) package_manager install -y "$@" ;;
  brew)
    # Force install + relink so formulas overwrite any pre-existing
    # binaries on PATH.
    package_manager install --force --formula "$@"
    package_manager link --force --overwrite "$@"
    ;;
  *) error "Unsupported package manager: $pm" ;;
  esac
}
get_version() {
  # Print the installed version of a command, or "not found".
  command="$1"
  path="$(which "$command")"
  if ! [ -f "$path" ]; then
    print "not found"
    return
  fi
  # go and zig use "version" as a subcommand instead of --version.
  case "$command" in
  go | zig) "$path" version ;;
  *) "$path" --version ;;
  esac
}
# Install Homebrew non-interactively and add it to PATH.
install_brew() {
  bash="$(require bash)"
  script=$(download_file "https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh")
  NONINTERACTIVE=1 execute_non_root "$bash" "$script"
  # Homebrew's prefix differs between Intel and Apple Silicon Macs.
  case "$arch" in
  x64)
    append_to_path "/usr/local/bin"
    ;;
  aarch64)
    append_to_path "/opt/homebrew/bin"
    ;;
  esac
  # On CI, disable cleanup, auto-update, and analytics for speed and
  # reproducibility.
  case "$ci" in
  1)
    append_to_profile "export HOMEBREW_NO_INSTALL_CLEANUP=1"
    append_to_profile "export HOMEBREW_NO_AUTO_UPDATE=1"
    append_to_profile "export HOMEBREW_NO_ANALYTICS=1"
    ;;
  esac
}
# Install baseline tooling needed on every machine, then Rosetta (macOS),
# Node.js, and bun.
install_common_software() {
  # Per-package-manager prerequisites for the shared package list below.
  case "$pm" in
  apt) install_packages \
    apt-transport-https \
    software-properties-common
    ;;
  dnf) install_packages \
    dnf-plugins-core \
    tar
    ;;
  esac
  install_packages \
    bash \
    ca-certificates \
    curl \
    jq \
    htop \
    gnupg \
    git \
    unzip \
    wget \
    zip
  install_rosetta
  install_nodejs
  install_bun
}
# Install Node.js (default major version 22) via NodeSource on Linux,
# or the plain package manager elsewhere.
install_nodejs() {
  version="${1:-"22"}"
  # Distros with glibc <= 2.27 cannot run newer Node.js builds, so fall
  # back to Node.js 16 there. Only apply the check when $glibc is known:
  # on macOS it is empty, and compare_version "" "2.27" would wrongly
  # force the downgrade.
  if [ -n "$glibc" ] && ! [ "$(compare_version "$glibc" "2.27")" = "1" ]; then
    version="16"
  fi
  case "$pm" in
  dnf | yum)
    bash="$(require bash)"
    script=$(download_file "https://rpm.nodesource.com/setup_$version.x")
    execute "$bash" "$script"
    ;;
  apt)
    bash="$(require bash)"
    script=$(download_file "https://deb.nodesource.com/setup_$version.x")
    execute "$bash" "$script"
    ;;
  esac
  install_packages nodejs
}
install_bun() {
  # Install bun via its official install script (optionally pinned to a
  # specific version) and put it on PATH.
  bash="$(require bash)"
  script=$(download_file "https://bun.sh/install")
  version="${1:-"latest"}"
  if [ "$version" = "latest" ]; then
    execute "$bash" "$script"
  else
    execute "$bash" "$script" -s "$version"
  fi
  append_to_path "$HOME/.bun/bin"
}
# Install Rosetta 2 on macOS so x64 binaries can run on Apple Silicon.
install_rosetta() {
  case "$os" in
  darwin)
    # NOTE(review): 'which arch' looks like a weak proxy for Rosetta
    # being installed — 'arch' ships with stock macOS. Confirm whether
    # this check ever fires on a machine that actually needs Rosetta.
    if ! [ "$(which arch)" ]; then
      execute softwareupdate \
        --install-rosetta \
        --agree-to-license
    fi
    ;;
  esac
}
# Install compilers and build tools needed to build Bun from source,
# then the LLVM toolchain, ccache, Rust, and Docker.
install_build_essentials() {
  # Per-package-manager names for the core compiler/ninja/xz packages.
  case "$pm" in
  apt) install_packages \
    build-essential \
    ninja-build \
    xz-utils
    ;;
  dnf | yum) install_packages \
    ninja-build \
    gcc-c++ \
    xz
    ;;
  brew) install_packages \
    ninja
    ;;
  esac
  install_packages \
    make \
    cmake \
    pkg-config \
    python3 \
    libtool \
    ruby \
    perl \
    golang
  install_llvm
  install_ccache
  install_rust
  install_docker
}
llvm_version_exact() {
  # The exact LLVM toolchain version pinned for each platform.
  case "$os" in
  linux) print "16.0.6" ;;
  darwin | windows) print "18.1.8" ;;
  esac
}
llvm_version() {
  # Major component of the pinned LLVM version (e.g. "16").
  # Pipe directly rather than the redundant 'echo "$(...)"' wrapper.
  llvm_version_exact | cut -d. -f1
}
# Install the pinned LLVM toolchain (see llvm_version_exact).
install_llvm() {
  case "$pm" in
  apt)
    # apt.llvm.org's script adds the repository and installs all LLVM
    # components ("all") for the requested major version.
    bash="$(require bash)"
    script=$(download_file "https://apt.llvm.org/llvm.sh")
    execute "$bash" "$script" "$(llvm_version)" all
    ;;
  brew)
    install_packages "llvm@$(llvm_version)"
    ;;
  esac
}
install_ccache() {
  # ccache is only installed via apt and brew in this script.
  if [ "$pm" = "apt" ] || [ "$pm" = "brew" ]; then
    install_packages ccache
  fi
}
install_rust() {
  # Install Rust via rustup (non-interactive) and put cargo on PATH.
  script=$(download_file "https://sh.rustup.rs")
  execute "$(require sh)" "$script" -y
  append_to_path "$HOME/.cargo/bin"
}
# Install Docker: the cask on macOS, distro packages on Amazon Linux,
# otherwise the official get.docker.com convenience script.
install_docker() {
  case "$pm" in
  brew)
    if ! [ -d "/Applications/Docker.app" ]; then
      package_manager install docker --cask
    fi
    ;;
  *)
    case "$distro-$release" in
    amzn-2 | amzn-1)
      execute amazon-linux-extras install docker
      ;;
    amzn-*)
      install_packages docker
      ;;
    *)
      sh="$(require sh)"
      script=$(download_file "https://get.docker.com")
      execute "$sh" "$script"
      ;;
    esac
    ;;
  esac
  # Start the docker service on boot where systemd is available.
  systemctl="$(which systemctl)"
  if [ -f "$systemctl" ]; then
    execute "$systemctl" enable docker
  fi
}
install_ci_dependencies() {
  # Software only needed on CI machines; no-op elsewhere.
  [ "$ci" = "1" ] || return 0
  install_tailscale
  install_buildkite
}
# Install tailscale: the official script on Linux, 'go install' on macOS.
install_tailscale() {
  case "$os" in
  linux)
    sh="$(require sh)"
    script=$(download_file "https://tailscale.com/install.sh")
    execute "$sh" "$script"
    ;;
  darwin)
    install_packages go
    # Install both binaries explicitly: the original 'tailscale{,d}'
    # relied on brace expansion, which is a bash-ism and is passed
    # through literally by POSIX sh.
    execute_non_root go install tailscale.com/cmd/tailscale@latest
    execute_non_root go install tailscale.com/cmd/tailscaled@latest
    append_to_path "$HOME/go/bin"
    ;;
  esac
}
# Install the buildkite-agent binary system-wide and generate its
# configuration file (only when one does not already exist).
install_buildkite() {
  home_dir="/var/lib/buildkite-agent"
  config_dir="/etc/buildkite-agent"
  config_file="$config_dir/buildkite-agent.cfg"
  if ! [ -d "$home_dir" ]; then
    execute_sudo mkdir -p "$home_dir"
  fi
  if ! [ -d "$config_dir" ]; then
    execute_sudo mkdir -p "$config_dir"
  fi
  case "$os" in
  linux)
    # Create a dedicated system user for the agent and grant it access
    # to the docker daemon when a docker group exists.
    getent="$(require getent)"
    if [ -z "$("$getent" passwd buildkite-agent)" ]; then
      useradd="$(require useradd)"
      execute "$useradd" buildkite-agent \
        --system \
        --no-create-home \
        --home-dir "$home_dir"
    fi
    if [ -n "$("$getent" group docker)" ]; then
      usermod="$(require usermod)"
      execute "$usermod" -aG docker buildkite-agent
    fi
    execute chown -R buildkite-agent:buildkite-agent "$home_dir"
    execute chown -R buildkite-agent:buildkite-agent "$config_dir"
    ;;
  darwin)
    execute_sudo chown -R "$user:admin" "$home_dir"
    execute_sudo chown -R "$user:admin" "$config_dir"
    ;;
  esac
  # NOTE(review): this redirection is not wrapped in sudo — it presumably
  # works because CI runs the script as root; confirm it does not fail
  # when run unprivileged with a root-owned $config_dir.
  if ! [ -f "$config_file" ]; then
    cat <<EOF >"$config_file"
# This is generated by scripts/bootstrap.sh
# https://buildkite.com/docs/agent/v3/configuration
name="%hostname-%random"
tags="v=$v,os=$os,arch=$arch,distro=$distro,release=$release,kernel=$kernel,glibc=$glibc"
build-path="$home_dir/builds"
git-mirrors-path="$home_dir/git"
job-log-path="$home_dir/logs"
plugins-path="$config_dir/plugins"
hooks-path="$config_dir/hooks"
no-ssh-keyscan=true
cancel-grace-period=3600000 # 1 hour
enable-job-log-tmpfile=true
experiment="normalised-upload-paths,resolve-commit-after-checkout,agent-api"
EOF
  fi
  # The official installer drops the agent under $HOME/.buildkite-agent;
  # move the binary to /usr/local/bin and remove the per-user directory.
  bash="$(require bash)"
  script=$(download_file "https://raw.githubusercontent.com/buildkite/agent/main/install.sh")
  execute "$bash" "$script"
  out_dir="$HOME/.buildkite-agent"
  execute_sudo mv -f "$out_dir/bin/buildkite-agent" "/usr/local/bin/buildkite-agent"
  execute rm -rf "$out_dir"
}
# Install the shared libraries and fonts that headless Chrome needs so
# Puppeteer-based tests can launch a browser.
install_chrome_dependencies() {
  # https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#chrome-doesnt-launch-on-linux
  # https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#running-puppeteer-in-the-cloud
  case "$pm" in
  apt)
    install_packages \
      fonts-liberation \
      libatk-bridge2.0-0 \
      libatk1.0-0 \
      libc6 \
      libcairo2 \
      libcups2 \
      libdbus-1-3 \
      libexpat1 \
      libfontconfig1 \
      libgbm1 \
      libgcc1 \
      libglib2.0-0 \
      libgtk-3-0 \
      libnspr4 \
      libnss3 \
      libpango-1.0-0 \
      libpangocairo-1.0-0 \
      libstdc++6 \
      libx11-6 \
      libx11-xcb1 \
      libxcb1 \
      libxcomposite1 \
      libxcursor1 \
      libxdamage1 \
      libxext6 \
      libxfixes3 \
      libxi6 \
      libxrandr2 \
      libxrender1 \
      libxss1 \
      libxtst6 \
      xdg-utils
    # Fixes issue in newer version of Ubuntu:
    # Package 'libasound2' has no installation candidate
    if [ "$(check_package "libasound2t64")" ]; then
      install_packages libasound2t64
    else
      install_packages libasound2
    fi
    ;;
  dnf | yum)
    install_packages \
      alsa-lib \
      atk \
      cups-libs \
      gtk3 \
      ipa-gothic-fonts \
      libXcomposite \
      libXcursor \
      libXdamage \
      libXext \
      libXi \
      libXrandr \
      libXScrnSaver \
      libXtst \
      pango \
      xorg-x11-fonts-100dpi \
      xorg-x11-fonts-75dpi \
      xorg-x11-fonts-cyrillic \
      xorg-x11-fonts-misc \
      xorg-x11-fonts-Type1 \
      xorg-x11-utils
    ;;
  esac
}
# Entry point: detect the system, then install everything in order.
main() {
  check_system # populates $os, $arch, $pm, $user, ... used by all steps
  update_packages
  install_common_software
  install_build_essentials
  install_chrome_dependencies
  install_ci_dependencies # no-op unless $ci is set
}

main

View File

@@ -130,10 +130,7 @@ function getCachePath(branch) {
const repository = process.env.BUILDKITE_REPO;
const fork = process.env.BUILDKITE_PULL_REQUEST_REPO;
const repositoryKey = (fork || repository).replace(/[^a-z0-9]/gi, "-");
const branchName = (branch || process.env.BUILDKITE_BRANCH).replace(/[^a-z0-9]/gi, "-");
const branchKey = branchName.startsWith("gh-readonly-queue-")
? branchName.slice(18, branchName.indexOf("-pr-"))
: branchName;
const branchKey = (branch || process.env.BUILDKITE_BRANCH).replace(/[^a-z0-9]/gi, "-");
const stepKey = process.env.BUILDKITE_STEP_KEY.replace(/[^a-z0-9]/gi, "-");
return resolve(buildPath, "..", "cache", repositoryKey, branchKey, stepKey);
}

View File

@@ -1,57 +0,0 @@
# Get initial free space for comparison
$beforeFree = (Get-WmiObject Win32_LogicalDisk -Filter "DeviceID='C:'").FreeSpace / 1GB
Write-Host "Starting disk cleanup..."
Write-Host "Initial free space: $([math]::Round($beforeFree, 2)) GB"
# Clear Windows Temp folders
Write-Host "Cleaning Windows temp folders..."
Remove-Item -Path "C:\Windows\Temp\" -Recurse -Force -ErrorAction SilentlyContinue
Remove-Item -Path "$env:TEMP\" -Recurse -Force -ErrorAction SilentlyContinue
# Clear BuildKite artifacts and caches
Write-Host "Cleaning BuildKite artifacts..."
$buildkitePaths = @(
"C:\BuildKite\builds",
"C:\BuildKite\artifacts",
"$env:USERPROFILE\.buildkite-agent\artifacts"
)
foreach ($path in $buildkitePaths) {
if (Test-Path $path) {
Remove-Item -Path "$path\" -Recurse -Force -ErrorAction SilentlyContinue
}
}
# Clear package manager caches
Write-Host "Cleaning package manager caches..."
# NuGet
Remove-Item -Path "$env:USERPROFILE\.nuget\packages" -Recurse -Force -ErrorAction SilentlyContinue
# npm
Remove-Item -Path "$env:USERPROFILE\AppData\Roaming\npm-cache" -Recurse -Force -ErrorAction SilentlyContinue
# yarn
Remove-Item -Path "$env:USERPROFILE\AppData\Local\Yarn\Cache" -Recurse -Force -ErrorAction SilentlyContinue
# bun
Remove-Item -Path "$env:AppData\bun\install\cache" -Recurse -Force -ErrorAction SilentlyContinue
Remove-Item -Path "$env:LocalAppData\bun\install\cache" -Recurse -Force -ErrorAction SilentlyContinue
# Clean Docker
Write-Host "Cleaning Docker resources..."
if (Get-Command docker -ErrorAction SilentlyContinue) {
docker system prune -af
}
# Empty Recycle Bin
Write-Host "Emptying Recycle Bin..."
Clear-RecycleBin -Force -ErrorAction SilentlyContinue
# Run Windows Disk Cleanup utility
Write-Host "Running Windows Disk Cleanup..."
cleanmgr /sagerun:1 /autoclean
# Get final free space and calculate difference
$afterFree = (Get-WmiObject Win32_LogicalDisk -Filter "DeviceID='C:'").FreeSpace / 1GB
$spaceRecovered = $afterFree - $beforeFree
Write-Host "`nCleanup completed!"
Write-Host "Final free space: $([math]::Round($afterFree, 2)) GB"
Write-Host "Space recovered: $([math]::Round($spaceRecovered, 2)) GB"

View File

@@ -676,7 +676,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
}
// There's two parts to this.
// 1. Storing the underlying string.
// 1. Storing the underyling string.
// 2. Making the key accessible at the index.
pub fn putKey(self: *Self, key: anytype, result: *Result) !void {
self.map.mutex.lock();

View File

@@ -1,4 +1,4 @@
#include "BakeGlobalObject.h"
#include "BakeDevGlobalObject.h"
#include "JSNextTickQueue.h"
#include "JavaScriptCore/GlobalObjectMethodTable.h"
#include "JavaScriptCore/JSInternalPromise.h"
@@ -14,61 +14,34 @@ extern "C" void BakeInitProcessIdentifier()
}
JSC::JSInternalPromise*
bakeModuleLoaderImportModule(JSC::JSGlobalObject* jsGlobalObject,
moduleLoaderImportModule(JSC::JSGlobalObject* jsGlobalObject,
JSC::JSModuleLoader*, JSC::JSString* moduleNameValue,
JSC::JSValue parameters,
const JSC::SourceOrigin& sourceOrigin)
{
// TODO: forward this to the runtime?
JSC::VM& vm = jsGlobalObject->vm();
WTF::String keyString = moduleNameValue->getString(jsGlobalObject);
auto err = JSC::createTypeError(
jsGlobalObject,
WTF::makeString(
"Dynamic import to '"_s, keyString,
"' should have been replaced with a hook into the module runtime"_s));
"Dynamic import should have been replaced with a hook into the module runtime"_s));
auto* promise = JSC::JSInternalPromise::create(
vm, jsGlobalObject->internalPromiseStructure());
promise->reject(jsGlobalObject, err);
return promise;
}
extern "C" BunString BakeProdResolve(JSC::JSGlobalObject*, BunString a, BunString b);
JSC::Identifier bakeModuleLoaderResolve(JSC::JSGlobalObject* jsGlobal,
JSC::JSModuleLoader* loader, JSC::JSValue key,
JSC::JSValue referrer, JSC::JSValue origin)
{
Bake::GlobalObject* global = jsCast<Bake::GlobalObject*>(jsGlobal);
JSC::VM& vm = global->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
if (global->isProduction()) {
WTF::String keyString = key.toWTFString(global);
RETURN_IF_EXCEPTION(scope, vm.propertyNames->emptyIdentifier);
ASSERT(referrer.isString());
auto refererString = jsCast<JSC::JSString*>(referrer)->value(global);
BunString result = BakeProdResolve(global, Bun::toString(referrer.getString(global)), Bun::toString(keyString));
return JSC::Identifier::fromString(vm, result.toWTFString(BunString::ZeroCopy));
} else {
JSC::throwTypeError(global, scope, "External imports are not allowed in Bun Bake's dev server. This is a bug in Bun's bundler."_s);
return vm.propertyNames->emptyIdentifier;
}
}
#define INHERIT_HOOK_METHOD(name) \
Zig::GlobalObject::s_globalObjectMethodTable.name
const JSC::GlobalObjectMethodTable GlobalObject::s_globalObjectMethodTable = {
const JSC::GlobalObjectMethodTable DevGlobalObject::s_globalObjectMethodTable = {
INHERIT_HOOK_METHOD(supportsRichSourceInfo),
INHERIT_HOOK_METHOD(shouldInterruptScript),
INHERIT_HOOK_METHOD(javaScriptRuntimeFlags),
INHERIT_HOOK_METHOD(queueMicrotaskToEventLoop),
INHERIT_HOOK_METHOD(shouldInterruptScriptBeforeTimeout),
bakeModuleLoaderImportModule,
bakeModuleLoaderResolve,
moduleLoaderImportModule,
INHERIT_HOOK_METHOD(moduleLoaderResolve),
INHERIT_HOOK_METHOD(moduleLoaderFetch),
INHERIT_HOOK_METHOD(moduleLoaderCreateImportMetaProperties),
INHERIT_HOOK_METHOD(moduleLoaderEvaluate),
@@ -85,16 +58,17 @@ const JSC::GlobalObjectMethodTable GlobalObject::s_globalObjectMethodTable = {
INHERIT_HOOK_METHOD(canCompileStrings),
};
GlobalObject* GlobalObject::create(JSC::VM& vm, JSC::Structure* structure,
DevGlobalObject*
DevGlobalObject::create(JSC::VM& vm, JSC::Structure* structure,
const JSC::GlobalObjectMethodTable* methodTable)
{
GlobalObject* ptr = new (NotNull, JSC::allocateCell<GlobalObject>(vm))
GlobalObject(vm, structure, methodTable);
DevGlobalObject* ptr = new (NotNull, JSC::allocateCell<DevGlobalObject>(vm))
DevGlobalObject(vm, structure, methodTable);
ptr->finishCreation(vm);
return ptr;
}
void GlobalObject::finishCreation(JSC::VM& vm)
void DevGlobalObject::finishCreation(JSC::VM& vm)
{
Base::finishCreation(vm);
ASSERT(inherits(info()));
@@ -103,8 +77,7 @@ void GlobalObject::finishCreation(JSC::VM& vm)
extern "C" BunVirtualMachine* Bun__getVM();
// A lot of this function is taken from 'Zig__GlobalObject__create'
// TODO: remove this entire method
extern "C" GlobalObject* BakeCreateDevGlobal(DevServer* owner,
extern "C" DevGlobalObject* BakeCreateDevGlobal(DevServer* owner,
void* console)
{
JSC::VM& vm = JSC::VM::create(JSC::HeapType::Large).leakRef();
@@ -113,11 +86,11 @@ extern "C" GlobalObject* BakeCreateDevGlobal(DevServer* owner,
BunVirtualMachine* bunVM = Bun__getVM();
WebCore::JSVMClientData::create(&vm, bunVM);
JSC::Structure* structure = GlobalObject::createStructure(vm);
GlobalObject* global = GlobalObject::create(
vm, structure, &GlobalObject::s_globalObjectMethodTable);
JSC::Structure* structure = DevGlobalObject::createStructure(vm);
DevGlobalObject* global = DevGlobalObject::create(
vm, structure, &DevGlobalObject::s_globalObjectMethodTable);
if (!global)
BUN_PANIC("Failed to create BakeGlobalObject");
BUN_PANIC("Failed to create DevGlobalObject");
global->m_devServer = owner;
global->m_bunVM = bunVM;
@@ -142,25 +115,4 @@ extern "C" GlobalObject* BakeCreateDevGlobal(DevServer* owner,
return global;
}
extern "C" GlobalObject* BakeCreateProdGlobal(JSC::VM* vm, void* console)
{
JSC::JSLockHolder locker(vm);
BunVirtualMachine* bunVM = Bun__getVM();
JSC::Structure* structure = GlobalObject::createStructure(*vm);
GlobalObject* global = GlobalObject::create(*vm, structure, &GlobalObject::s_globalObjectMethodTable);
if (!global)
BUN_PANIC("Failed to create BakeGlobalObject");
global->m_devServer = nullptr;
global->m_bunVM = bunVM;
JSC::gcProtect(global);
global->setConsole(console);
global->setStackTraceLimit(10); // Node.js defaults to 10
return global;
}
}; // namespace Bake

View File

@@ -8,18 +8,15 @@ struct DevServer; // DevServer.zig
struct Route; // DevServer.zig
struct BunVirtualMachine;
class GlobalObject : public Zig::GlobalObject {
class DevGlobalObject : public Zig::GlobalObject {
public:
using Base = Zig::GlobalObject;
/// Null if in production
DevServer* m_devServer;
template<typename, JSC::SubspaceAccess mode> static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
{
if constexpr (mode == JSC::SubspaceAccess::Concurrently)
return nullptr;
return WebCore::subspaceForImpl<GlobalObject, WebCore::UseCustomHeapCellType::Yes>(
return WebCore::subspaceForImpl<DevGlobalObject, WebCore::UseCustomHeapCellType::Yes>(
vm,
[](auto& spaces) { return spaces.m_clientSubspaceForBakeGlobalScope.get(); },
[](auto& spaces, auto&& space) { spaces.m_clientSubspaceForBakeGlobalScope = std::forward<decltype(space)>(space); },
@@ -29,18 +26,18 @@ public:
}
static const JSC::GlobalObjectMethodTable s_globalObjectMethodTable;
static GlobalObject* create(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable);
static DevGlobalObject* create(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable);
ALWAYS_INLINE bool isProduction() const { return !m_devServer; }
DevServer* m_devServer;
void finishCreation(JSC::VM& vm);
GlobalObject(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable)
DevGlobalObject(JSC::VM& vm, JSC::Structure* structure, const JSC::GlobalObjectMethodTable* methodTable)
: Zig::GlobalObject(vm, structure, methodTable) { }
};
// Zig API
extern "C" void KitInitProcessIdentifier();
extern "C" GlobalObject* KitCreateDevGlobal(DevServer* owner, void* console);
extern "C" DevGlobalObject* KitCreateDevGlobal(DevServer* owner, void* console);
}; // namespace Kit

View File

@@ -0,0 +1,79 @@
// clang-format off
#include "BakeDevSourceProvider.h"
#include "BakeDevGlobalObject.h"
#include "JavaScriptCore/Completion.h"
#include "JavaScriptCore/Identifier.h"
#include "JavaScriptCore/JSCJSValue.h"
#include "JavaScriptCore/JSCast.h"
#include "JavaScriptCore/JSLock.h"
#include "JavaScriptCore/JSMap.h"
#include "JavaScriptCore/JSModuleLoader.h"
#include "JavaScriptCore/JSString.h"
#include "JavaScriptCore/JSModuleNamespaceObject.h"
namespace Bake {
extern "C" LoadServerCodeResult BakeLoadInitialServerCode(DevGlobalObject* global, BunString source) {
JSC::VM& vm = global->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
String string = "bake://server.js"_s;
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
source.toWTFString(),
origin,
WTFMove(string),
WTF::TextPosition(),
JSC::SourceProviderSourceType::Module
));
JSC::JSString* key = JSC::jsString(vm, string);
global->moduleLoader()->provideFetch(global, key, sourceCode);
RETURN_IF_EXCEPTION(scope, {});
JSC::JSInternalPromise* internalPromise = global->moduleLoader()->loadAndEvaluateModule(global, key, JSC::jsUndefined(), JSC::jsUndefined());
RETURN_IF_EXCEPTION(scope, {});
return { internalPromise, key };
}
extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatch(DevGlobalObject* global, BunString source) {
JSC::VM&vm=global->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
String string = "bake://server.patch.js"_s;
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
source.toWTFString(),
origin,
WTFMove(string),
WTF::TextPosition(),
JSC::SourceProviderSourceType::Program
));
JSC::JSValue result = vm.interpreter.executeProgram(sourceCode, global, global);
RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({}));
RELEASE_ASSERT(result);
return JSC::JSValue::encode(result);
}
extern "C" JSC::EncodedJSValue BakeGetRequestHandlerFromModule(
DevGlobalObject* global,
JSC::JSString* key
) {
JSC::VM&vm = global->vm();
JSC::JSMap* map = JSC::jsCast<JSC::JSMap*>(
global->moduleLoader()->getDirect(
vm, JSC::Identifier::fromString(global->vm(), "registry"_s)
));
JSC::JSValue entry = map->get(global, key);
ASSERT(entry.isObject()); // should have called BakeLoadServerCode and wait for that promise
JSC::JSValue module = entry.getObject()->get(global, JSC::Identifier::fromString(global->vm(), "module"_s));
ASSERT(module.isCell());
JSC::JSModuleNamespaceObject* namespaceObject = global->moduleLoader()->getModuleNamespaceObject(global, module);
ASSERT(namespaceObject);
return JSC::JSValue::encode(namespaceObject->get(global, vm.propertyNames->defaultKeyword));
}
} // namespace Bake

View File

@@ -1,7 +1,7 @@
#pragma once
#include "root.h"
#include "headers-handwritten.h"
#include "BakeGlobalObject.h"
#include "BakeDevGlobalObject.h"
#include "JavaScriptCore/SourceOrigin.h"
namespace Bake {
@@ -40,4 +40,9 @@ private:
) {}
};
// Zig API
extern "C" LoadServerCodeResult BakeLoadInitialServerCode(DevGlobalObject* global, BunString source);
extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatch(DevGlobalObject* global, BunString source);
extern "C" JSC::EncodedJSValue BakeGetRequestHandlerFromModule(DevGlobalObject* global, JSC::JSString* encodedModule);
} // namespace Bake

View File

@@ -1,39 +0,0 @@
#include "BakeProduction.h"
#include "BunBuiltinNames.h"
#include "WebCoreJSBuiltins.h"
#include "JavaScriptCore/JSPromise.h"
#include "JavaScriptCore/Exception.h"
namespace Bake {
extern "C" JSC::JSPromise* BakeRenderRoutesForProd(
JSC::JSGlobalObject* global,
BunString outbase,
JSC::JSValue renderStaticCallback,
JSC::JSValue clientEntryUrl,
JSC::JSValue files,
JSC::JSValue patterns,
JSC::JSValue styles)
{
JSC::VM& vm = global->vm();
JSC::JSFunction* cb = JSC::JSFunction::create(vm, global, WebCore::bakeRenderRoutesForProdCodeGenerator(vm), global);
JSC::CallData callData = JSC::getCallData(cb);
JSC::MarkedArgumentBuffer args;
args.append(JSC::jsString(vm, outbase.toWTFString()));
args.append(renderStaticCallback);
args.append(clientEntryUrl);
args.append(files);
args.append(patterns);
args.append(styles);
NakedPtr<JSC::Exception> returnedException = nullptr;
auto result = JSC::call(global, cb, callData, JSC::jsUndefined(), args, returnedException);
if (UNLIKELY(returnedException)) {
// This should be impossible because it returns a promise.
return JSC::JSPromise::rejectedPromise(global, returnedException->value());
}
return JSC::jsCast<JSC::JSPromise*>(result);
}
} // namespace Bake

View File

@@ -1,5 +0,0 @@
#include "root.h"
#include "headers-handwritten.h"
namespace Bake {
} // namespace Bake

View File

@@ -1,127 +0,0 @@
// clang-format off
#include "BakeSourceProvider.h"
#include "BakeGlobalObject.h"
#include "JavaScriptCore/Completion.h"
#include "JavaScriptCore/Identifier.h"
#include "JavaScriptCore/JSCJSValue.h"
#include "JavaScriptCore/JSCast.h"
#include "JavaScriptCore/JSLock.h"
#include "JavaScriptCore/JSMap.h"
#include "JavaScriptCore/JSModuleLoader.h"
#include "JavaScriptCore/JSString.h"
#include "JavaScriptCore/JSModuleNamespaceObject.h"
namespace Bake {
extern "C" LoadServerCodeResult BakeLoadInitialServerCode(GlobalObject* global, BunString source) {
JSC::VM& vm = global->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
String string = "bake://server.js"_s;
JSC::JSString* key = JSC::jsString(vm, string);
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
source.toWTFString(),
origin,
WTFMove(string),
WTF::TextPosition(),
JSC::SourceProviderSourceType::Module
));
global->moduleLoader()->provideFetch(global, key, sourceCode);
RETURN_IF_EXCEPTION(scope, {});
JSC::JSInternalPromise* internalPromise = global->moduleLoader()->loadAndEvaluateModule(global, key, JSC::jsUndefined(), JSC::jsUndefined());
RETURN_IF_EXCEPTION(scope, {});
return { internalPromise, key };
}
extern "C" JSC::JSInternalPromise* BakeLoadModuleByKey(GlobalObject* global, JSC::JSString* key) {
return global->moduleLoader()->loadAndEvaluateModule(global, key, JSC::jsUndefined(), JSC::jsUndefined());
}
extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatch(GlobalObject* global, BunString source) {
JSC::VM&vm = global->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
String string = "bake://server.patch.js"_s;
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
source.toWTFString(),
origin,
WTFMove(string),
WTF::TextPosition(),
JSC::SourceProviderSourceType::Program
));
JSC::JSValue result = vm.interpreter.executeProgram(sourceCode, global, global);
RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({}));
RELEASE_ASSERT(result);
return JSC::JSValue::encode(result);
}
extern "C" JSC::EncodedJSValue BakeGetModuleNamespace(
JSC::JSGlobalObject* global,
JSC::JSValue keyValue
) {
JSC::JSString* key = JSC::jsCast<JSC::JSString*>(keyValue);
JSC::VM& vm = global->vm();
JSC::JSMap* map = JSC::jsCast<JSC::JSMap*>(
global->moduleLoader()->getDirect(
vm, JSC::Identifier::fromString(global->vm(), "registry"_s)
));
JSC::JSValue entry = map->get(global, key);
ASSERT(entry.isObject()); // should have called BakeLoadServerCode and wait for that promise
JSC::JSValue module = entry.getObject()->get(global, JSC::Identifier::fromString(global->vm(), "module"_s));
ASSERT(module.isCell());
JSC::JSModuleNamespaceObject* namespaceObject = global->moduleLoader()->getModuleNamespaceObject(global, module);
ASSERT(namespaceObject);
return JSC::JSValue::encode(namespaceObject);
}
extern "C" JSC::EncodedJSValue BakeGetDefaultExportFromModule(
JSC::JSGlobalObject* global,
JSC::JSValue keyValue
) {
JSC::VM& vm = global->vm();
return JSC::JSValue::encode(jsCast<JSC::JSModuleNamespaceObject*>(JSC::JSValue::decode(BakeGetModuleNamespace(global, keyValue)))->get(global, vm.propertyNames->defaultKeyword));
}
// There were issues when trying to use JSValue.get from zig
extern "C" JSC::EncodedJSValue BakeGetOnModuleNamespace(
JSC::JSGlobalObject* global,
JSC::JSModuleNamespaceObject* moduleNamespace,
const unsigned char* key,
size_t keyLength
) {
JSC::VM& vm = global->vm();
const auto propertyString = String(StringImpl::createWithoutCopying({ key, keyLength }));
const auto identifier = JSC::Identifier::fromString(vm, propertyString);
const auto property = JSC::PropertyName(identifier);
return JSC::JSValue::encode(moduleNamespace->get(global, property));
}
extern "C" JSC::EncodedJSValue BakeRegisterProductionChunk(JSC::JSGlobalObject* global, BunString virtualPathName, BunString source) {
JSC::VM& vm = global->vm();
auto scope = DECLARE_THROW_SCOPE(vm);
String string = virtualPathName.toWTFString();
JSC::JSString* key = JSC::jsString(vm, string);
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
source.toWTFString(),
origin,
WTFMove(string),
WTF::TextPosition(),
JSC::SourceProviderSourceType::Module
));
global->moduleLoader()->provideFetch(global, key, sourceCode);
RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({}));
return JSC::JSValue::encode(key);
}
} // namespace Bake

File diff suppressed because it is too large Load Diff

View File

@@ -1,3 +0,0 @@
//! Discovers routes from the filesystem, as instructed by the framework
//! configuration. Supports incrementally updating for DevServer, or
//! serializing to a binary for production builds.

72
src/bake/bake.d.ts vendored
View File

@@ -1,8 +1,6 @@
declare module "bun" {
declare function wipDevServerExpectHugeBreakingChanges(options: Bake.Options): never;
type Awaitable<T> = T | Promise<T>;
declare namespace Bake {
interface Options {
/**
@@ -56,7 +54,7 @@ declare module "bun" {
/**
* Add extra modules
*/
builtInModules?: Record<string, BuiltInModule>;
builtInModules: Record<string, BuiltInModule>;
/**
* Bun offers integration for React's Server Components with an
* interface that is generic enough to adapt to any framework.
@@ -85,14 +83,12 @@ declare module "bun" {
* If you are unsure what to set this to for a custom server components
* framework, choose 'false'.
*
* When set `true`, bundling "use client" components for SSR will be
* placed in a separate bundling graph without the `react-server`
* condition. All imports that stem from here get re-bundled for
* this second graph, regardless if they actually differ via this
* condition.
* When set `true`, when bundling "use client" components for SSR, these
* files will be placed in a separate bundling graph where `conditions` does
* not include `react-server`.
*
* The built in framework config for React enables this flag so that server
* components and client components utilize their own versions of React,
* components and client components, utilize their own versions of React,
* despite running in the same process. This facilitates different aspects
* of the server and client react runtimes, such as `async` components only
* being available on the server.
@@ -132,40 +128,14 @@ declare module "bun" {
* during rendering.
*/
serverRegisterClientReferenceExport: string | undefined;
// /**
// * Allow creating client components inside of server-side files by using "use client"
// * as the first line of a function declaration. This is useful for small one-off
// * interactive components. This is behind a flag because it is not a feature of
// * React or Next.js, but rather is implemented because it is possible to.
// *
// * The client versions of these are tree-shaked extremely aggressively: anything
// * not referenced by the function body will be removed entirely.
// */
// allowAnonymousClientComponents: boolean;
}
/** Customize the React Fast Refresh transform. */
interface ReactFastRefreshOptions {
/**
* This import has four exports, mirroring "react-refresh/runtime":
*
* `injectIntoGlobalHook(window): void`
* Called on first startup, before the user entrypoint.
*
* `register(component, uniqueId: string): void`
* Called on every function that starts with an uppercase letter. These
* may or may not be components, but they are always functions.
*
* `createSignatureFunctionForTransform(): ReactRefreshSignatureFunction`
* TODO: document. A passing no-op for this api is `return () => {}`
*
* @default "react-refresh/runtime"
*/
/** @default "react-refresh/runtime" */
importSource: ImportSource | undefined;
}
type ReactRefreshSignatureFunction = () => void | ((func: Function, hash: string, force?: bool, customHooks?: () => Function[]) => void);
/// Will be resolved from the point of view of the framework user's project root
/// Examples: `react-dom`, `./entry_point.tsx`, `/absolute/path.js`
type ImportSource = string;
@@ -175,13 +145,7 @@ declare module "bun" {
* The framework implementation decides and enforces the shape
* of the route module. Bun passes it as an opaque value.
*/
default: (request: Request, routeModule: unknown, routeMetadata: RouteMetadata) => Awaitable<Response>;
/**
* Static rendering does not take a response in, and can generate
* multiple output files. Note that `import.meta.env.STATIC` will
* be inlined to true during a static build.
*/
staticRender: (routeModule: unknown, routeMetadata: RouteMetadata) => Awaitable<Record<string, Blob | ArrayBuffer>>;
default: (request: Request, routeModule: unknown, routeMetadata: RouteMetadata) => Response;
}
interface ClientEntryPoint {
@@ -194,25 +158,11 @@ declare module "bun" {
onServerSideReload?: () => void;
}
/**
* This object and it's children may be re-used between invocations, so it
* is not safe to mutate it at all.
*/
interface RouteMetadata {
/**
* A list of js files that the route will need to be interactive.
*/
readonly scripts: ReadonlyArray<string>;
/**
* A list of css files that the route will need to be styled.
*/
readonly styles: ReadonlyArray<string>;
/**
* Can be used by the framework to mention the route file. Only provided in
* development mode to prevent leaking these details into production
* builds.
*/
devRoutePath?: string;
/** A list of css files that the route will need to be styled */
styles: string[];
/** A list of js files that the route will need to be interactive */
scripts: string[];
}
}

View File

@@ -11,8 +11,6 @@ interface Config {
separateSSRGraph?: true;
// Client
/** Dev Server's `configuration_hash_key` */
version: string;
/** If available, this is the Id of `react-refresh/runtime` */
refresh?: Id;
/**
@@ -41,7 +39,7 @@ declare const side: "client" | "server";
* interface as opposed to a WebSocket connection.
*/
declare var server_exports: {
handleRequest: (req: Request, routeModuleId: Id, clientEntryUrl: string, styles: string[]) => any;
handleRequest: (req: Request, meta: HandleRequestMeta, id: Id) => any;
registerUpdate: (
modules: any,
componentManifestAdd: null | string[],
@@ -49,6 +47,11 @@ declare var server_exports: {
) => void;
};
interface HandleRequestMeta {
// url for script tag
clientEntryPoint: string;
}
/*
* If you are running a debug build of Bun. These debug builds should provide
* helpful information to someone working on the bundler itself.

View File

@@ -3,11 +3,6 @@
//! server, server components, and other integrations. Instead of taking the
//! role as a framework, Bake is tool for frameworks to build on top of.
/// Zig version of TS definition 'Bake.Options' in 'bake.d.ts'
pub const UserOptions = struct {
framework: Framework,
};
/// Temporary function to invoke dev server via JavaScript. Will be
/// replaced with a user-facing API. Refs the event loop forever.
pub fn jsWipDevServer(global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JSValue {
@@ -24,7 +19,7 @@ pub fn jsWipDevServer(global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JS
, .{});
bun.Output.flush();
const options = bakeOptionsFromJs(global, callframe.argument(0)) catch {
const options = devServerOptionsFromJs(global, callframe.argument(0)) catch {
if (!global.hasException())
global.throwInvalidArguments("invalid arguments", .{});
return .zero;
@@ -154,9 +149,6 @@ pub const Framework = struct {
if (str.eqlComptime("react-server-components")) {
return Framework.react();
}
if (str.eqlComptime("react")) {
return Framework.react();
}
}
if (!opts.isObject()) {
@@ -206,22 +198,8 @@ pub const Framework = struct {
global.throwInvalidArguments("'framework.reactFastRefresh' must be an object or 'true'", .{});
return error.JSError;
}
const prop = rfr.get(global, "importSource") orelse {
global.throwInvalidArguments("'framework.reactFastRefresh' is missing 'importSource'", .{});
return error.JSError;
};
const str = prop.toBunString(global);
defer str.deref();
if (global.hasException())
return error.JSError;
// Leak
break :brk .{
.import_source = str.toUTF8(bun.default_allocator).slice(),
};
// in addition to here, this import isnt actually wired up to js_parser where the default is hardcoded.
bun.todoPanic(@src(), "custom react-fast-refresh import source", .{});
},
.server_components = sc: {
const sc: JSValue = opts.get(global, "serverComponents") orelse {
@@ -278,86 +256,11 @@ pub const Framework = struct {
},
};
}
pub fn initBundler(
framework: *Framework,
allocator: std.mem.Allocator,
log: *bun.logger.Log,
mode: Mode,
comptime renderer: Graph,
out: *bun.bundler.Bundler,
) !void {
out.* = try bun.Bundler.init(
allocator, // TODO: this is likely a memory leak
log,
std.mem.zeroes(bun.Schema.Api.TransformOptions),
null,
);
out.options.target = switch (renderer) {
.client => .browser,
.server, .ssr => .bun,
};
out.options.public_path = switch (renderer) {
.client => DevServer.client_prefix,
.server, .ssr => "",
};
out.options.entry_points = &.{};
out.options.log = log;
out.options.output_format = switch (mode) {
.development => .internal_bake_dev,
.production => .esm,
};
out.options.out_extensions = bun.StringHashMap([]const u8).init(out.allocator);
out.options.hot_module_reloading = mode == .development;
out.options.code_splitting = mode == .production;
// force disable filesystem output, even though bundle_v2
// is special cased to return before that code is reached.
out.options.output_dir = "";
// framework configuration
out.options.react_fast_refresh = mode == .development and renderer == .client and framework.react_fast_refresh != null;
out.options.server_components = framework.server_components != null;
out.options.conditions = try bun.options.ESMConditions.init(allocator, out.options.target.defaultConditions());
if (renderer == .server and framework.server_components != null) {
try out.options.conditions.appendSlice(&.{"react-server"});
}
out.options.production = mode == .production;
out.options.tree_shaking = mode == .production;
out.options.minify_syntax = true; // required for DCE
// out.options.minify_identifiers = mode == .production;
// out.options.minify_whitespace = mode == .production;
out.options.experimental_css = true;
out.options.css_chunking = true;
out.options.framework = framework;
out.configureLinker();
try out.configureDefines();
out.options.jsx.development = mode == .development;
try addImportMetaDefines(allocator, out.options.define, mode, switch (renderer) {
.client => .client,
.server, .ssr => .server,
});
if (mode == .production) {
out.options.entry_naming = "[name]-[hash].[ext]";
out.options.chunk_naming = "chunk-[name]-[hash].[ext]";
}
out.resolver.opts = out.options;
}
};
// TODO: this function leaks memory and bad error handling, but that is OK since
pub fn bakeOptionsFromJs(global: *JSC.JSGlobalObject, options: JSValue) !DevServer.Options {
// this API is not finalized.
fn devServerOptionsFromJs(global: *JSC.JSGlobalObject, options: JSValue) !DevServer.Options {
if (!options.isObject()) return error.Invalid;
const routes_js = try options.getArray(global, "routes") orelse return error.Invalid;
@@ -464,17 +367,18 @@ pub fn addImportMetaDefines(
pub const server_virtual_source: bun.logger.Source = .{
.path = bun.fs.Path.initForKitBuiltIn("bun", "bake/server"),
.key_path = bun.fs.Path.initForKitBuiltIn("bun", "bake/server"),
.contents = "", // Virtual
.index = bun.JSAst.Index.bake_server_data,
};
pub const client_virtual_source: bun.logger.Source = .{
.path = bun.fs.Path.initForKitBuiltIn("bun", "bake/client"),
.key_path = bun.fs.Path.initForKitBuiltIn("bun", "bake/client"),
.contents = "", // Virtual
.index = bun.JSAst.Index.bake_client_data,
};
pub const production = @import("./production.zig");
pub const DevServer = @import("./DevServer.zig");
const std = @import("std");

View File

@@ -24,7 +24,7 @@ const root = hydrateRoot(document, <Async />, {
});
export async function onServerSideReload() {
const response = await fetch(location.href + '/index.rsc', {
const response = await fetch(location.href, {
headers: {
Accept: "text/x-component",
},

View File

@@ -1,27 +0,0 @@
// This file is unused by Bun itself, but rather is a tool for
// contributors to hack on `bun-framework-react` without needing
// to compile bun itself. If changes to this are made, please
// update 'pub fn react' in 'bake.zig'
import type { Bake } from "bun";
export function react(): Bake.Framework {
return {
// When the files are embedded in the Bun binary, relative
// path resolution does not work.
builtInModules: {
'bun-framework-react/client.tsx': { path: require.resolve('./client.tsx') },
'bun-framework-react/server.tsx': { path: require.resolve('./server.tsx') },
'bun-framework-react/ssr.tsx': { path: require.resolve('./ssr.tsx') },
},
clientEntryPoint: "bun-framework-react/client.tsx",
serverEntryPoint: "bun-framework-react/server.tsx",
reactFastRefresh: {
importSource: "react-refresh/runtime",
},
serverComponents: {
separateSSRGraph: true,
serverRegisterClientReferenceExport: 'registerClientReference',
serverRuntimeImportSource: 'react-server-dom-webpack/server'
}
};
}

View File

@@ -1,43 +1,14 @@
import type { Bake } from "bun";
import { renderToReadableStream } from "react-server-dom-webpack/server.browser";
import { renderToHtml } from "bun-framework-rsc/ssr.tsx" with { bunBakeGraph: "ssr" };
import { clientManifest, serverManifest } from "bun:bake/server";
import { join } from 'node:path';
function getPage(route, meta: Bake.RouteMetadata) {
const Route = route.default;
const { styles } = meta;
if (import.meta.env.DEV) {
if (typeof Route !== "function") {
throw new Error(
"Expected the default export of " +
JSON.stringify(meta.devRoutePath) +
" to be a React component, got " +
JSON.stringify(Route),
);
}
}
return (
<html lang="en">
<head>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>Bun + React Server Components</title>
{styles.map(url => (
<link key={url} rel="stylesheet" href={url} />
))}
</head>
<body>
<Route />
</body>
</html>
);
}
import { serverManifest } from "bun:bake/server";
// `server.tsx` exports a function to be used for handling user routes. It takes
// in the Request object, the route's module, and extra route metadata.
export default async function render(request: Request, route: any, meta: Bake.RouteMetadata): Promise<Response> {
export default async function (request: Request, route: any, meta: Bake.RouteMetadata): Promise<Response> {
// TODO: be able to signal to Bake that Accept may include this, so that
// static pages can be pre-rendered both as RSC payload + HTML.
// The framework generally has two rendering modes.
// - Standard browser navigation
// - Client-side navigation
@@ -47,7 +18,21 @@ export default async function render(request: Request, route: any, meta: Bake.Ro
// rendering modes. This is signaled by `client.tsx` via the `Accept` header.
const skipSSR = request.headers.get("Accept")?.includes("text/x-component");
const page = getPage(route, meta);
const Route = route.default;
const page = (
<html lang="en">
<head>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>Bun + React Server Components</title>
{meta.styles.map(url => (
<link key={url} rel="stylesheet" href={url} />
))}
</head>
<body>
<Route />
</body>
</html>
);
// This renders Server Components to a ReadableStream "RSC Payload"
const rscPayload = renderToReadableStream(page, serverManifest);
@@ -59,7 +44,7 @@ export default async function render(request: Request, route: any, meta: Bake.Ro
}
// One straem is used to render SSR. The second is embedded into the html for browser hydration.
// Note: This approach does not stream the response. That practice is called "react flight" and should be added
// Note: This approach does not stream the response.
const [rscPayload1, rscPayload2] = rscPayload.tee();
const rscPayloadBuffer = Bun.readableStreamToText(rscPayload1);
const rw = new HTMLRewriter();
@@ -84,44 +69,3 @@ export default async function render(request: Request, route: any, meta: Bake.Ro
}),
);
}
// For static site generation, a different function is given, one without a request object.
export async function renderStatic(route: any, meta: Bake.RouteMetadata) {
const page = getPage(route, meta);
const rscPayload = renderToReadableStream(page, serverManifest);
const [rscPayload1, rscPayload2] = rscPayload.tee();
// Prepare both files in parallel
let [html, rscPayloadBuffer] = await Promise.all([
Bun.readableStreamToText(await renderToHtml(rscPayload2)),
Bun.readableStreamToText(rscPayload1),
]);
const scripts = meta.scripts.map(url => `<script src=${JSON.stringify(url)}></script>`);
html = html.replace('</body>', `<script id="rsc_payload" type="json">${rscPayloadBuffer}</script>${scripts.join('\n')}</body>`);
// Each route generates a directory with framework-provided files. Keys are
// files relative to the route path, and values are anything `Bun.write`
// supports. Streams may result in lower memory usage.
return {
// Directories like `blog/index.html` are preferred over `blog.html` because
// certain static hosts do not support this conversion. By using `index.html`,
// the static build is more portable.
'/index.html': html,
// The RSC payload is provided so client-side can use this file for seamless
// client-side navigation. This is equivalent to 'Accept: text/x-component'
// for the non-static build.s
'/index.rsc': rscPayloadBuffer,
}
}
// This is a hack to make react-server-dom-webpack work with Bun's bundler.
// It will be removed once Bun acquires react-server-dom-bun.
if (!import.meta.env.DEV) {
globalThis.__webpack_require__ = (id: string) => {
console.log("Bun: __webpack_require__", id);
const y = import.meta.require(join(import.meta.dir, id));
console.log({y});
return y;
};
}

View File

@@ -6,7 +6,7 @@ export interface DeserializedFailure {
// If not specified, it is a client-side error.
file: string | null;
messages: BundlerMessage[];
}
};
export interface BundlerMessage {
kind: "bundler";
@@ -50,7 +50,7 @@ function readLogMsg(r: DataViewReader, level: BundlerMessageLevel) {
notes[i] = readLogData(r);
}
return {
kind: "bundler",
kind: 'bundler',
level,
message,
location,

View File

@@ -98,7 +98,7 @@ pre {
cursor: pointer;
}
.file-name:hover,
/* .file-name:hover,
.file-name:focus-visible {
background-color: var(--item-bg-hover);
}
@@ -108,21 +108,20 @@ pre {
font-size: 70%;
}
.file-name:hover::after,
.file-name:focus-visible {
.file-name:hover::after {
content: " (click to open in editor)";
}
} */
.message {
margin: 1rem;
margin-bottom: 0;
}
button + .message {
button+.message {
margin-top: 0.5rem;
}
.message-text > span {
.message-text>span {
color: var(--color);
}
@@ -169,8 +168,9 @@ button + .message {
}
@media (prefers-color-scheme: light) {
.log-warn,
.log-note {
font-weight: bold;
}
}
}

View File

@@ -11,13 +11,7 @@
// added or previous ones are solved.
import { BundlerMessageLevel } from "../enums";
import { css } from "../macros" with { type: "macro" };
import {
BundlerMessage,
BundlerMessageLocation,
BundlerNote,
decodeSerializedError,
type DeserializedFailure,
} from "./error-serialization";
import { BundlerMessage, BundlerMessageLocation, BundlerNote, decodeSerializedError, type DeserializedFailure } from "./error-serialization";
import { DataViewReader } from "./reader";
if (side !== "client") throw new Error("Not client side!");
@@ -178,22 +172,11 @@ export function updateErrorOverlay() {
// Create the element for the root if it does not yet exist.
if (!dom) {
let title;
let btn;
const root = elem("div", { class: "message-group" }, [
(btn = elem("button", { class: "file-name" }, [(title = textNode())])),
elem("button", { class: "file-name" }, [
title = textNode()
]),
]);
btn.addEventListener("click", () => {
const firstLocation = errors.get(owner)?.messages[0]?.location;
if (!firstLocation) return;
let fileName = title.textContent.replace(/^\//, "");
fetch("/_bun/src/" + fileName, {
headers: {
"Open-In-Editor": "1",
"Editor-Line": firstLocation.line.toString(),
"Editor-Column": firstLocation.column.toString(),
},
});
});
dom = { root, title, messages: [] };
// TODO: sorted insert?
domErrorList.appendChild(root);
@@ -220,48 +203,50 @@ export function updateErrorOverlay() {
setModalVisible(true);
}
const bundleLogLevelToName = ["error", "warn", "note", "debug", "verbose"];
const bundleLogLevelToName = [
"error",
"warn",
"note",
"debug",
"verbose",
];
function renderBundlerMessage(msg: BundlerMessage) {
return elem(
"div",
{ class: "message" },
[
renderErrorMessageLine(msg.level, msg.message),
...(msg.location ? renderCodeLine(msg.location, msg.level) : []),
...msg.notes.map(renderNote),
].flat(1),
);
return elem('div', { class: 'message' }, [
renderErrorMessageLine(msg.level, msg.message),
...msg.location ? renderCodeLine(msg.location, msg.level) : [],
...msg.notes.map(renderNote),
].flat(1));
}
function renderErrorMessageLine(level: BundlerMessageLevel, text: string) {
const levelName = bundleLogLevelToName[level];
if (IS_BUN_DEVELOPMENT && !levelName) {
if(IS_BUN_DEVELOPMENT && !levelName) {
throw new Error("Unknown log level: " + level);
}
return elem("div", { class: "message-text" }, [
elemText("span", { class: "log-" + levelName }, levelName),
elemText("span", { class: "log-colon" }, ": "),
elemText("span", { class: "log-text" }, text),
return elem('div', { class: 'message-text' } , [
elemText('span', { class: 'log-' + levelName }, levelName),
elemText('span', { class: 'log-colon' }, ': '),
elemText('span', { class: 'log-text' }, text),
]);
}
function renderCodeLine(location: BundlerMessageLocation, level: BundlerMessageLevel) {
return [
elem("div", { class: "code-line" }, [
elemText("code", { class: "line-num" }, `${location.line}`),
elemText("pre", { class: "code-view" }, location.lineText),
]),
elem("div", { class: "highlight-wrap log-" + bundleLogLevelToName[level] }, [
elemText("span", { class: "space" }, "_".repeat(`${location.line}`.length + location.column - 1)),
elemText("span", { class: "line" }, "_".repeat(location.length)),
elem('div', { class: 'code-line' }, [
elemText('code', { class: 'line-num' }, `${location.line}`),
elemText('pre', { class: 'code-view' }, location.lineText),
]),
elem('div', { class: 'highlight-wrap log-' + bundleLogLevelToName[level] }, [
elemText('span', { class: 'space' }, '_'.repeat(`${location.line}`.length + location.column - 1)),
elemText('span', { class: 'line' }, '_'.repeat(location.length)),
])
];
}
function renderNote(note: BundlerNote) {
return [
renderErrorMessageLine(BundlerMessageLevel.note, note.message),
...(note.location ? renderCodeLine(note.location, BundlerMessageLevel.note) : []),
...note.location ? renderCodeLine(note.location, BundlerMessageLevel.note) : [],
];
}
}

View File

@@ -40,8 +40,4 @@ export class DataViewReader {
hasMoreData() {
return this.cursor < this.view.byteLength;
}
rest() {
return this.view.buffer.slice(this.cursor);
}
}

View File

@@ -2,35 +2,22 @@ const isLocal = location.host === "localhost" || location.host === "127.0.0.1";
function wait() {
return new Promise<void>(done => {
let timer: Timer | null = null;
const onBlur = () => {
if (timer !== null) {
clearTimeout(timer);
timer = null;
}
};
let timer;
const onTimeout = () => {
if (timer !== null) clearTimeout(timer);
window.removeEventListener("focus", onTimeout);
window.removeEventListener("blur", onBlur);
document.removeEventListener("focus", onTimeout);
done();
};
window.addEventListener("focus", onTimeout);
if (document.hasFocus()) {
timer = setTimeout(
() => {
timer = null;
onTimeout();
},
isLocal ? 2_500 : 2_500,
);
window.addEventListener("blur", onBlur);
}
document.addEventListener("focus", onTimeout);
timer = setTimeout(
() => {
timer = null;
onTimeout();
},
isLocal ? 2_500 : 30_000,
);
});
}

View File

@@ -1,3 +1,22 @@
// TODO: generate this using information in DevServer.zig
export const enum MessageId {
/// Version packet
version = 86,
/// When visualization mode is enabled, this packet contains
/// the entire serialized IncrementalGraph state.
visualizer = 118,
/// Sent on a successful bundle, containing client code.
hot_update = 40,
/// Sent on a successful bundle, containing a list of
/// routes that are updated.
route_update = 82,
/// Sent when the list of errors changes.
errors = 69,
/// Sent when all errors are cleared. Semi-redundant
errors_cleared = 99,
}
export const enum BundlerMessageLevel {
err = 0,
warn = 1,

75
src/bake/hmr-protocol.md Normal file
View File

@@ -0,0 +1,75 @@
# Kit's WebSocket Protocol
This format is only intended for communication for the browser build of
`hmr-runtime.ts` <-> `DevServer.zig`. Server-side HMR is implemented using a
different interface. This document is aimed for contributors to these
two components; Any other use-case is unsupported.
Every message is to use `.binary`/`ArrayBuffer` transport mode. The first byte
indicates a Message ID, with the length being inferred by the payload size.
All integers are in little-endian
## Client->Server messages
### `v`
Subscribe to visualizer packets (`v`)
## Server->Client messages
### `V`
Version payload. Sent on connection startup. The client should issue a hard-reload
when it does not match the embedded version.
Example:
```
V1.1.30-canary.37+117e1b388
```
### `(`
Hot-module-reloading patch. The entire payload is UTF-8 Encoded JavaScript Payload.
### `R` - Route reload request
Server-side code has reloaded. Client should either refetch the route or perform a hard reload.
- `u32`: Number of updated routes
- For each route:
- `u32`: Route ID
- `u16`: Length of route name.
- `[n]u8`: Route name in UTF-8 encoded text.
### `e` - Error status update
- `u32`: Number of errors removed
- For each removed error:
- `u32` Error owner
- Remainder of payload is repeating each error object:
- `u32` Error owner
- Error Payload
### `v`
Payload for `incremental_visualizer.html`. This can be accessed via `/_bun/incremental_visualizer`.
- `u32`: Number of files in client graph
- For each file in client graph
- `u32`: Length of name. If zero then no other fields are provided.
- `[n]u8`: File path in UTF-8 encoded text
- `u8`: If file is stale, set 1
- `u8`: If file is in server graph, set 1
- `u8`: If file is in ssr graph, set 1
- `u8`: If file is a server-side route root, set 1
- `u8`: If file is a server-side component boundary file, set 1
- `u32`: Number of files in the server graph
- For each file in server graph, repeat the same parser for the clienr graph
- `u32`: Number of client edges. For each,
- `u32`: File index of the dependency file
- `u32`: File index of the imported file
- `u32`: Number of server edges. For each,
- `u32`: File index of the dependency file
- `u32`: File index of the imported file

View File

@@ -7,7 +7,7 @@ import { td } from "./shared";
import { DataViewReader } from "./client/reader";
import { routeMatch } from "./client/route";
import { initWebSocket } from "./client/websocket";
import { MessageId } from "./generated";
import { MessageId } from "./enums";
if (typeof IS_BUN_DEVELOPMENT !== "boolean") {
throw new Error("DCE is configured incorrectly");
@@ -49,49 +49,18 @@ try {
console.error(e);
}
/**
* Map between CSS identifier and its style tag.
* If a file is not present in this map, it might exist as a link tag in the HTML.
*/
const cssStore = new Map<string, CSSStyleSheet>();
let isFirstRun = true;
initWebSocket({
[MessageId.version](view) {
if (td.decode(view.buffer.slice(1)) !== config.version) {
console.error("Version mismatch, hard-reloading");
location.reload();
}
if (isFirstRun) {
isFirstRun = false;
return;
}
// It would be possible to use `performRouteReload` to do a hot-reload,
// but the issue lies in possibly outdated client files. For correctness,
// all client files have to be HMR reloaded or proven unchanged.
// Configuration changes are already handled by the `config.version` data.
location.reload();
// TODO: config.version and verify everything is sane
console.log("VERSION: ", td.decode(view.buffer.slice(1)));
},
[MessageId.hot_update](view) {
const reader = new DataViewReader(view, 1);
const cssCount = reader.u32();
if (cssCount > 0) {
for (let i = 0; i < cssCount; i++) {
const moduleId = reader.stringWithLength(16);
const content = reader.string32();
reloadCss(moduleId, content);
}
}
if (reader.hasMoreData()) {
const code = td.decode(reader.rest());
const modules = (0, eval)(code);
replaceModules(modules);
}
const code = td.decode(view.buffer);
const modules = (0, eval)(code);
replaceModules(modules);
},
[MessageId.errors]: onErrorMessage,
[MessageId.errors_cleared]: onErrorClearedMessage,
[MessageId.route_update](view) {
const reader = new DataViewReader(view, 1);
let routeCount = reader.u32();
@@ -99,32 +68,11 @@ initWebSocket({
while (routeCount > 0) {
routeCount -= 1;
const routeId = reader.u32();
const routePattern = reader.string32();
const routePattern = reader.stringWithLength(reader.u16());
if (routeMatch(routeId, routePattern)) {
performRouteReload();
break;
}
}
},
[MessageId.errors]: onErrorMessage,
[MessageId.errors_cleared]: onErrorClearedMessage,
});
function reloadCss(id: string, newContent: string) {
console.log(`[Bun] Reloading CSS: ${id}`);
// TODO: can any of the following operations throw?
let sheet = cssStore.get(id);
if (!sheet) {
sheet = new CSSStyleSheet();
sheet.replace(newContent);
document.adoptedStyleSheets.push(sheet);
cssStore.set(id, sheet);
// Delete the link tag if it exists
document.querySelector(`link[href="/_bun/css/${id}.css"]`)?.remove();
return;
}
sheet.replace(newContent);
}

View File

@@ -10,7 +10,7 @@ import { decodeAndAppendError, onErrorMessage, updateErrorOverlay } from "./clie
import { DataViewReader } from "./client/reader";
import { routeMatch } from "./client/route";
import { initWebSocket } from "./client/websocket";
import { MessageId } from "./generated";
import { MessageId } from "./enums";
/** Injected by DevServer */
declare const error: Uint8Array;

View File

@@ -8,8 +8,9 @@ if (typeof IS_BUN_DEVELOPMENT !== "boolean") {
throw new Error("DCE is configured incorrectly");
}
// Server Side
server_exports = {
async handleRequest(req, routeModuleId, clientEntryUrl, styles) {
async handleRequest(req, { clientEntryPoint }, requested_id) {
const serverRenderer = loadModule<Bake.ServerEntryPoint>(config.main, LoadModuleType.AssertPresent).exports.default;
if (!serverRenderer) {
@@ -19,10 +20,9 @@ server_exports = {
throw new Error('Framework server entrypoint\'s "default" export is not a function.');
}
const response = await serverRenderer(req, loadModule(routeModuleId, LoadModuleType.AssertPresent).exports, {
styles: styles,
scripts: [clientEntryUrl],
devRoutePath: routeModuleId,
const response = await serverRenderer(req, loadModule(requested_id, LoadModuleType.AssertPresent).exports, {
styles: [],
scripts: [clientEntryPoint],
});
if (!(response instanceof Response)) {

View File

@@ -3,11 +3,14 @@ import { resolve } from "node:path";
// @ts-ignore
export async function css(file: string, is_development: boolean): string {
const { success, stdout, stderr } = await Bun.spawnSync({
cmd: [process.execPath, "build", file, "--experimental-css", ...(is_development ? [] : ["--minify"])],
cwd: import.meta.dir,
stdio: ["ignore", "pipe", "pipe"],
});
if (!success) throw new Error(stderr.toString("utf-8"));
return stdout.toString("utf-8");
// TODO: CI does not have `experimentalCss`
// const { success, stdout, stderr } = await Bun.spawnSync({
// cmd: [process.execPath, "build", file, "--experimental-css", ...(is_development ? [] : ["--minify"])],
// cwd: import.meta.dir,
// stdio: ["ignore", "pipe", "pipe"],
// });
// if (!success) throw new Error(stderr.toString("utf-8"));
// return stdout.toString("utf-8");
return readFileSync(resolve(import.meta.dir, file)).toString('utf-8');
}

View File

@@ -1,489 +0,0 @@
//! Implements building a Bake application to production
pub fn buildCommand(ctx: bun.CLI.Command.Context) !void {
if (!bun.Environment.isDebug) {
Output.errGeneric("Not yet stable. Sorry!", .{});
bun.Global.crash();
}
Output.warn(
\\Be advised that Bun Bake is highly experimental, and its API
\\will have breaking changes. Join the <magenta>#bake<r> Discord
\\channel to help us find bugs: <blue>https://bun.sh/discord<r>
\\
\\
, .{});
Output.flush();
if (ctx.args.entry_points.len > 1) {
Output.errGeneric("bun build --app only accepts one entrypoint", .{});
bun.Global.crash();
}
if (ctx.debug.hot_reload != .none) {
Output.errGeneric("Instead of using --watch, use 'bun run'", .{});
bun.Global.crash();
}
var cwd_buf: bun.PathBuffer = undefined;
const cwd = try bun.getcwd(&cwd_buf);
// Create a VM + global for loading the config file, plugins, and
// performing build time prerendering.
bun.JSC.initialize(false);
bun.JSAst.Expr.Data.Store.create();
bun.JSAst.Stmt.Data.Store.create();
var arena = try bun.MimallocArena.init();
defer arena.deinit();
const allocator = bun.default_allocator;
const vm = try VirtualMachine.init(.{
.allocator = arena.allocator(),
.log = ctx.log,
.args = ctx.args,
.smol = ctx.runtime_options.smol,
});
defer vm.deinit();
var b = &vm.bundler;
vm.preload = ctx.preloads;
vm.argv = ctx.passthrough;
vm.arena = &arena;
vm.allocator = arena.allocator();
b.options.install = ctx.install;
b.resolver.opts.install = ctx.install;
b.resolver.opts.global_cache = ctx.debug.global_cache;
b.resolver.opts.prefer_offline_install = (ctx.debug.offline_mode_setting orelse .online) == .offline;
b.resolver.opts.prefer_latest_install = (ctx.debug.offline_mode_setting orelse .online) == .latest;
b.options.global_cache = b.resolver.opts.global_cache;
b.options.prefer_offline_install = b.resolver.opts.prefer_offline_install;
b.options.prefer_latest_install = b.resolver.opts.prefer_latest_install;
b.resolver.env_loader = b.env;
b.options.minify_identifiers = ctx.bundler_options.minify_identifiers;
b.options.minify_whitespace = ctx.bundler_options.minify_whitespace;
b.options.ignore_dce_annotations = ctx.bundler_options.ignore_dce_annotations;
b.resolver.opts.minify_identifiers = ctx.bundler_options.minify_identifiers;
b.resolver.opts.minify_whitespace = ctx.bundler_options.minify_whitespace;
b.options.env.behavior = .load_all_without_inlining;
vm.event_loop.ensureWaker();
switch (ctx.debug.macros) {
.disable => {
b.options.no_macros = true;
},
.map => |macros| {
b.options.macro_remap = macros;
},
.unspecified => {},
}
b.configureDefines() catch {
bun.bun_js.failWithBuildError(vm);
};
bun.http.AsyncHTTP.loadEnv(vm.allocator, vm.log, b.env);
vm.loadExtraEnvAndSourceCodePrinter();
vm.is_main_thread = true;
JSC.VirtualMachine.is_main_thread_vm = true;
const api_lock = vm.jsc.getAPILock();
defer api_lock.release();
// Load and evaluate the configuration module
Output.prettyErrorln("Loading configuration", .{});
Output.flush();
const unresolved_config_entry_point = if (ctx.args.entry_points.len > 0) ctx.args.entry_points[0] else "./bun.app";
const config_entry_point = b.resolver.resolve(cwd, unresolved_config_entry_point, .entry_point) catch |err| {
if (err == error.ModuleNotFound) {
if (ctx.args.entry_points.len == 0) {
// Onboarding message
Output.err(err,
\\'bun build --app' cannot find your application's config file
\\
\\The default location for this is `bun.app.ts`
\\
\\TODO: insert a link to `bun.sh/docs`
, .{});
bun.Global.crash();
}
}
Output.err(err, "could not resolve application config file '{s}'", .{unresolved_config_entry_point});
bun.Global.crash();
};
const config_entry_point_string = bun.String.createUTF8(config_entry_point.pathConst().?.text);
defer config_entry_point_string.deref();
const config_promise = bun.JSC.JSModuleLoader.loadAndEvaluateModule(vm.global, &config_entry_point_string) orelse {
@panic("TODO");
};
vm.waitForPromise(.{ .internal = config_promise });
var options = switch (config_promise.unwrap(vm.jsc, .mark_handled)) {
.pending => unreachable,
.fulfilled => |resolved| config: {
bun.assert(resolved == .undefined);
const default = BakeGetDefaultExportFromModule(vm.global, config_entry_point_string.toJS(vm.global));
if (!default.isObject()) {
Output.panic("TODO: print this error better, default export is not an object", .{});
}
const app = default.get(vm.global, "app") orelse {
Output.panic("TODO: print this error better, default export needs an 'app' object", .{});
};
if (vm.global.hasException()) {
@panic("pending exception");
}
break :config bake.bakeOptionsFromJs(vm.global, app) catch |err| {
Output.panic("TODO, print this error better: {}", .{err});
};
},
.rejected => |err| {
// dont run on rejected since we fail the build here
vm.printErrorLikeObjectToConsole(err);
if (vm.exit_handler.exit_code == 0) {
vm.exit_handler.exit_code = 1;
}
vm.globalExit();
},
};
const framework = &options.framework;
const separate_ssr_graph = if (framework.server_components) |sc| sc.separate_ssr_graph else false;
// this is probably wrong
const map = try allocator.create(bun.DotEnv.Map);
map.* = bun.DotEnv.Map.init(allocator);
const loader = try allocator.create(bun.DotEnv.Loader);
loader.* = bun.DotEnv.Loader.init(map, allocator);
try loader.map.put("NODE_ENV", "production");
bun.DotEnv.instance = loader;
var client_bundler: bun.bundler.Bundler = undefined;
var server_bundler: bun.bundler.Bundler = undefined;
var ssr_bundler: bun.bundler.Bundler = undefined;
try framework.initBundler(allocator, vm.log, .production, .server, &server_bundler);
try framework.initBundler(allocator, vm.log, .production, .client, &client_bundler);
if (separate_ssr_graph) {
try framework.initBundler(allocator, vm.log, .production, .ssr, &ssr_bundler);
}
// these share pointers right now, so setting NODE_ENV == production
bun.assert(server_bundler.env == client_bundler.env);
framework.* = framework.resolve(&server_bundler.resolver, &client_bundler.resolver) catch {
Output.errGeneric("Failed to resolve all imports required by the framework", .{});
bun.Global.crash();
};
Output.prettyErrorln("Bundling routes", .{});
Output.flush();
// trailing slash
const public_path = "/";
var root_dir_buf: bun.PathBuffer = undefined;
const root_dir_path = bun.path.joinAbsStringBuf(cwd, &root_dir_buf, &.{"dist"}, .auto);
const root_path_trailing = root_dir_path.ptr[0 .. root_dir_path.len + 1];
_ = root_path_trailing; // autofix
root_dir_buf[root_dir_path.len] = std.fs.path.sep;
// server_bundler.options.public_path = root_path_trailing;
// server_bundler.resolver.opts.public_path = root_path_trailing;
var entry_points = std.ArrayList(BakeEntryPoint).init(allocator);
// the ordering of these entrypoints is relied on when inspecting the output chunks.
try entry_points.append(BakeEntryPoint.init(framework.entry_server, .server));
try entry_points.append(BakeEntryPoint.initClientWrapped(framework.entry_client, .client));
for (options.routes) |route| {
try entry_points.append(BakeEntryPoint.init(route.entry_point, .server));
}
const bundled_outputs = try bun.BundleV2.generateFromBakeProductionCLI(
entry_points.items,
&server_bundler,
.{
.framework = framework.*,
.client_bundler = &client_bundler,
.ssr_bundler = if (separate_ssr_graph) &ssr_bundler else &server_bundler,
},
allocator,
.{ .js = vm.event_loop },
);
Output.prettyErrorln("Rendering routes", .{});
Output.flush();
// A separate global object is used for isolation + controlling the available modules
const render_global = BakeCreateProdGlobal(vm.jsc, vm.console);
var root_dir = try std.fs.cwd().makeOpenPath("dist", .{});
defer root_dir.close();
var client_entry_id: u32 = std.math.maxInt(u32);
var server_entry_module_key: JSValue = .undefined;
const route_module_keys = JSValue.createEmptyArray(render_global, options.routes.len);
const route_output_indices = try allocator.alloc(OutputFile.Index, options.routes.len);
var css_chunks_count: usize = 0;
var css_chunks_first: usize = 0;
for (bundled_outputs.items, 0..) |file, i| {
// std.debug.print("{s} - {s} : {s} - {?d}\n", .{
// if (file.side) |s| @tagName(s) else "null",
// file.src_path.text,
// file.dest_path,
// file.entry_point_index,
// });
// std.debug.print("css: {d}\n", .{bun.fmt.fmtSlice(file.referenced_css_files, ", ")});
if (file.loader == .css) {
if (css_chunks_count == 0) css_chunks_first = i;
css_chunks_count += 1;
}
switch (file.side orelse .client) {
.client => {
// client-side resources will be written to disk for usage in on the client side
_ = try file.writeToDisk(root_dir, root_dir_path);
if (file.entry_point_index) |entry_point| {
switch (entry_point) {
1 => client_entry_id = @intCast(i),
else => {},
}
}
},
.server => {
// For Debugging
if (ctx.bundler_options.bake_debug_dump_server)
_ = try file.writeToDisk(root_dir, root_dir_path);
switch (file.output_kind) {
.@"entry-point", .chunk => {
var buf: bun.PathBuffer = undefined;
// TODO: later we can lazily register modules
const module_key = BakeRegisterProductionChunk(
render_global,
bun.String.createUTF8(bun.path.joinAbsStringBuf(cwd, &buf, &.{
root_dir_path,
file.dest_path,
}, .auto)),
file.value.toBunString(),
) catch |err| {
vm.printErrorLikeObjectToConsole(render_global.takeException(err));
if (vm.exit_handler.exit_code == 0) {
vm.exit_handler.exit_code = 1;
}
Output.errGeneric("could not load bundled chunk {} for server-side rendering", .{
bun.fmt.quote(file.dest_path),
});
vm.globalExit();
};
if (file.entry_point_index) |entry_point| {
// classify the entry point. since entry point source indices are
// deterministic, we can map every single one back to the route or
// framework file.
switch (entry_point) {
0 => server_entry_module_key = module_key,
1 => {}, // client entry
else => |j| {
// SCBs are entry points past the two framework entry points
const route_index = j - 2;
if (route_index < options.routes.len) {
route_module_keys.putIndex(vm.global, route_index, module_key);
route_output_indices[route_index] = OutputFile.Index.init(@intCast(i));
}
},
}
}
},
.asset => {},
.bytecode => {},
.sourcemap => @panic("TODO: register source map"),
}
},
}
}
// TODO: umm...
// const primary_global = vm.global;
// vm.global = render_global;
// _ = primary_global;
bun.assert(client_entry_id != std.math.maxInt(u32));
bun.assert(server_entry_module_key != .undefined);
// HACK: react-server-dom-webpack assigns to `__webpack_require__.u`
// We never call this in this context, so we will just make '__webpack_require__' an empty object.
// Right now server.tsx is what controls the value, but imports happen first.
render_global.toJSValue().put(render_global, "__webpack_require__", JSValue.createEmptyObject(render_global, 0));
// Static site generator
const server_entry_point = loadModule(vm, render_global, server_entry_module_key);
const server_render_func: JSValue = BakeGetOnModuleNamespace(render_global, server_entry_point, "renderStatic") orelse {
Output.errGeneric("Framework does not support static site generation", .{});
Output.note("The file {s} is missing the \"renderStatic\" export", .{bun.fmt.quote(framework.entry_server)});
bun.Global.crash();
};
const route_patterns = JSValue.createEmptyArray(render_global, options.routes.len);
const route_style_references = JSValue.createEmptyArray(render_global, options.routes.len);
const css_chunk_js_strings = try allocator.alloc(JSValue, css_chunks_count);
for (bundled_outputs.items[css_chunks_first..][0..css_chunks_count], css_chunk_js_strings) |output_file, *str| {
bun.assert(output_file.dest_path[0] != '.');
bun.assert(output_file.loader == .css);
str.* = (try bun.String.createFormat("{s}{s}", .{ public_path, output_file.dest_path })).toJS(render_global);
}
for (
options.routes,
route_output_indices,
0..,
) |route, output_file_i, i| {
route_patterns.putIndex(render_global, @intCast(i), bun.String.createUTF8(route.pattern).toJS(render_global));
const output_file = &bundled_outputs.items[output_file_i.get()];
const styles = JSValue.createEmptyArray(render_global, output_file.referenced_css_files.len);
for (output_file.referenced_css_files, 0..) |ref, j| {
styles.putIndex(render_global, @intCast(j), css_chunk_js_strings[ref.get() - css_chunks_first]);
}
route_style_references.putIndex(render_global, @intCast(i), styles);
}
const client_entry_url = (try bun.String.createFormat("{s}{s}", .{
public_path,
bundled_outputs.items[client_entry_id].dest_path,
})).toJS(render_global);
const render_promise = BakeRenderRoutesForProd(
render_global,
bun.String.init(root_dir_path),
server_render_func,
client_entry_url,
route_module_keys,
route_patterns,
route_style_references,
);
vm.waitForPromise(.{ .normal = render_promise });
switch (render_promise.unwrap(vm.jsc, .mark_handled)) {
.pending => unreachable,
.fulfilled => {
Output.prettyln("done", .{});
Output.flush();
},
.rejected => |err| {
vm.printErrorLikeObjectToConsole(err);
if (vm.exit_handler.exit_code == 0) {
vm.exit_handler.exit_code = 1;
}
vm.globalExit();
},
}
}
/// Unsafe function: must be run outside of the event loop, since it blocks the
/// thread until the module's evaluation promise settles.
/// Quits the process on exception (the `.rejected` arm never returns).
///
/// `key` is a registered module key; on success the module's namespace object
/// is returned.
fn loadModule(vm: *VirtualMachine, global: *JSC.JSGlobalObject, key: JSValue) JSValue {
    // Start (or resume) evaluation of the module behind `key`, then block until
    // the resulting promise settles.
    const promise = BakeLoadModuleByKey(global, key).asAnyPromise().?.internal;
    vm.waitForPromise(.{ .internal = promise });
    switch (promise.unwrap(vm.jsc, .mark_handled)) {
        // waitForPromise only returns once the promise has settled.
        .pending => unreachable,
        .fulfilled => |val| {
            // Module evaluation resolves with `undefined`; the namespace object
            // is fetched separately via the module loader.
            bun.assert(val == .undefined);
            return BakeGetModuleNamespace(global, key);
        },
        .rejected => |err| {
            // Print the rejection, force a non-zero exit code if one isn't
            // already set, and terminate the whole process.
            vm.printErrorLikeObjectToConsole(err);
            if (vm.exit_handler.exit_code == 0) {
                vm.exit_handler.exit_code = 1;
            }
            vm.globalExit();
        },
    }
}
// extern apis (implemented on the native side):
// TODO: Dedupe

/// Returns the `default` export of the module registered under `key`.
/// The result is inspected by the caller (`isObject`, `.get("app")`).
extern fn BakeGetDefaultExportFromModule(global: *JSC.JSGlobalObject, key: JSValue) JSValue;
/// Returns the namespace object of the module registered under `key`.
extern fn BakeGetModuleNamespace(global: *JSC.JSGlobalObject, key: JSValue) JSValue;
/// Begins loading/evaluating the module registered under `key`; the returned
/// value is convertible to a promise (see `loadModule`'s `asAnyPromise()` call).
extern fn BakeLoadModuleByKey(global: *JSC.JSGlobalObject, key: JSValue) JSValue;
/// Looks up `property` on the namespace object of `module`.
///
/// NOTE(review): the return type is optional and the call site handles `null`
/// ("Framework does not support static site generation"), but this function can
/// never return null — `result` is asserted non-zero and returned
/// unconditionally, so that `orelse` branch is dead. Confirm what the extern
/// returns when the export is missing (`.zero`? `.undefined`?) and map that
/// case to `null`.
fn BakeGetOnModuleNamespace(global: *JSC.JSGlobalObject, module: JSValue, property: []const u8) ?JSValue {
    // Resolved lazily by symbol name; the property is passed as ptr+len since
    // the C ABI cannot take a Zig slice directly.
    const f = @extern(*const fn (*JSC.JSGlobalObject, JSValue, [*]const u8, usize) callconv(.C) JSValue, .{
        .name = "BakeGetOnModuleNamespace",
    });
    const result: JSValue = f(global, module, property.ptr, property.len);
    bun.assert(result != .zero);
    return result;
}
/// Renders all routes on the JS side (static site generation).
/// `arr` (module keys), `patterns` (URL patterns), and `styles` (per-route CSS
/// URL arrays) are parallel JS arrays indexed by route; `out_base` is the
/// output directory path.
extern fn BakeRenderRoutesForProd(
    *JSC.JSGlobalObject,
    out_base: bun.String,
    render_static_cb: JSValue,
    client_entry_url: JSValue,
    arr: JSValue,
    patterns: JSValue,
    styles: JSValue,
) *JSC.JSPromise;

/// Creates the separate global object used for production rendering
/// (isolation + control over the available modules).
extern fn BakeCreateProdGlobal(vm: *JSC.VM, console_ptr: *anyopaque) *JSC.JSGlobalObject;
/// Registers a bundled chunk's source code with the production module loader
/// under `key` (an absolute path string).
///
/// The result of this function is a JSValue that wont be garbage collected, as
/// it will always have at least one reference by the module loader.
///
/// Returns `error.JSError` when the native side reports a pending exception.
fn BakeRegisterProductionChunk(global: *JSC.JSGlobalObject, key: bun.String, source_code: bun.String) bun.JSError!JSValue {
    const register = @extern(*const fn (*JSC.JSGlobalObject, bun.String, bun.String) callconv(.C) JSValue, .{
        .name = "BakeRegisterProductionChunk",
    });
    const module_key: JSValue = register(global, key, source_code);
    if (module_key == .zero) {
        // A zero value signals that an exception is pending on the global.
        return error.JSError;
    }
    // On success the loader hands back the module key as a JS string.
    bun.assert(module_key.isString());
    return module_key;
}
/// Module resolver for production-rendered chunks, exported to the native side
/// (see the `comptime` export below). Resolves `specifier_str` relative to the
/// referrer path `a_str`.
fn BakeProdResolve(global: *JSC.JSGlobalObject, a_str: bun.String, specifier_str: bun.String) callconv(.C) bun.String {
    // Two path buffers on the stack cover the common case without heap use.
    var stack_fallback = std.heap.stackFallback(@sizeOf(bun.PathBuffer) * 2, bun.default_allocator);
    const allocator = stack_fallback.get();

    const specifier = specifier_str.toUTF8(allocator);
    defer specifier.deinit();

    // Hardcoded module aliases (builtin specifiers) short-circuit resolution.
    if (JSC.HardcodedModule.Aliases.get(specifier.slice(), .bun)) |alias| {
        return bun.String.static(alias.path);
    }

    const referrer = a_str.toUTF8(allocator);
    defer referrer.deinit();

    // Bundled production assets must only import relative paths; anything else
    // means the bundler emitted a bad specifier.
    if (bun.resolver.isPackagePath(specifier.slice())) {
        global.throw("Non-relative import {} from {} are not allowed in production assets. This is a bug in Bun's bundler", .{
            bun.fmt.quote(specifier.slice()),
            bun.fmt.quote(referrer.slice()),
        });
        return bun.String.dead;
    }

    // Join the specifier onto the referrer's directory to get an absolute path.
    const referrer_dir = bun.Dirname.dirname(u8, referrer.slice()) orelse referrer.slice();
    return bun.String.createUTF8(bun.path.joinAbs(
        referrer_dir,
        .auto,
        specifier.slice(),
    ));
}
// Export BakeProdResolve under its C name for the native side, but only when
// the Bake feature flag is compiled in.
comptime {
    if (bun.FeatureFlags.bake)
        @export(BakeProdResolve, .{ .name = "BakeProdResolve" });
}
const std = @import("std");
const bun = @import("root").bun;
const bake = bun.bake;
const Environment = bun.Environment;
const Output = bun.Output;
const BakeEntryPoint = bun.bundle_v2.BakeEntryPoint;
const OutputFile = bun.options.OutputFile;
const JSC = bun.JSC;
const JSValue = JSC.JSValue;
const VirtualMachine = JSC.VirtualMachine;

View File

@@ -3,8 +3,6 @@ const std = @import("std");
pub fn Bitflags(comptime T: type) type {
const tyinfo = @typeInfo(T);
const IntType = tyinfo.Struct.backing_integer.?;
const IntTypeInfo = @typeInfo(IntType);
const IntRepresentingNumOfBits = std.math.IntFittingRange(0, IntTypeInfo.Int.bits);
return struct {
pub inline fn empty() T {
@@ -42,7 +40,7 @@ pub fn Bitflags(comptime T: type) type {
}
pub inline fn remove(this: *T, other: T) void {
this.* = @bitCast(asBits(this.*) & ~asBits(other));
this.* = bitwiseAnd(this.*, ~other);
}
pub inline fn maskOut(this: T, other: T) T {
@@ -53,36 +51,6 @@ pub fn Bitflags(comptime T: type) type {
return @as(IntType, @bitCast(lhs)) & @as(IntType, @bitCast(rhs)) != 0;
}
pub inline fn leadingZeroes(this: T) IntRepresentingNumOfBits {
return @clz(asBits(this));
}
pub inline fn all() T {
var ret: T = @bitCast(@as(IntType, 0));
@setEvalBranchQuota(5000);
inline for (std.meta.fields(T)) |field| {
if (comptime !std.mem.eql(u8, field.name, "__unused")) {
@field(ret, field.name) = true;
}
}
return ret;
}
pub inline fn not(this: T) T {
return fromBitsTruncate(~asBits(this));
}
pub inline fn difference(lhs: T, rhs: T) T {
// 1100 1100 1100
// 1010 0101 0100
return @bitCast(asBits(lhs) & asBits(not(rhs)));
}
/// Convert from a bits value, unsetting any unknown bits.
pub inline fn fromBitsTruncate(bits: IntType) T {
return bitwiseAnd(@bitCast(bits), all());
}
pub inline fn asBits(this: T) IntType {
return @as(IntType, @bitCast(this));
}

View File

@@ -69,22 +69,22 @@ pub const ResolveMessage = struct {
switch (err) {
error.ModuleNotFound => {
if (strings.eqlComptime(referrer, "bun:main")) {
return try std.fmt.allocPrint(allocator, "Module not found '{s}'", .{specifier});
return try std.fmt.allocPrint(allocator, "Module not found \"{s}\"", .{specifier});
}
if (Resolver.isPackagePath(specifier) and !strings.containsChar(specifier, '/')) {
return try std.fmt.allocPrint(allocator, "Cannot find package '{s}' from '{s}'", .{ specifier, referrer });
return try std.fmt.allocPrint(allocator, "Cannot find package \"{s}\" from \"{s}\"", .{ specifier, referrer });
} else {
return try std.fmt.allocPrint(allocator, "Cannot find module '{s}' from '{s}'", .{ specifier, referrer });
return try std.fmt.allocPrint(allocator, "Cannot find module \"{s}\" from \"{s}\"", .{ specifier, referrer });
}
},
error.InvalidDataURL => {
return try std.fmt.allocPrint(allocator, "Cannot resolve invalid data URL '{s}' from '{s}'", .{ specifier, referrer });
return try std.fmt.allocPrint(allocator, "Cannot resolve invalid data URL \"{s}\" from \"{s}\"", .{ specifier, referrer });
},
else => {
if (Resolver.isPackagePath(specifier)) {
return try std.fmt.allocPrint(allocator, "{s} while resolving package '{s}' from '{s}'", .{ @errorName(err), specifier, referrer });
return try std.fmt.allocPrint(allocator, "{s} while resolving package \"{s}\" from \"{s}\"", .{ @errorName(err), specifier, referrer });
} else {
return try std.fmt.allocPrint(allocator, "{s} while resolving '{s}' from '{s}'", .{ @errorName(err), specifier, referrer });
return try std.fmt.allocPrint(allocator, "{s} while resolving \"{s}\" from \"{s}\"", .{ @errorName(err), specifier, referrer });
}
},
}

View File

@@ -4,8 +4,7 @@
/// Version 5: `require.main === module` no longer marks a module as CJS
/// Version 6: `use strict` is preserved in CommonJS modules when at the top of the file
/// Version 7: Several bundler changes that are likely to impact the runtime as well.
/// Version 8: Fix for generated symbols
const expected_version = 8;
const expected_version = 7;
const bun = @import("root").bun;
const std = @import("std");

View File

@@ -1221,10 +1221,9 @@ pub const Crypto = struct {
pub usingnamespace bun.New(@This());
pub fn init(algorithm: EVP.Algorithm, key: []const u8) ?*HMAC {
const md = algorithm.md() orelse return null;
var ctx: BoringSSL.HMAC_CTX = undefined;
BoringSSL.HMAC_CTX_init(&ctx);
if (BoringSSL.HMAC_Init_ex(&ctx, key.ptr, @intCast(key.len), md, null) != 1) {
if (BoringSSL.HMAC_Init_ex(&ctx, key.ptr, @intCast(key.len), algorithm.md(), null) != 1) {
BoringSSL.HMAC_CTX_cleanup(&ctx);
return null;
}
@@ -2646,7 +2645,7 @@ pub const Crypto = struct {
inline else => |*str| {
defer str.deinit();
const encoding = JSC.Node.Encoding.from(str.slice()) orelse {
globalThis.ERR_INVALID_ARG_VALUE("Unknown encoding: {s}", .{str.slice()}).throw();
globalThis.throwInvalidArguments("Unknown encoding: {s}", .{str.slice()});
return JSC.JSValue.zero;
};
@@ -2715,7 +2714,7 @@ pub const Crypto = struct {
BoringSSL.ERR_clear_error();
globalThis.throwValue(instance);
} else {
globalThis.throwTODO("HMAC is not supported for this algorithm yet");
globalThis.throwTODO("HMAC is not supported for this algorithm");
}
}
return null;
@@ -2834,7 +2833,7 @@ pub const Crypto = struct {
inline else => |*str| {
defer str.deinit();
const encoding = JSC.Node.Encoding.from(str.slice()) orelse {
globalThis.ERR_INVALID_ARG_VALUE("Unknown encoding: {s}", .{str.slice()}).throw();
globalThis.throwInvalidArguments("Unknown encoding: {}", .{str.*});
return JSC.JSValue.zero;
};
@@ -2965,16 +2964,8 @@ pub const Crypto = struct {
switch (string_or_buffer) {
inline else => |*str| {
defer str.deinit();
const encoding = JSC.Node.Encoding.from(str.slice()) orelse {
globalThis.ERR_INVALID_ARG_VALUE("Unknown encoding: {s}", .{str.slice()}).throw();
return JSC.JSValue.zero;
};
if (encoding == .buffer) {
return hashByNameInnerToBytes(globalThis, Algorithm, input, null);
}
return hashByNameInnerToString(globalThis, Algorithm, input, encoding);
globalThis.throwInvalidArguments("Unknown encoding: {s}", .{str.slice()});
return JSC.JSValue.zero;
},
.buffer => |buffer| {
return hashByNameInnerToBytes(globalThis, Algorithm, input, buffer.buffer);
@@ -2984,23 +2975,6 @@ pub const Crypto = struct {
return hashByNameInnerToBytes(globalThis, Algorithm, input, null);
}
fn hashByNameInnerToString(globalThis: *JSGlobalObject, comptime Algorithm: type, input: JSC.Node.BlobOrStringOrBuffer, encoding: JSC.Node.Encoding) JSC.JSValue {
defer input.deinit();
if (input == .blob and input.blob.isBunFile()) {
globalThis.throw("Bun.file() is not supported here yet (it needs an async version)", .{});
return .zero;
}
var h = Algorithm.init(.{});
h.update(input.slice());
var out: [digestLength(Algorithm)]u8 = undefined;
h.final(&out);
return encoding.encodeWithSize(globalThis, digestLength(Algorithm), &out);
}
fn hashByNameInnerToBytes(globalThis: *JSGlobalObject, comptime Algorithm: type, input: JSC.Node.BlobOrStringOrBuffer, output: ?JSC.ArrayBuffer) JSC.JSValue {
defer input.deinit();
@@ -3090,7 +3064,6 @@ pub const Crypto = struct {
fn StaticCryptoHasher(comptime Hasher: type, comptime name: [:0]const u8) type {
return struct {
hashing: Hasher = Hasher{},
digested: bool = false,
const ThisHasher = @This();
@@ -3182,7 +3155,7 @@ pub const Crypto = struct {
inline else => |*str| {
defer str.deinit();
const encoding = JSC.Node.Encoding.from(str.slice()) orelse {
globalThis.ERR_INVALID_ARG_VALUE("Unknown encoding: {s}", .{str.slice()}).throw();
globalThis.throwInvalidArguments("Unknown encoding: {s}", .{str.slice()});
return JSC.JSValue.zero;
};
@@ -3212,10 +3185,6 @@ pub const Crypto = struct {
}
pub fn update(this: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JSC.JSValue {
if (this.digested) {
globalThis.ERR_INVALID_STATE(name ++ " hasher already digested, create a new instance to update", .{}).throw();
return .zero;
}
const thisValue = callframe.this();
const input = callframe.argument(0);
const buffer = JSC.Node.BlobOrStringOrBuffer.fromJS(globalThis, globalThis.bunVM().allocator, input) orelse {
@@ -3237,16 +3206,12 @@ pub const Crypto = struct {
globalThis: *JSGlobalObject,
output: ?JSC.Node.StringOrBuffer,
) JSC.JSValue {
if (this.digested) {
globalThis.ERR_INVALID_STATE(name ++ " hasher already digested, create a new instance to digest again", .{}).throw();
return .zero;
}
if (output) |*string_or_buffer| {
switch (string_or_buffer.*) {
inline else => |*str| {
defer str.deinit();
const encoding = JSC.Node.Encoding.from(str.slice()) orelse {
globalThis.ERR_INVALID_ARG_VALUE("Unknown encoding: {s}", .{str.slice()}).throw();
globalThis.throwInvalidArguments("Unknown encoding: \"{s}\"", .{str.slice()});
return JSC.JSValue.zero;
};
@@ -3279,7 +3244,6 @@ pub const Crypto = struct {
}
this.hashing.final(output_digest_slice);
this.digested = true;
if (output) |output_buf| {
return output_buf.value;
@@ -3303,7 +3267,6 @@ pub const Crypto = struct {
const output_digest_slice: *Hasher.Digest = &output_digest_buf;
this.hashing.final(output_digest_slice);
this.digested = true;
return encoding.encodeWithSize(globalThis, Hasher.digest, output_digest_slice);
}
@@ -3342,24 +3305,25 @@ pub fn serve(
const exception = &exception_;
var args = JSC.Node.ArgumentsSlice.init(globalObject.bunVM(), arguments);
var config: JSC.API.ServerConfig = .{};
JSC.API.ServerConfig.fromJS(globalObject, &config, &args, exception);
var config_ = JSC.API.ServerConfig.fromJS(globalObject.ptr(), &args, exception);
if (exception[0] != null) {
config.deinit();
config_.deinit();
globalObject.throwValue(exception_[0].?.value());
return .zero;
return .undefined;
}
if (globalObject.hasException()) {
config.deinit();
config_.deinit();
return .zero;
}
break :brk config;
break :brk config_;
};
var exception_value: *JSC.JSValue = undefined;
if (config.allow_hot) {
if (globalObject.bunVM().hotMap()) |hot| {
if (config.id.len == 0) {
@@ -3394,43 +3358,98 @@ pub fn serve(
}
}
switch (config.ssl_config != null) {
inline else => |has_ssl_config| {
switch (config.development) {
inline else => |development| {
const ServerType = comptime switch (development) {
true => switch (has_ssl_config) {
true => JSC.API.DebugHTTPSServer,
false => JSC.API.DebugHTTPServer,
},
false => switch (has_ssl_config) {
true => JSC.API.HTTPSServer,
false => JSC.API.HTTPServer,
},
};
var server = ServerType.init(config, globalObject);
if (globalObject.hasException()) {
return .zero;
}
server.listen();
if (globalObject.hasException()) {
return .zero;
}
const obj = server.toJS(globalObject);
obj.protect();
server.thisObject = obj;
if (config.allow_hot) {
if (globalObject.bunVM().hotMap()) |hot| {
hot.insert(config.id, server);
}
}
return obj;
},
// Listen happens on the next tick!
// This is so we can return a Server object
if (config.ssl_config != null) {
if (config.development) {
var server = JSC.API.DebugHTTPSServer.init(config, globalObject.ptr());
exception_value = &server.thisObject;
server.listen();
if (!server.thisObject.isEmpty()) {
exception_value.unprotect();
globalObject.throwValue(server.thisObject);
server.thisObject = JSC.JSValue.zero;
server.deinit();
return .zero;
}
},
const obj = server.toJS(globalObject);
obj.protect();
server.thisObject = obj;
if (config.allow_hot) {
if (globalObject.bunVM().hotMap()) |hot| {
hot.insert(config.id, server);
}
}
return obj;
} else {
var server = JSC.API.HTTPSServer.init(config, globalObject.ptr());
exception_value = &server.thisObject;
server.listen();
if (!exception_value.isEmpty()) {
exception_value.unprotect();
globalObject.throwValue(exception_value.*);
server.thisObject = JSC.JSValue.zero;
server.deinit();
return .zero;
}
const obj = server.toJS(globalObject);
obj.protect();
server.thisObject = obj;
if (config.allow_hot) {
if (globalObject.bunVM().hotMap()) |hot| {
hot.insert(config.id, server);
}
}
return obj;
}
} else {
if (config.development) {
var server = JSC.API.DebugHTTPServer.init(config, globalObject.ptr());
exception_value = &server.thisObject;
server.listen();
if (!exception_value.isEmpty()) {
exception_value.unprotect();
globalObject.throwValue(exception_value.*);
server.thisObject = JSC.JSValue.zero;
server.deinit();
return .zero;
}
const obj = server.toJS(globalObject);
obj.protect();
server.thisObject = obj;
if (config.allow_hot) {
if (globalObject.bunVM().hotMap()) |hot| {
hot.insert(config.id, server);
}
}
return obj;
} else {
var server = JSC.API.HTTPServer.init(config, globalObject.ptr());
exception_value = &server.thisObject;
server.listen();
if (!exception_value.isEmpty()) {
exception_value.unprotect();
globalObject.throwValue(exception_value.*);
server.thisObject = JSC.JSValue.zero;
server.deinit();
return .zero;
}
const obj = server.toJS(globalObject);
obj.protect();
server.thisObject = obj;
if (config.allow_hot) {
if (globalObject.bunVM().hotMap()) |hot| {
hot.insert(config.id, server);
}
}
return obj;
}
}
unreachable;

View File

@@ -75,7 +75,6 @@ pub const JSBundler = struct {
banner: OwnedString = OwnedString.initEmpty(bun.default_allocator),
footer: OwnedString = OwnedString.initEmpty(bun.default_allocator),
experimental_css: bool = false,
css_chunking: bool = false,
drop: bun.StringSet = bun.StringSet.init(bun.default_allocator),
pub const List = bun.StringArrayHashMapUnmanaged(Config);
@@ -100,15 +99,7 @@ pub const JSBundler = struct {
errdefer if (plugins.*) |plugin| plugin.deinit();
if (config.getTruthy(globalThis, "experimentalCss")) |enable_css| {
this.experimental_css = if (enable_css.isBoolean())
enable_css.toBoolean()
else if (enable_css.isObject()) true: {
if (enable_css.getTruthy(globalThis, "chunking")) |enable_chunking| {
this.css_chunking = if (enable_chunking.isBoolean()) enable_css.toBoolean() else false;
}
break :true true;
} else false;
this.experimental_css = if (enable_css.isBoolean()) enable_css.toBoolean() else false;
}
// Plugins must be resolved first as they are allowed to mutate the config JSValue
@@ -1096,6 +1087,9 @@ pub const BuildArtifact = struct {
chunk,
asset,
@"entry-point",
@"component-manifest",
@"use client",
@"use server",
sourcemap,
bytecode,

View File

@@ -826,7 +826,9 @@ pub fn constructor(
bundler.options.auto_import_jsx = transpiler_options.runtime.auto_import_jsx;
bundler.options.inlining = transpiler_options.runtime.inlining;
bundler.options.hot_module_reloading = transpiler_options.runtime.hot_module_reloading;
bundler.options.react_fast_refresh = false;
bundler.options.react_fast_refresh = bundler.options.hot_module_reloading and
bundler.options.allow_runtime and
transpiler_options.runtime.react_fast_refresh;
const transpiler = allocator.create(Transpiler) catch unreachable;
transpiler.* = Transpiler{

View File

@@ -1141,7 +1141,6 @@ pub const H2FrameParser = struct {
this.signal = null;
signal.deinit();
}
JSC.VirtualMachine.get().eventLoop().processGCTimer();
}
};
@@ -1612,7 +1611,7 @@ pub const H2FrameParser = struct {
// fallback to onWrite non-native callback
const output_value = this.handlers.binary_type.toJS(bytes, this.handlers.globalObject);
const result = this.call(.onWrite, output_value);
const code = if (result.isNumber()) result.to(i32) else -1;
const code = result.to(i32);
switch (code) {
-1 => {
// dropped
@@ -1758,7 +1757,7 @@ pub const H2FrameParser = struct {
return data.len;
}
pub fn decodeHeaderBlock(this: *H2FrameParser, payload: []const u8, stream: *Stream, flags: u8) ?*Stream {
pub fn decodeHeaderBlock(this: *H2FrameParser, payload: []const u8, stream: *Stream, flags: u8) *Stream {
log("decodeHeaderBlock isSever: {}", .{this.isServer});
var offset: usize = 0;
@@ -1777,9 +1776,7 @@ pub const H2FrameParser = struct {
log("header {s} {s}", .{ header.name, header.value });
if (this.isServer and strings.eqlComptime(header.name, ":status")) {
this.sendGoAway(stream_id, ErrorCode.PROTOCOL_ERROR, "Server received :status header", this.lastStreamID, true);
if (this.streams.getEntry(stream_id)) |entry| return entry.value_ptr;
return null;
return this.streams.getEntry(stream_id).?.value_ptr;
}
count += 1;
if (this.maxHeaderListPairs < count) {
@@ -1789,8 +1786,7 @@ pub const H2FrameParser = struct {
} else {
this.endStream(stream, ErrorCode.ENHANCE_YOUR_CALM);
}
if (this.streams.getEntry(stream_id)) |entry| return entry.value_ptr;
return null;
return this.streams.getEntry(stream_id).?.value_ptr;
}
const output = brk: {
@@ -1821,8 +1817,7 @@ pub const H2FrameParser = struct {
this.dispatchWith3Extra(.onStreamHeaders, stream.getIdentifier(), headers, sensitiveHeaders, JSC.JSValue.jsNumber(flags));
// callbacks can change the Stream ptr in this case we always return the new one
if (this.streams.getEntry(stream_id)) |entry| return entry.value_ptr;
return null;
return this.streams.getEntry(stream_id).?.value_ptr;
}
pub fn handleDataFrame(this: *H2FrameParser, frame: FrameHeader, data: []const u8, stream_: ?*Stream) usize {
@@ -1887,8 +1882,7 @@ pub const H2FrameParser = struct {
this.currentFrame = null;
if (emitted) {
// we need to revalidate the stream ptr after emitting onStreamData
const entry = this.streams.getEntry(frame.streamIdentifier) orelse return end;
stream = entry.value_ptr;
stream = this.streams.getEntry(frame.streamIdentifier).?.value_ptr;
}
if (frame.flags & @intFromEnum(DataFrameFlags.END_STREAM) != 0) {
const identifier = stream.getIdentifier();
@@ -2035,10 +2029,7 @@ pub const H2FrameParser = struct {
}
if (handleIncommingPayload(this, data, frame.streamIdentifier)) |content| {
const payload = content.data;
stream = this.decodeHeaderBlock(payload[0..payload.len], stream, frame.flags) orelse {
this.readBuffer.reset();
return content.end;
};
stream = this.decodeHeaderBlock(payload[0..payload.len], stream, frame.flags);
this.readBuffer.reset();
if (frame.flags & @intFromEnum(HeadersFrameFlags.END_HEADERS) != 0) {
stream.isWaitingMoreHeaders = false;
@@ -2101,10 +2092,7 @@ pub const H2FrameParser = struct {
this.sendGoAway(frame.streamIdentifier, ErrorCode.FRAME_SIZE_ERROR, "invalid Headers frame size", this.lastStreamID, true);
return data.len;
}
stream = this.decodeHeaderBlock(payload[offset..end], stream, frame.flags) orelse {
this.readBuffer.reset();
return content.end;
};
stream = this.decodeHeaderBlock(payload[offset..end], stream, frame.flags);
this.readBuffer.reset();
stream.isWaitingMoreHeaders = frame.flags & @intFromEnum(HeadersFrameFlags.END_HEADERS) == 0;
if (frame.flags & @intFromEnum(HeadersFrameFlags.END_STREAM) != 0) {
@@ -3265,26 +3253,7 @@ pub const H2FrameParser = struct {
}
return array;
}
pub fn emitAbortToAllStreams(this: *H2FrameParser, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSC.JSValue {
JSC.markBinding(@src());
var it = StreamResumableIterator.init(this);
while (it.next()) |stream| {
// this is the oposite logic of emitErrorToallStreams, in this case we wanna to cancel this streams
if (this.isServer) {
if (stream.id % 2 == 0) continue;
} else if (stream.id % 2 != 0) continue;
if (stream.state != .CLOSED) {
const old_state = stream.state;
stream.state = .CLOSED;
stream.rstCode = @intFromEnum(ErrorCode.CANCEL);
const identifier = stream.getIdentifier();
identifier.ensureStillAlive();
stream.freeResources(this, false);
this.dispatchWith2Extra(.onAborted, identifier, .undefined, JSC.JSValue.jsNumber(@intFromEnum(old_state)));
}
}
return .undefined;
}
pub fn emitErrorToAllStreams(this: *H2FrameParser, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) JSC.JSValue {
JSC.markBinding(@src());
@@ -3296,9 +3265,6 @@ pub const H2FrameParser = struct {
var it = StreamResumableIterator.init(this);
while (it.next()) |stream| {
if (this.isServer) {
if (stream.id % 2 != 0) continue;
} else if (stream.id % 2 == 0) continue;
if (stream.state != .CLOSED) {
stream.state = .CLOSED;
stream.rstCode = args_list.ptr[0].to(u32);
@@ -3709,7 +3675,6 @@ pub const H2FrameParser = struct {
}
const socket_js = args_list.ptr[0];
this.detachNativeSocket();
if (JSTLSSocket.fromJS(socket_js)) |socket| {
log("TLSSocket attached", .{});
if (socket.attachNativeCallback(.{ .h2 = this })) {
@@ -3894,15 +3859,17 @@ pub const H2FrameParser = struct {
}
return this;
}
pub fn detachFromJS(this: *H2FrameParser, _: *JSC.JSGlobalObject, _: *JSC.CallFrame) JSValue {
JSC.markBinding(@src());
this.detach(false);
return .undefined;
}
/// be careful when calling detach be sure that the socket is closed and the parser not accesible anymore
/// this function can be called multiple times, it will erase stream info
pub fn detach(this: *H2FrameParser, comptime finalizing: bool) void {
this.flushCorked();
pub fn deinit(this: *H2FrameParser) void {
log("deinit", .{});
defer {
if (ENABLE_ALLOCATOR_POOL) {
H2FrameParser.pool.?.put(this);
} else {
this.destroy();
}
}
this.detachNativeSocket();
this.strong_ctx.deinit();
this.handlers.deinit();
@@ -3919,24 +3886,9 @@ pub const H2FrameParser = struct {
}
var it = this.streams.valueIterator();
while (it.next()) |stream| {
stream.freeResources(this, finalizing);
stream.freeResources(this, true);
}
var streams = this.streams;
defer streams.deinit();
this.streams = bun.U32HashMap(Stream).init(bun.default_allocator);
}
pub fn deinit(this: *H2FrameParser) void {
log("deinit", .{});
defer {
if (ENABLE_ALLOCATOR_POOL) {
H2FrameParser.pool.?.put(this);
} else {
this.destroy();
}
}
this.detach(true);
this.streams.deinit();
}
pub fn finalize(

Some files were not shown because too many files have changed in this diff Show More