mirror of
https://github.com/oven-sh/bun
synced 2026-02-22 08:41:46 +00:00
Compare commits
1 Commits
jarred/fix
...
jarred/fix
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7a271e48e6 |
@@ -1,406 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Build and test Bun on macOS, Linux, and Windows.
|
||||
* @link https://buildkite.com/docs/pipelines/defining-steps
|
||||
*/
|
||||
|
||||
import { writeFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
|
||||
function getEnv(name, required = true) {
|
||||
const value = process.env[name];
|
||||
|
||||
if (!value && required) {
|
||||
throw new Error(`Missing environment variable: ${name}`);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
function getRepository() {
|
||||
const url = getEnv("BUILDKITE_PULL_REQUEST_REPO", false) || getEnv("BUILDKITE_REPO");
|
||||
const match = url.match(/github.com\/([^/]+)\/([^/]+)\.git$/);
|
||||
if (!match) {
|
||||
throw new Error(`Unsupported repository: ${url}`);
|
||||
}
|
||||
const [, owner, repo] = match;
|
||||
return `${owner}/${repo}`;
|
||||
}
|
||||
|
||||
function getCommit() {
|
||||
return getEnv("BUILDKITE_COMMIT");
|
||||
}
|
||||
|
||||
function getBranch() {
|
||||
return getEnv("BUILDKITE_BRANCH");
|
||||
}
|
||||
|
||||
function getMainBranch() {
|
||||
return getEnv("BUILDKITE_PIPELINE_DEFAULT_BRANCH", false) || "main";
|
||||
}
|
||||
|
||||
function isFork() {
|
||||
const repository = getEnv("BUILDKITE_PULL_REQUEST_REPO", false);
|
||||
return !!repository && repository !== getEnv("BUILDKITE_REPO");
|
||||
}
|
||||
|
||||
function isMainBranch() {
|
||||
return getBranch() === getMainBranch() && !isFork();
|
||||
}
|
||||
|
||||
function isMergeQueue() {
|
||||
return /^gh-readonly-queue/.test(getEnv("BUILDKITE_BRANCH"));
|
||||
}
|
||||
|
||||
function isPullRequest() {
|
||||
return getEnv("BUILDKITE_PULL_REQUEST", false) === "true";
|
||||
}
|
||||
|
||||
async function getChangedFiles() {
|
||||
const repository = getRepository();
|
||||
const head = getCommit();
|
||||
const base = isMainBranch() ? `${head}^1` : getMainBranch();
|
||||
|
||||
try {
|
||||
const response = await fetch(`https://api.github.com/repos/${repository}/compare/${base}...${head}`);
|
||||
if (response.ok) {
|
||||
const { files } = await response.json();
|
||||
return files.filter(({ status }) => !/removed|unchanged/i.test(status)).map(({ filename }) => filename);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
}
|
||||
}
|
||||
|
||||
function isDocumentation(filename) {
|
||||
return /^(\.vscode|\.github|bench|docs|examples)|\.(md)$/.test(filename);
|
||||
}
|
||||
|
||||
function isTest(filename) {
|
||||
return /^test/.test(filename);
|
||||
}
|
||||
|
||||
function toYaml(obj, indent = 0) {
|
||||
const spaces = " ".repeat(indent);
|
||||
let result = "";
|
||||
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
if (value === null) {
|
||||
result += `${spaces}${key}: null\n`;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
result += `${spaces}${key}:\n`;
|
||||
value.forEach(item => {
|
||||
if (typeof item === "object" && item !== null) {
|
||||
result += `${spaces}- \n${toYaml(item, indent + 2)
|
||||
.split("\n")
|
||||
.map(line => `${spaces} ${line}`)
|
||||
.join("\n")}\n`;
|
||||
} else {
|
||||
result += `${spaces}- ${item}\n`;
|
||||
}
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
if (typeof value === "object") {
|
||||
result += `${spaces}${key}:\n${toYaml(value, indent + 2)}`;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (
|
||||
typeof value === "string" &&
|
||||
(value.includes(":") || value.includes("#") || value.includes("'") || value.includes('"') || value.includes("\n"))
|
||||
) {
|
||||
result += `${spaces}${key}: "${value.replace(/"/g, '\\"')}"\n`;
|
||||
continue;
|
||||
}
|
||||
|
||||
result += `${spaces}${key}: ${value}\n`;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function getPipeline() {
|
||||
/**
|
||||
* Helpers
|
||||
*/
|
||||
|
||||
const getKey = platform => {
|
||||
const { os, arch, baseline } = platform;
|
||||
|
||||
if (baseline) {
|
||||
return `${os}-${arch}-baseline`;
|
||||
}
|
||||
|
||||
return `${os}-${arch}`;
|
||||
};
|
||||
|
||||
const getLabel = platform => {
|
||||
const { os, arch, baseline } = platform;
|
||||
|
||||
if (baseline) {
|
||||
return `:${os}: ${arch}-baseline`;
|
||||
}
|
||||
|
||||
return `:${os}: ${arch}`;
|
||||
};
|
||||
|
||||
// https://buildkite.com/docs/pipelines/command-step#retry-attributes
|
||||
const getRetry = (limit = 3) => {
|
||||
return {
|
||||
automatic: [
|
||||
{ exit_status: 1, limit: 1 },
|
||||
{ exit_status: -1, limit },
|
||||
{ exit_status: 255, limit },
|
||||
{ signal_reason: "agent_stop", limit },
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
// https://buildkite.com/docs/pipelines/managing-priorities
|
||||
const getPriority = () => {
|
||||
if (isFork()) {
|
||||
return -1;
|
||||
}
|
||||
if (isMainBranch()) {
|
||||
return 2;
|
||||
}
|
||||
if (isMergeQueue()) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* Steps
|
||||
*/
|
||||
|
||||
const getBuildVendorStep = platform => {
|
||||
const { os, arch, baseline } = platform;
|
||||
|
||||
return {
|
||||
key: `${getKey(platform)}-build-vendor`,
|
||||
label: `${getLabel(platform)} - build-vendor`,
|
||||
agents: {
|
||||
os,
|
||||
arch,
|
||||
queue: `build-${os}`,
|
||||
},
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: {
|
||||
ENABLE_BASELINE: baseline ? "ON" : "OFF",
|
||||
},
|
||||
command: "bun run build:ci --target dependencies",
|
||||
};
|
||||
};
|
||||
|
||||
const getBuildCppStep = platform => {
|
||||
const { os, arch, baseline } = platform;
|
||||
|
||||
return {
|
||||
key: `${getKey(platform)}-build-cpp`,
|
||||
label: `${getLabel(platform)} - build-cpp`,
|
||||
agents: {
|
||||
os,
|
||||
arch,
|
||||
queue: `build-${os}`,
|
||||
},
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: {
|
||||
BUN_CPP_ONLY: "ON",
|
||||
ENABLE_BASELINE: baseline ? "ON" : "OFF",
|
||||
},
|
||||
command: "bun run build:ci --target bun",
|
||||
};
|
||||
};
|
||||
|
||||
const getBuildZigStep = platform => {
|
||||
const { os, arch, baseline } = platform;
|
||||
const toolchain = baseline ? `${os}-${arch}-baseline` : `${os}-${arch}`;
|
||||
|
||||
return {
|
||||
key: `${getKey(platform)}-build-zig`,
|
||||
label: `${getLabel(platform)} - build-zig`,
|
||||
agents: {
|
||||
queue: "build-zig",
|
||||
},
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: {
|
||||
ENABLE_BASELINE: baseline ? "ON" : "OFF",
|
||||
},
|
||||
command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`,
|
||||
};
|
||||
};
|
||||
|
||||
const getBuildBunStep = platform => {
|
||||
const { os, arch, baseline } = platform;
|
||||
|
||||
return {
|
||||
key: `${getKey(platform)}-build-bun`,
|
||||
label: `${getLabel(platform)} - build-bun`,
|
||||
depends_on: [
|
||||
`${getKey(platform)}-build-vendor`,
|
||||
`${getKey(platform)}-build-cpp`,
|
||||
`${getKey(platform)}-build-zig`,
|
||||
],
|
||||
agents: {
|
||||
os,
|
||||
arch,
|
||||
queue: `build-${os}`,
|
||||
},
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: {
|
||||
BUN_LINK_ONLY: "ON",
|
||||
ENABLE_BASELINE: baseline ? "ON" : "OFF",
|
||||
},
|
||||
command: "bun run build:ci --target bun",
|
||||
};
|
||||
};
|
||||
|
||||
const getTestBunStep = platform => {
|
||||
const { os, arch, distro, release } = platform;
|
||||
|
||||
let name;
|
||||
if (os === "darwin" || os === "windows") {
|
||||
name = getLabel(platform);
|
||||
} else {
|
||||
name = getLabel({ ...platform, os: distro });
|
||||
}
|
||||
|
||||
let agents;
|
||||
if (os === "darwin") {
|
||||
agents = { os, arch, queue: `test-darwin` };
|
||||
} else if (os === "windows") {
|
||||
agents = { os, arch, robobun: true };
|
||||
} else {
|
||||
agents = { os, arch, distro, release, robobun: true };
|
||||
}
|
||||
|
||||
let command;
|
||||
if (os === "windows") {
|
||||
command = `node .\\scripts\\runner.node.mjs --step ${getKey(platform)}-build-bun`;
|
||||
} else {
|
||||
command = `./scripts/runner.node.mjs --step ${getKey(platform)}-build-bun`;
|
||||
}
|
||||
|
||||
let parallelism;
|
||||
if (os === "darwin") {
|
||||
parallelism = 2;
|
||||
} else {
|
||||
parallelism = 10;
|
||||
}
|
||||
|
||||
return {
|
||||
key: `${getKey(platform)}-${distro}-${release.replace(/\./g, "")}-test-bun`,
|
||||
label: `${name} - test-bun`,
|
||||
depends_on: [`${getKey(platform)}-build-bun`],
|
||||
agents,
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
soft_fail: isMainBranch(),
|
||||
parallelism,
|
||||
command,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Config
|
||||
*/
|
||||
|
||||
const buildPlatforms = [
|
||||
{ os: "darwin", arch: "aarch64" },
|
||||
{ os: "darwin", arch: "x64" },
|
||||
{ os: "linux", arch: "aarch64" },
|
||||
{ os: "linux", arch: "x64" },
|
||||
{ os: "linux", arch: "x64", baseline: true },
|
||||
{ os: "windows", arch: "x64" },
|
||||
{ os: "windows", arch: "x64", baseline: true },
|
||||
];
|
||||
|
||||
const testPlatforms = [
|
||||
{ os: "darwin", arch: "aarch64", distro: "sonoma", release: "14" },
|
||||
{ os: "darwin", arch: "aarch64", distro: "ventura", release: "13" },
|
||||
{ os: "darwin", arch: "x64", distro: "sonoma", release: "14" },
|
||||
{ os: "darwin", arch: "x64", distro: "ventura", release: "13" },
|
||||
{ os: "linux", arch: "aarch64", distro: "debian", release: "12" },
|
||||
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04" },
|
||||
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04" },
|
||||
{ os: "linux", arch: "x64", distro: "debian", release: "12" },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04" },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04" },
|
||||
{ os: "linux", arch: "x64", distro: "debian", release: "12", baseline: true },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04", baseline: true },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", baseline: true },
|
||||
{ os: "windows", arch: "x64", distro: "server", release: "2019" },
|
||||
{ os: "windows", arch: "x64", distro: "server", release: "2019", baseline: true },
|
||||
];
|
||||
|
||||
return {
|
||||
priority: getPriority(),
|
||||
steps: [
|
||||
...buildPlatforms.map(platform => {
|
||||
const { os, arch, baseline } = platform;
|
||||
|
||||
return {
|
||||
key: getKey(platform),
|
||||
group: getLabel(platform),
|
||||
steps: [
|
||||
getBuildVendorStep(platform),
|
||||
getBuildCppStep(platform),
|
||||
getBuildZigStep(platform),
|
||||
getBuildBunStep(platform),
|
||||
...testPlatforms
|
||||
.filter(platform => platform.os === os && platform.arch === arch && baseline === platform.baseline)
|
||||
.map(platform => getTestBunStep(platform)),
|
||||
],
|
||||
};
|
||||
}),
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
async function main() {
|
||||
console.log("Checking environment...");
|
||||
console.log(" - Repository:", getRepository());
|
||||
console.log(" - Branch:", getBranch());
|
||||
console.log(" - Commit:", getCommit());
|
||||
console.log(" - Is Main Branch:", isMainBranch());
|
||||
console.log(" - Is Merge Queue:", isMergeQueue());
|
||||
console.log(" - Is Pull Request:", isPullRequest());
|
||||
|
||||
const changedFiles = await getChangedFiles();
|
||||
if (changedFiles) {
|
||||
console.log(
|
||||
`Found ${changedFiles.length} changed files: \n${changedFiles.map(filename => ` - ${filename}`).join("\n")}`,
|
||||
);
|
||||
|
||||
if (changedFiles.every(filename => isDocumentation(filename))) {
|
||||
console.log("Since changed files are only documentation, skipping...");
|
||||
return;
|
||||
}
|
||||
|
||||
if (changedFiles.every(filename => isTest(filename) || isDocumentation(filename))) {
|
||||
// TODO: console.log("Since changed files contain tests, skipping build...");
|
||||
}
|
||||
}
|
||||
|
||||
const pipeline = getPipeline();
|
||||
const content = toYaml(pipeline);
|
||||
const contentPath = join(process.cwd(), ".buildkite", "ci.yml");
|
||||
writeFileSync(contentPath, content);
|
||||
|
||||
console.log("Generated pipeline:");
|
||||
console.log(" - Path:", contentPath);
|
||||
console.log(" - Size:", (content.length / 1024).toFixed(), "KB");
|
||||
}
|
||||
|
||||
await main();
|
||||
790
.buildkite/ci.yml
Normal file
790
.buildkite/ci.yml
Normal file
@@ -0,0 +1,790 @@
|
||||
# Build and test Bun on macOS, Linux, and Windows.
|
||||
# https://buildkite.com/docs/pipelines/defining-steps
|
||||
#
|
||||
# If a step has the `robobun: true` label, robobun will listen
|
||||
# to webhooks from Buildkite and provision a VM to run the step.
|
||||
#
|
||||
# Changes to this file will be automatically uploaded on the next run
|
||||
# for a particular commit.
|
||||
|
||||
steps:
|
||||
# macOS aarch64
|
||||
- key: "darwin-aarch64"
|
||||
group: ":darwin: aarch64"
|
||||
steps:
|
||||
- key: "darwin-aarch64-build-deps"
|
||||
label: ":darwin: aarch64 - build-deps"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "bun run build:ci --target dependencies"
|
||||
|
||||
- key: "darwin-aarch64-build-cpp"
|
||||
label: ":darwin: aarch64 - build-cpp"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
env:
|
||||
BUN_CPP_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "darwin-aarch64-build-zig"
|
||||
label: ":darwin: aarch64 - build-zig"
|
||||
agents:
|
||||
queue: "build-zig"
|
||||
command:
|
||||
- "bun run build:ci --target bun-zig --toolchain darwin-aarch64"
|
||||
|
||||
- key: "darwin-aarch64-build-bun"
|
||||
label: ":darwin: aarch64 - build-bun"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
depends_on:
|
||||
- "darwin-aarch64-build-deps"
|
||||
- "darwin-aarch64-build-cpp"
|
||||
- "darwin-aarch64-build-zig"
|
||||
env:
|
||||
BUN_LINK_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "darwin-aarch64-test-macos-14"
|
||||
label: ":darwin: 14 aarch64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 3
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "darwin-aarch64-build-bun"
|
||||
agents:
|
||||
queue: "test-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
release: "14"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
|
||||
|
||||
- key: "darwin-aarch64-test-macos-13"
|
||||
label: ":darwin: 13 aarch64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 3
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "darwin-aarch64-build-bun"
|
||||
agents:
|
||||
queue: "test-darwin"
|
||||
os: "darwin"
|
||||
arch: "aarch64"
|
||||
release: "13"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step darwin-aarch64-build-bun"
|
||||
|
||||
# macOS x64
|
||||
- key: "darwin-x64"
|
||||
group: ":darwin: x64"
|
||||
steps:
|
||||
- key: "darwin-x64-build-deps"
|
||||
label: ":darwin: x64 - build-deps"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "bun run build:ci --target dependencies"
|
||||
|
||||
- key: "darwin-x64-build-cpp"
|
||||
label: ":darwin: x64 - build-cpp"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "x64"
|
||||
env:
|
||||
BUN_CPP_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "darwin-x64-build-zig"
|
||||
label: ":darwin: x64 - build-zig"
|
||||
agents:
|
||||
queue: "build-zig"
|
||||
command:
|
||||
- "bun run build:ci --target bun-zig --toolchain darwin-x64"
|
||||
|
||||
- key: "darwin-x64-build-bun"
|
||||
label: ":darwin: x64 - build-bun"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
os: "darwin"
|
||||
arch: "x64"
|
||||
depends_on:
|
||||
- "darwin-x64-build-deps"
|
||||
- "darwin-x64-build-cpp"
|
||||
- "darwin-x64-build-zig"
|
||||
env:
|
||||
BUN_LINK_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "darwin-x64-test-macos-14"
|
||||
label: ":darwin: 14 x64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 3
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "darwin-x64-build-bun"
|
||||
agents:
|
||||
queue: "test-darwin"
|
||||
os: "darwin"
|
||||
arch: "x64"
|
||||
release: "14"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
|
||||
|
||||
- key: "darwin-x64-test-macos-13"
|
||||
label: ":darwin: 13 x64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 3
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "darwin-x64-build-bun"
|
||||
agents:
|
||||
queue: "test-darwin"
|
||||
os: "darwin"
|
||||
arch: "x64"
|
||||
release: "13"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step darwin-x64-build-bun"
|
||||
|
||||
# Linux x64
|
||||
- key: "linux-x64"
|
||||
group: ":linux: x64"
|
||||
steps:
|
||||
- key: "linux-x64-build-deps"
|
||||
label: ":linux: x64 - build-deps"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "bun run build:ci --target dependencies"
|
||||
|
||||
- key: "linux-x64-build-cpp"
|
||||
label: ":linux: x64 - build-cpp"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
env:
|
||||
BUN_CPP_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "linux-x64-build-zig"
|
||||
label: ":linux: x64 - build-zig"
|
||||
agents:
|
||||
queue: "build-zig"
|
||||
command:
|
||||
- "bun run build:ci --target bun-zig --toolchain linux-x64"
|
||||
|
||||
- key: "linux-x64-build-bun"
|
||||
label: ":linux: x64 - build-bun"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
depends_on:
|
||||
- "linux-x64-build-deps"
|
||||
- "linux-x64-build-cpp"
|
||||
- "linux-x64-build-zig"
|
||||
env:
|
||||
BUN_LINK_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "linux-x64-test-debian-12"
|
||||
label: ":debian: 12 x64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 10
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-x64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
distro: "debian"
|
||||
release: "12"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
|
||||
|
||||
- key: "linux-x64-test-ubuntu-2204"
|
||||
label: ":ubuntu: 22.04 x64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 10
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-x64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
distro: "ubuntu"
|
||||
release: "22.04"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
|
||||
|
||||
- key: "linux-x64-test-ubuntu-2004"
|
||||
label: ":ubuntu: 20.04 x64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 10
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-x64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
distro: "ubuntu"
|
||||
release: "20.04"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-x64-build-bun"
|
||||
|
||||
# Linux x64-baseline
|
||||
- key: "linux-x64-baseline"
|
||||
group: ":linux: x64-baseline"
|
||||
steps:
|
||||
- key: "linux-x64-baseline-build-deps"
|
||||
label: ":linux: x64-baseline - build-deps"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
env:
|
||||
ENABLE_BASELINE: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target dependencies"
|
||||
|
||||
- key: "linux-x64-baseline-build-cpp"
|
||||
label: ":linux: x64-baseline - build-cpp"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
env:
|
||||
ENABLE_BASELINE: "ON"
|
||||
BUN_CPP_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "linux-x64-baseline-build-zig"
|
||||
label: ":linux: x64-baseline - build-zig"
|
||||
agents:
|
||||
queue: "build-zig"
|
||||
env:
|
||||
ENABLE_BASELINE: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun-zig --toolchain linux-x64-baseline"
|
||||
|
||||
- key: "linux-x64-baseline-build-bun"
|
||||
label: ":linux: x64-baseline - build-bun"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
depends_on:
|
||||
- "linux-x64-baseline-build-deps"
|
||||
- "linux-x64-baseline-build-cpp"
|
||||
- "linux-x64-baseline-build-zig"
|
||||
env:
|
||||
ENABLE_BASELINE: "ON"
|
||||
BUN_LINK_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "linux-x64-baseline-test-debian-12"
|
||||
label: ":debian: 12 x64-baseline - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 10
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-x64-baseline-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
distro: "debian"
|
||||
release: "12"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
|
||||
|
||||
- key: "linux-x64-baseline-test-ubuntu-2204"
|
||||
label: ":ubuntu: 22.04 x64-baseline - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 10
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-x64-baseline-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
distro: "ubuntu"
|
||||
release: "22.04"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
|
||||
|
||||
- key: "linux-x64-baseline-test-ubuntu-2004"
|
||||
label: ":ubuntu: 20.04 x64-baseline - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 10
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-x64-baseline-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "x64"
|
||||
distro: "ubuntu"
|
||||
release: "20.04"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-x64-baseline-build-bun"
|
||||
|
||||
# Linux aarch64
|
||||
- key: "linux-aarch64"
|
||||
group: ":linux: aarch64"
|
||||
steps:
|
||||
- key: "linux-aarch64-build-deps"
|
||||
label: ":linux: aarch64 - build-deps"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "aarch64"
|
||||
command:
|
||||
- "bun run build:ci --target dependencies"
|
||||
|
||||
- key: "linux-aarch64-build-cpp"
|
||||
label: ":linux: aarch64 - build-cpp"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "aarch64"
|
||||
env:
|
||||
BUN_CPP_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "linux-aarch64-build-zig"
|
||||
label: ":linux: aarch64 - build-zig"
|
||||
agents:
|
||||
queue: "build-zig"
|
||||
command:
|
||||
- "bun run build:ci --target bun-zig --toolchain linux-aarch64"
|
||||
|
||||
- key: "linux-aarch64-build-bun"
|
||||
label: ":linux: aarch64 - build-bun"
|
||||
agents:
|
||||
queue: "build-linux"
|
||||
os: "linux"
|
||||
arch: "aarch64"
|
||||
depends_on:
|
||||
- "linux-aarch64-build-deps"
|
||||
- "linux-aarch64-build-cpp"
|
||||
- "linux-aarch64-build-zig"
|
||||
env:
|
||||
BUN_LINK_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "linux-aarch64-test-debian-12"
|
||||
label: ":debian: 12 aarch64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 10
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-aarch64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "aarch64"
|
||||
distro: "debian"
|
||||
release: "12"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
|
||||
|
||||
- key: "linux-aarch64-test-ubuntu-2204"
|
||||
label: ":ubuntu: 22.04 aarch64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 10
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-aarch64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "aarch64"
|
||||
distro: "ubuntu"
|
||||
release: "22.04"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
|
||||
|
||||
- key: "linux-aarch64-test-ubuntu-2004"
|
||||
label: ":ubuntu: 20.04 aarch64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 10
|
||||
soft_fail:
|
||||
- exit_status: 2
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 1
|
||||
limit: 1
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "linux-aarch64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "linux"
|
||||
arch: "aarch64"
|
||||
distro: "ubuntu"
|
||||
release: "20.04"
|
||||
command:
|
||||
- "./scripts/runner.node.mjs --step linux-aarch64-build-bun"
|
||||
|
||||
# Windows x64
|
||||
- key: "windows-x64"
|
||||
group: ":windows: x64"
|
||||
steps:
|
||||
- key: "windows-x64-build-deps"
|
||||
label: ":windows: x64 - build-deps"
|
||||
agents:
|
||||
queue: "build-windows"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 255
|
||||
limit: 5
|
||||
command:
|
||||
- "bun run build:ci --target dependencies"
|
||||
|
||||
- key: "windows-x64-build-cpp"
|
||||
label: ":windows: x64 - build-cpp"
|
||||
agents:
|
||||
queue: "build-windows"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 255
|
||||
limit: 5
|
||||
env:
|
||||
BUN_CPP_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "windows-x64-build-zig"
|
||||
label: ":windows: x64 - build-zig"
|
||||
agents:
|
||||
queue: "build-zig"
|
||||
command:
|
||||
- "bun run build:ci --target bun-zig --toolchain windows-x64"
|
||||
|
||||
- key: "windows-x64-build-bun"
|
||||
label: ":windows: x64 - build-bun"
|
||||
agents:
|
||||
queue: "build-windows"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
depends_on:
|
||||
- "windows-x64-build-deps"
|
||||
- "windows-x64-build-cpp"
|
||||
- "windows-x64-build-zig"
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 255
|
||||
limit: 5
|
||||
env:
|
||||
BUN_LINK_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "windows-x64-test-bun"
|
||||
label: ":windows: x64 - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 10
|
||||
soft_fail:
|
||||
- exit_status: 1
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "windows-x64-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "node .\\scripts\\runner.node.mjs --step windows-x64-build-bun"
|
||||
|
||||
# Windows x64-baseline
|
||||
- key: "windows-x64-baseline"
|
||||
group: ":windows: x64-baseline"
|
||||
steps:
|
||||
- key: "windows-x64-baseline-build-deps"
|
||||
label: ":windows: x64-baseline - build-deps"
|
||||
agents:
|
||||
queue: "build-windows"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 255
|
||||
limit: 5
|
||||
env:
|
||||
ENABLE_BASELINE: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target dependencies"
|
||||
|
||||
- key: "windows-x64-baseline-build-cpp"
|
||||
label: ":windows: x64-baseline - build-cpp"
|
||||
agents:
|
||||
queue: "build-windows"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 255
|
||||
limit: 5
|
||||
env:
|
||||
ENABLE_BASELINE: "ON"
|
||||
BUN_CPP_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "windows-x64-baseline-build-zig"
|
||||
label: ":windows: x64-baseline - build-zig"
|
||||
agents:
|
||||
queue: "build-zig"
|
||||
env:
|
||||
ENABLE_BASELINE: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun-zig --toolchain windows-x64-baseline"
|
||||
|
||||
- key: "windows-x64-baseline-build-bun"
|
||||
label: ":windows: x64-baseline - build-bun"
|
||||
agents:
|
||||
queue: "build-windows"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
depends_on:
|
||||
- "windows-x64-baseline-build-deps"
|
||||
- "windows-x64-baseline-build-cpp"
|
||||
- "windows-x64-baseline-build-zig"
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: 255
|
||||
limit: 5
|
||||
env:
|
||||
ENABLE_BASELINE: "ON"
|
||||
BUN_LINK_ONLY: "ON"
|
||||
command:
|
||||
- "bun run build:ci --target bun"
|
||||
|
||||
- key: "windows-x64-baseline-test-bun"
|
||||
label: ":windows: x64-baseline - test-bun"
|
||||
if: "build.branch != 'main'"
|
||||
parallelism: 10
|
||||
soft_fail:
|
||||
- exit_status: 1
|
||||
retry:
|
||||
automatic:
|
||||
- exit_status: -1
|
||||
limit: 3
|
||||
- exit_status: 255
|
||||
limit: 3
|
||||
- signal_reason: agent_stop
|
||||
limit: 3
|
||||
- signal: SIGTERM
|
||||
limit: 3
|
||||
depends_on:
|
||||
- "windows-x64-baseline-build-bun"
|
||||
agents:
|
||||
robobun: "true"
|
||||
os: "windows"
|
||||
arch: "x64"
|
||||
command:
|
||||
- "node .\\scripts\\runner.node.mjs --step windows-x64-baseline-build-bun"
|
||||
@@ -29,10 +29,6 @@ function assert_curl() {
|
||||
assert_command "curl" "curl" "https://curl.se/download.html"
|
||||
}
|
||||
|
||||
function assert_node() {
|
||||
assert_command "node" "node" "https://nodejs.org/en/download/"
|
||||
}
|
||||
|
||||
function assert_command() {
|
||||
local command="$1"
|
||||
local package="$2"
|
||||
@@ -96,12 +92,6 @@ assert_build
|
||||
assert_buildkite_agent
|
||||
assert_jq
|
||||
assert_curl
|
||||
assert_node
|
||||
assert_release
|
||||
assert_canary
|
||||
|
||||
run_command node ".buildkite/ci.mjs"
|
||||
|
||||
if [ -f ".buildkite/ci.yml" ]; then
|
||||
upload_buildkite_pipeline ".buildkite/ci.yml"
|
||||
fi
|
||||
upload_buildkite_pipeline ".buildkite/ci.yml"
|
||||
|
||||
2
.gitattributes
vendored
2
.gitattributes
vendored
@@ -49,5 +49,3 @@ vendor/brotli/** linguist-vendored
|
||||
|
||||
test/js/node/test/fixtures linguist-vendored
|
||||
test/js/node/test/common linguist-vendored
|
||||
|
||||
test/js/bun/css/files linguist-vendored
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
name: 🇹 TypeScript Type Bug Report
|
||||
description: Report an issue with TypeScript types
|
||||
labels: [bug, types]
|
||||
labels: [bug, typescript]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
|
||||
@@ -11,8 +11,8 @@ body:
|
||||
- type: textarea
|
||||
id: package_json
|
||||
attributes:
|
||||
label: "`package.json` file"
|
||||
description: "Can you upload your `package.json` file? This helps us reproduce the crash."
|
||||
label: `package.json` file
|
||||
description: Can you upload your `package.json` file? This helps us reproduce the crash.
|
||||
render: json
|
||||
- type: textarea
|
||||
id: repro
|
||||
|
||||
4
.github/workflows/clang-format.yml
vendored
4
.github/workflows/clang-format.yml
vendored
@@ -7,7 +7,6 @@ on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
merge_group:
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.1.27"
|
||||
@@ -32,9 +31,10 @@ jobs:
|
||||
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
|
||||
- name: Clang Format
|
||||
env:
|
||||
ENABLE_CCACHE: OFF
|
||||
LLVM_VERSION: ${{ env.LLVM_VERSION }}
|
||||
run: |
|
||||
bun run clang-format
|
||||
bun run clang-format:diff
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
|
||||
2
.github/workflows/clang-tidy.yml
vendored
2
.github/workflows/clang-tidy.yml
vendored
@@ -7,7 +7,6 @@ on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
merge_group:
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.1.27"
|
||||
@@ -32,6 +31,7 @@ jobs:
|
||||
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
|
||||
- name: Clang Tidy
|
||||
env:
|
||||
ENABLE_CCACHE: OFF
|
||||
LLVM_VERSION: ${{ env.LLVM_VERSION }}
|
||||
run: |
|
||||
bun run clang-tidy:diff
|
||||
|
||||
58
.github/workflows/labeled.yml
vendored
58
.github/workflows/labeled.yml
vendored
@@ -83,26 +83,6 @@ jobs:
|
||||
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
|
||||
|
||||
rm -rf is-outdated.txt outdated.txt latest.txt
|
||||
- name: Generate comment text with Sentry Link
|
||||
if: github.event.label.name == 'crash'
|
||||
# ignore if fail
|
||||
continue-on-error: true
|
||||
id: generate-comment-text
|
||||
env:
|
||||
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_EVENTS_SECRET }}
|
||||
shell: bash
|
||||
run: |
|
||||
bun scripts/associate-issue-with-sentry.ts
|
||||
|
||||
if [[ -f "sentry-link.txt" ]]; then
|
||||
echo "sentry-link=$(cat sentry-link.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "sentry-id.txt" ]]; then
|
||||
echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Add labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: github.event.label.name == 'crash'
|
||||
@@ -112,7 +92,7 @@ jobs:
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
labels: ${{ steps.add-labels.outputs.labels }}
|
||||
- name: Comment outdated
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
@@ -126,40 +106,6 @@ jobs:
|
||||
```sh
|
||||
bun upgrade
|
||||
```
|
||||
- name: Comment with Sentry Link and outdated version
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.oudated }}.
|
||||
|
||||
Are you able to reproduce this crash on the latest version of Bun?
|
||||
|
||||
```sh
|
||||
bun upgrade
|
||||
```
|
||||
|
||||
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
|
||||
|
||||
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
|
||||
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
|
||||
- name: Comment with Sentry Link
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated != 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
Thank you for reporting this crash.
|
||||
|
||||
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
|
||||
|
||||
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
|
||||
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
|
||||
- name: Comment needs repro
|
||||
if: github.event.label.name == 'needs repro'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
@@ -168,4 +114,4 @@ jobs:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
Hello @${{ github.event.issue.user.login }}. Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun), [CodeSandbox](https://codesandbox.io/templates/bun), or provide a bulleted list of commands to run that reproduce this issue. Issues marked with `needs repro` will be closed if they have no activity within 3 days.
|
||||
Hello @${{ github.event.issue.user.login }}. Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun), or [CodeSandbox](https://codesandbox.io/templates/bun). Issues marked with `needs repro` will be closed if they have no activity within 3 days.
|
||||
|
||||
4
.github/workflows/prettier-format.yml
vendored
4
.github/workflows/prettier-format.yml
vendored
@@ -7,7 +7,6 @@ on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
merge_group:
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.1.27"
|
||||
@@ -29,6 +28,9 @@ jobs:
|
||||
run: |
|
||||
bun install
|
||||
- name: Prettier Format
|
||||
env:
|
||||
ENABLE_CCACHE: OFF
|
||||
LLVM_VERSION: "ignore"
|
||||
run: |
|
||||
bun run prettier:diff
|
||||
- name: Commit
|
||||
|
||||
4
.github/workflows/zig-format.yml
vendored
4
.github/workflows/zig-format.yml
vendored
@@ -7,7 +7,6 @@ on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
merge_group:
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.1.27"
|
||||
@@ -26,6 +25,9 @@ jobs:
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Zig Format
|
||||
env:
|
||||
ENABLE_CCACHE: OFF
|
||||
LLVM_VERSION: "ignore"
|
||||
run: |
|
||||
bun run zig-format:diff
|
||||
- name: Commit
|
||||
|
||||
5
.gitignore
vendored
5
.gitignore
vendored
@@ -141,7 +141,6 @@ test/node.js/upstream
|
||||
.zig-cache
|
||||
scripts/env.local
|
||||
*.generated.ts
|
||||
src/bake/generated.ts
|
||||
|
||||
# Dependencies
|
||||
/vendor
|
||||
@@ -164,7 +163,3 @@ src/bake/generated.ts
|
||||
/src/deps/zstd
|
||||
/src/deps/zlib
|
||||
/src/deps/zig
|
||||
|
||||
# Generated files
|
||||
|
||||
.buildkite/ci.yml
|
||||
|
||||
74
.vscode/launch.json
generated
vendored
74
.vscode/launch.json
generated
vendored
@@ -14,7 +14,7 @@
|
||||
"name": "bun test [file]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -29,7 +29,7 @@
|
||||
"name": "bun test [file] --only",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "--only", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -50,7 +50,7 @@
|
||||
"name": "bun test [file] (fast)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -65,7 +65,7 @@
|
||||
"name": "bun test [file] (verbose)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "0",
|
||||
@@ -80,7 +80,7 @@
|
||||
"name": "bun test [file] --watch",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "--watch", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -95,7 +95,7 @@
|
||||
"name": "bun test [file] --hot",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "--hot", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -110,7 +110,7 @@
|
||||
"name": "bun test [file] --inspect",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -131,7 +131,7 @@
|
||||
"name": "bun test [file] --inspect-brk",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -172,10 +172,6 @@
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
|
||||
"BUN_DEBUG_IncrementalGraph": "1",
|
||||
"BUN_DEBUG_Bake": "1",
|
||||
"BUN_DEBUG_reload_file_list": "1",
|
||||
"GOMAXPROCS": "1",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
},
|
||||
@@ -268,7 +264,7 @@
|
||||
"name": "bun test [...]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -283,7 +279,7 @@
|
||||
"name": "bun test [...] (fast)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -298,7 +294,7 @@
|
||||
"name": "bun test [...] (verbose)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -313,7 +309,7 @@
|
||||
"name": "bun test [...] --watch",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "--watch", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -328,7 +324,7 @@
|
||||
"name": "bun test [...] --hot",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "--hot", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -343,7 +339,7 @@
|
||||
"name": "bun test [...] --inspect",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -364,7 +360,7 @@
|
||||
"name": "bun test [...] --inspect-brk",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -401,7 +397,7 @@
|
||||
"name": "bun test [*]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -415,7 +411,7 @@
|
||||
"name": "bun test [*] (fast)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -429,7 +425,7 @@
|
||||
"name": "bun test [*] --inspect",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug",
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"env": {
|
||||
"FORCE_COLOR": "1",
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
@@ -481,7 +477,7 @@
|
||||
"name": "Windows: bun test [file]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -510,7 +506,7 @@
|
||||
"name": "Windows: bun test --only [file]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "--only", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -539,7 +535,7 @@
|
||||
"name": "Windows: bun test [file] (fast)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -568,7 +564,7 @@
|
||||
"name": "Windows: bun test [file] (verbose)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -597,7 +593,7 @@
|
||||
"name": "Windows: bun test [file] --inspect",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -635,7 +631,7 @@
|
||||
"name": "Windows: bun test [file] --inspect-brk",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -822,7 +818,7 @@
|
||||
"name": "Windows: bun test [...]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -851,7 +847,7 @@
|
||||
"name": "Windows: bun test [...] (fast)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -880,7 +876,7 @@
|
||||
"name": "Windows: bun test [...] (verbose)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -909,7 +905,7 @@
|
||||
"name": "Windows: bun test [...] --watch",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "--watch", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -938,7 +934,7 @@
|
||||
"name": "Windows: bun test [...] --hot",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "--hot", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -967,7 +963,7 @@
|
||||
"name": "Windows: bun test [...] --inspect",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -1005,7 +1001,7 @@
|
||||
"name": "Windows: bun test [...] --inspect-brk",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test", "${input:testName}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -1070,7 +1066,7 @@
|
||||
"name": "Windows: bun test [*]",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -1095,7 +1091,7 @@
|
||||
"name": "Windows: bun test [*] (fast)",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -1124,7 +1120,7 @@
|
||||
"name": "Windows: bun test [*] --inspect",
|
||||
"program": "${workspaceFolder}/build/debug/bun-debug.exe",
|
||||
"args": ["test"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"cwd": "${workspaceFolder}/test",
|
||||
"environment": [
|
||||
{
|
||||
"name": "FORCE_COLOR",
|
||||
@@ -1196,4 +1192,4 @@
|
||||
"description": "Usage: bun test [...]",
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
@@ -43,6 +43,8 @@ include(BuildBun)
|
||||
|
||||
# --- Analysis ---
|
||||
|
||||
optionx(ENABLE_ANALYSIS BOOL "If analysis targets should be enabled" DEFAULT OFF)
|
||||
|
||||
if(ENABLE_ANALYSIS)
|
||||
include(RunClangFormat)
|
||||
include(RunClangTidy)
|
||||
|
||||
@@ -30,7 +30,7 @@ $ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config
|
||||
```
|
||||
|
||||
```bash#openSUSE Tumbleweed
|
||||
$ sudo zypper install go cmake ninja automake git icu rustup && rustup toolchain install stable
|
||||
$ sudo zypper install go cmake ninja automake git rustup && rustup toolchain install stable
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
@@ -77,8 +77,8 @@ $ sudo pacman -S llvm clang lld
|
||||
|
||||
```bash#Fedora
|
||||
$ sudo dnf install 'dnf-command(copr)'
|
||||
$ sudo dnf copr enable -y @fedora-llvm-team/llvm17
|
||||
$ sudo dnf install llvm16 clang16 lld16-devel
|
||||
$ sudo dnf copr enable -y @fedora-llvm-team/llvm-snapshots
|
||||
$ sudo dnf install llvm clang lld
|
||||
```
|
||||
|
||||
```bash#openSUSE Tumbleweed
|
||||
@@ -133,9 +133,9 @@ x.y.z_debug
|
||||
|
||||
VSCode is the recommended IDE for working on Bun, as it has been configured. Once opening, you can run `Extensions: Show Recommended Extensions` to install the recommended extensions for Zig and C++. ZLS is automatically configured.
|
||||
|
||||
If you use a different editor, make sure that you tell ZLS to use the automatically installed Zig compiler, which is located at `./vendor/zig/zig.exe`. The filename is `zig.exe` so that it works as expected on Windows, but it still works on macOS/Linux (it just has a surprising file extension).
|
||||
If you use a different editor, make sure that you tell ZLS to use the automatically installed Zig compiler, which is located at `./.cache/zig/zig.exe`. The filename is `zig.exe` so that it works as expected on Windows, but it still works on macOS/Linux (it just has a surprising file extension).
|
||||
|
||||
We recommend adding `./build/debug` to your `$PATH` so that you can run `bun-debug` in your terminal:
|
||||
We recommend adding `./build` to your `$PATH` so that you can run `bun-debug` in your terminal:
|
||||
|
||||
```sh
|
||||
$ bun-debug
|
||||
@@ -164,7 +164,7 @@ To compile a release build of Bun, run:
|
||||
$ bun run build:release
|
||||
```
|
||||
|
||||
The binary will be located at `./build/release/bun` and `./build/release/bun-profile`.
|
||||
The binary will be located at `./build-release/bun` and `./build-release/bun-profile`.
|
||||
|
||||
### Download release build from pull requests
|
||||
|
||||
@@ -173,8 +173,8 @@ To save you time spent building a release build locally, we provide a way to run
|
||||
To run a release build from a pull request, you can use the `bun-pr` npm package:
|
||||
|
||||
```sh
|
||||
bunx bun-pr <pr-number>
|
||||
bunx bun-pr <branch-name>
|
||||
bunx bun-pr pr-number
|
||||
bunx bun-pr branch/branch-name
|
||||
bunx bun-pr "https://github.com/oven-sh/bun/pull/1234566"
|
||||
```
|
||||
|
||||
@@ -206,18 +206,24 @@ $ valgrind --fair-sched=try --track-origins=yes bun-debug <args>

## Building WebKit locally + Debug mode of JSC

{% callout %}

**TODO**: This is out of date. TLDR is pass `-DUSE_DEBUG_JSC=1` or `-DWEBKIT_DIR=...` to CMake. it will probably need more fiddling. ask @paperdave if you need this.

{% /callout %}

WebKit is not cloned by default (to save time and disk space). To clone and build WebKit locally, run:

```bash
# Clone WebKit into ./vendor/WebKit
$ git clone https://github.com/oven-sh/WebKit vendor/WebKit

# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `make jsc` for a release build
$ make jsc-debug

# Build bun with the local JSC build
$ bun run build:local
# once you run this, `make submodule` can be used to automatically
# update WebKit and the other submodules
$ git submodule update --init --depth 1 --checkout src/bun.js/WebKit
# to make a jsc release build
$ make jsc
# JSC debug build does not work perfectly with Bun yet, this is actively being
# worked on and will eventually become the default.
$ make jsc-build-linux-compile-debug cpp
$ make jsc-build-mac-compile-debug cpp
```

Note that the WebKit folder, including build artifacts, is 8GB+ in size.

@@ -3,9 +3,9 @@
|
||||
"scripts": {
|
||||
"deps": "exit 0",
|
||||
"build": "exit 0",
|
||||
"bench:bun": "bun bun.js",
|
||||
"bench:node": "node node.mjs",
|
||||
"bench:deno": "deno run -A --unstable deno.js",
|
||||
"bench:bun": "$BUN bun.js",
|
||||
"bench:node": "$NODE node.mjs",
|
||||
"bench:deno": "$DENO run -A --unstable deno.js",
|
||||
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
{
|
||||
"name": "bench",
|
||||
"scripts": {
|
||||
"bench:bun": "bun bun.js",
|
||||
"bench:node": "node node.mjs",
|
||||
"bench:bun": "$BUN bun.js",
|
||||
"bench:node": "$NODE node.mjs",
|
||||
"deps": "cd src && bun run deps",
|
||||
"build": "cd src && bun run build",
|
||||
"bench:deno": "deno run -A --unstable deno.js",
|
||||
"bench:deno": "$DENO run -A --unstable deno.js",
|
||||
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
syntax = "proto3";
|
||||
package benchmark;
|
||||
|
||||
service BenchmarkService {
|
||||
rpc Ping(Request) returns (Response);
|
||||
}
|
||||
|
||||
message Request {
|
||||
string message = 1;
|
||||
}
|
||||
|
||||
message Response {
|
||||
string message = 1;
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFxjCCA66gAwIBAgIUUaQCzOcxcFBP0KwoQfNqD/FoI44wDQYJKoZIhvcNAQEL
|
||||
BQAwYjELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJh
|
||||
bmNpc2NvMQwwCgYDVQQKDANCdW4xDDAKBgNVBAsMA0J1bjESMBAGA1UEAwwJbG9j
|
||||
YWxob3N0MB4XDTI0MTAxNjAwMDExNloXDTM0MTAxNDAwMDExNlowYjELMAkGA1UE
|
||||
BhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJhbmNpc2NvMQwwCgYD
|
||||
VQQKDANCdW4xDDAKBgNVBAsMA0J1bjESMBAGA1UEAwwJbG9jYWxob3N0MIICIjAN
|
||||
BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp2s1CWRRV3bkjUxyBefcRCiZj8v6
|
||||
LIIWOb/kFJOo1PQsmQtOOWfY/kNEATPhLtEVolMzsQtaKV+u/Jnp6vU6cCU0qfQ/
|
||||
cha/s0XaSn9zkJSXjmNOPDOXoeJ5wmSUvWETRvDgeYXCg84zTwRnD1pXIsKxHtia
|
||||
SYkTC29skSn0+63GW2Ebzkbn3jcYbk3gfkRO/qw8EDh/4/TcS2SjoHl96E1QcfBX
|
||||
InXrPGoHQhuqJV60rmmkVws0lTIZIq0g2p7iFDCg5TG1asakX7+CrEM/q+oyo3e8
|
||||
RwMfc+9pqFEqyvXGIQSulS+CVKKbpAFMg07UGYe1t0s5iCwfLQ9apaKL31t/3Vkr
|
||||
uVKgy5FrPLnRXkFXDZ1v+43AZBmdLrKODzsqHEbt2JmV0V6JVUkE4kbeJr/nlkhQ
|
||||
x6yXloYY3VKbnCb1L3HmMInrK1QSpxlOb8RllTd33oBwd1FKEvH2gza0j9hqq8uQ
|
||||
hWVN7tlamkgtBteZ8Y9fd3MdxD9iZOx4dVtCX1+sgJFdaL2ZgE0asojn46yT8Uqw
|
||||
5d0M9vqmWc5AqG7c4UWWRrfB1MfOq/X8GtImmKyhEgizIPdWFeF1cNjhPffJv4yR
|
||||
Y4Rj33OBTCM+9h8ZSw/fKo55yRXyz3bjrW2Mg8Dtq+6TcRd5gSLCaTN6jX8E9y7G
|
||||
TobnA9MnKHhSIhsCAwEAAaN0MHIwHQYDVR0OBBYEFEJU6/9ELCp1CAxYJ5FJJxpV
|
||||
FSRmMB8GA1UdIwQYMBaAFEJU6/9ELCp1CAxYJ5FJJxpVFSRmMA8GA1UdEwEB/wQF
|
||||
MAMBAf8wHwYDVR0RBBgwFoIJbG9jYWxob3N0ggkxMjcuMC4wLjEwDQYJKoZIhvcN
|
||||
AQELBQADggIBACyOPdVwfJg1aUNANy78+cm6eoInM9NDdXGWHMqCJwYF6qJTQV11
|
||||
jYwYrl+OWOi3CEC+ogXl+uJX4tSS5d+rBTXEb73cLpogxP+xuxr4cBHhtgpGRpY0
|
||||
GqWCFUTexHxXMrYhHQxf3uv79PNauw/dd1Baby1OjF3zSKRzFsv4KId97cAgT/9H
|
||||
HfUo2ym5jmhNFj5rhUavO3Pw1++1eeDeDAkS6T59buzx0h9760WD20oBdgjt42cb
|
||||
P6xg9OwV7ALQSwJ8YPEXpkl7u+6jy0j5ceYmXh76tAyA+hDYOJrY0opBjSPmXH99
|
||||
p3W63gvk/AdfeAdbFHp6en0b04x4EIogOGZxBP35rzBvsQpqavBE3PBpUIyrQs5p
|
||||
OBUncRrcjEDL6WKh6RJIjZnvpHPrEqOqyxaeWRc4+85ZrVArJHGMc8I+zs9uCFjo
|
||||
Cjfde3d317kCszUTxo0l3azyBpr007PMIUoBF2VJEAyQp2Tz/yu0CbEscNJO/wCn
|
||||
Sb1A6ojaQcgQe2hsaJz/mS+OOjHHaDbCp9iltP2CS63PYleEx4q1Bn8KVRy2zYTB
|
||||
n74y4YaD8Q+hSA6zU741pzqK2SFCpBQnSz757ocr6WspQ47iOonX2giGZS/3KVeK
|
||||
qNzU14+h0b8HaBqZmOvjF+S4G0HDpRwxPzDWgc7dEIWlzHH+ZCqjBFwL
|
||||
-----END CERTIFICATE-----
|
||||
@@ -1,31 +0,0 @@
|
||||
const grpc = require("@grpc/grpc-js");
|
||||
const protoLoader = require("@grpc/proto-loader");
|
||||
const packageDefinition = protoLoader.loadSync("benchmark.proto", {});
|
||||
const proto = grpc.loadPackageDefinition(packageDefinition).benchmark;
|
||||
const fs = require("fs");
|
||||
|
||||
function ping(call, callback) {
|
||||
callback(null, { message: "Hello, World" });
|
||||
}
|
||||
|
||||
function main() {
|
||||
const server = new grpc.Server();
|
||||
server.addService(proto.BenchmarkService.service, { ping: ping });
|
||||
const tls = !!process.env.TLS && (process.env.TLS === "1" || process.env.TLS === "true");
|
||||
const port = process.env.PORT || 50051;
|
||||
const host = process.env.HOST || "localhost";
|
||||
let credentials;
|
||||
if (tls) {
|
||||
const ca = fs.readFileSync("./cert.pem");
|
||||
const key = fs.readFileSync("./key.pem");
|
||||
const cert = fs.readFileSync("./cert.pem");
|
||||
credentials = grpc.ServerCredentials.createSsl(ca, [{ private_key: key, cert_chain: cert }]);
|
||||
} else {
|
||||
credentials = grpc.ServerCredentials.createInsecure();
|
||||
}
|
||||
server.bindAsync(`${host}:${port}`, credentials, () => {
|
||||
console.log(`Server running at ${tls ? "https" : "http"}://${host}:${port}`);
|
||||
});
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -1,52 +0,0 @@
|
||||
-----BEGIN PRIVATE KEY-----
|
||||
MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCnazUJZFFXduSN
|
||||
THIF59xEKJmPy/osghY5v+QUk6jU9CyZC045Z9j+Q0QBM+Eu0RWiUzOxC1opX678
|
||||
menq9TpwJTSp9D9yFr+zRdpKf3OQlJeOY048M5eh4nnCZJS9YRNG8OB5hcKDzjNP
|
||||
BGcPWlciwrEe2JpJiRMLb2yRKfT7rcZbYRvORufeNxhuTeB+RE7+rDwQOH/j9NxL
|
||||
ZKOgeX3oTVBx8Fcides8agdCG6olXrSuaaRXCzSVMhkirSDanuIUMKDlMbVqxqRf
|
||||
v4KsQz+r6jKjd7xHAx9z72moUSrK9cYhBK6VL4JUopukAUyDTtQZh7W3SzmILB8t
|
||||
D1qloovfW3/dWSu5UqDLkWs8udFeQVcNnW/7jcBkGZ0uso4POyocRu3YmZXRXolV
|
||||
SQTiRt4mv+eWSFDHrJeWhhjdUpucJvUvceYwiesrVBKnGU5vxGWVN3fegHB3UUoS
|
||||
8faDNrSP2Gqry5CFZU3u2VqaSC0G15nxj193cx3EP2Jk7Hh1W0JfX6yAkV1ovZmA
|
||||
TRqyiOfjrJPxSrDl3Qz2+qZZzkCobtzhRZZGt8HUx86r9fwa0iaYrKESCLMg91YV
|
||||
4XVw2OE998m/jJFjhGPfc4FMIz72HxlLD98qjnnJFfLPduOtbYyDwO2r7pNxF3mB
|
||||
IsJpM3qNfwT3LsZOhucD0ycoeFIiGwIDAQABAoICAE+YYrDCZwHEXsjmzVcNcuVc
|
||||
wBVjjt9WQabXGmLGCQClzgY9H8WfH8VSyaQgvDB762MvV2YW1ZjSCunBazrvuAbV
|
||||
SYJ7wyZEtoNO9IdyrMjSPHPPtsRcavzmJalMFIMtAfM6Vh6wf1gW0sIAf9cGxmKa
|
||||
WYcmx8OqTcmkAePKJNT7O1D6jDO39kjpvM3EbLTbWQsva6bylasVIR8fC8QhvsCQ
|
||||
8WwaLfMOSPaCGk1Nxcjai+BYDW/sveUo2lZoJTSLUUT0EaqlxXCsXD3BWSj5F+5t
|
||||
/AFHzdWdIHkIHB2P6V5xFu9fwHjhC3+dh42jqHLNKX2xza0FMKcTAwdzQ094RjL3
|
||||
cOGIsa0Vdt7Mks5eLCRxz0xI3kyrbF0/CopxT0pVWZwUzPk1G+Z3HesWkVtQpg7u
|
||||
RYzsoNKKc5mhc/V+vG290WAcNB4E3m85DgKQr4ib+J/rCy5/SnJYgg4QXsEyNlQ5
|
||||
ESBtRmuPfnrPIxqrDKZ7ZsJv8XFWydXTOfJxeKR1T1S02iYna+z1FnNu+t0ELTr9
|
||||
uhmkuqmV8RJVTub1P2EJPdiku/61UwNLyyZMgFjATDxB0hHIj1FP1HbfhEYbkYNc
|
||||
Dl7a7egJ4KFYWpQ+7MzOmc0OKq1HuJ9H4FhoYpbVq1OQosZ6G3d9afKSZa6dFdK0
|
||||
8ujvdQBR0NlAhc/LAr6BAoIBAQDfD3h9P4i5L8NCdocovCi3Eo0kcNQ3QuvnWrrs
|
||||
B/9CLoWhJrcLV85d0dEX6lSYl9BWW02ilVB+Qvom2wS2td1CBUgDxovX4tCZCuXt
|
||||
otYL/yWWOA7IG0Fjt6YEERQD/tRfKnn8hVBlk5cDTXXxHRGVMku4CHsN3ILtITQS
|
||||
VnVsTrGoWd6mFFA9X9Qu4zR9wKtjGEuL7BT8ixxtXLa2tMjdc4UL140yAgmMemJS
|
||||
TzC6EURe2OnhIzVe9yyLKcqw0prkGHg/Lau5lA1CAh67ZMY4EjO3cuda8R+O7vyO
|
||||
z2afeaTORzzdEbSZPG+8oqIN1/RjRCbl3RXYN8ibSwOzp6X7AoIBAQDAJEVta98J
|
||||
P2/36rXrkl6WrRfYqUPy6vgo/lPuRpp+BQ7ldgmH4+ZrJW5Mxa5hktVujk/C2kAO
|
||||
auzhzNlsxR+c/KwtsL1JXwBn8CT1bR0qvi+URmvGQn9GOKrLLy+6cfphuZWuc4/r
|
||||
hAgXzEjzPcJJJfxA1i2soKPbiFiCGHxot68P4uJSM2sU6QjNIxEjPbTJjEg894pD
|
||||
GJoiRRVHgnzzxL3cqrK90Zn6MAl9f2tYihfddsENeZb5t84LBppxBSGouE3ZH8uD
|
||||
Sufs4DSj1ptocbDbX+0kRNqfjTI5ivDxlS+ZKBe05PVTUmGBAWLamfCe89IW3/z+
|
||||
Rfkh4ZBPtlphAoIBADwjSqPR7kWnN+iCVjxIRl3dNYpelQh1FW7hikW6fjpUmphw
|
||||
/KalPLEUsV/WQIqHW5b8tLihsvrnidPR9rpf29BB5kGGVQuWThEE3CquXTEM0BBo
|
||||
+qs+lemRiMPN6uyM1qr1o7/OHXfVS8CLMMIZyTTFQ57RQoPhMLdH3WcYQj46FTHD
|
||||
UQDLtzpkzKr7fJpuyIZF9ZA6zQmtY7OkbGpj4Ue7LmKb8ahK3lIuaLWyPfvcTeeY
|
||||
aa3WNTxuPWcjlE8J6NKYOksmQAcfgFeMhMaXC83wMltCMlfVbGG30wWZqxxRynoG
|
||||
wMUFUgCCR8m+uxwqXewpYqdUbOBHYeFkXxIfn+MCggEAR5p8wQ1NHd4lNOekCfkP
|
||||
BOnWlChoKRPFjUlSL97h3gq2hW6amKimitF1LGkS1kvo+/1O3heFfZn9UxyK/kzr
|
||||
vg4vgAt4Tup3dUR6EXgrQW2Ev6YKreTEF4Awre2UxM+K9nY5wLxSKvuWJIA9w2AF
|
||||
kkr0mZj3hniK99n02e6UFlY1iB8OJoIA6tb5L7FcxpxNTjrYBNhfDygQ8Kp8Bp0r
|
||||
QZDVDHIUkEaXMjRKpRkiAOndgOurgAEK8V69C0DXtzypUX31jO+bYP8+NPlMxK3K
|
||||
Vn7f4LD75+M88e6lg+oyZmUpStM1GnWksvtlWLUSiNKLaEEGzv2EA6JB+I1dwUb8
|
||||
oQKCAQEAlmisUyn1/lpNnEzKsfUnRs53WxS2e1br5vJ5+pet3cjXT2btfp6J5/mf
|
||||
Tfqv5mZfTjYxydG0Kl3afI/SnhTcRS2/s4svrktZYLOLM2PAGYdCV6j1stXl4ObO
|
||||
eIfjzB3y1Zc2dEcWTylJ/lABoNGMPWFJQ67q8WS37pUHQPseJ++LmZFvlRyBgZBl
|
||||
VLqiHHiZ2ax+yC1ZxY4RECtEiYFplspNldNe+bP/lzTJftsUDe1FqRT/SvEam+1f
|
||||
kb//sbHkJ+l4BEv0Us3SIGwJ0BblhxLYO34IFVpheY4UQBy/nRaeUUdVR9r8JtYD
|
||||
z/cCLOrUJfealezimyd8SKPWPeHhrA==
|
||||
-----END PRIVATE KEY-----
|
||||
@@ -1,15 +0,0 @@
|
||||
{
|
||||
"name": "bench",
|
||||
"scripts": {
|
||||
"deps": "exit 0",
|
||||
"build": "exit 0",
|
||||
"bun:server": "TLS=1 PORT=50051 bun ./index.js",
|
||||
"node:server": "TLS=1 PORT=50051 node ./index.js",
|
||||
"bench": "ghz --cacert ./cert.pem --proto ./benchmark.proto --call benchmark.BenchmarkService.Ping -d '{\"message\": \"Hello\"}' --total=100000 localhost:50051",
|
||||
"bench:insecure": "ghz --insecure --proto ./benchmark.proto --call benchmark.BenchmarkService.Ping -d '{\"message\": \"Hello\"}' --total=100000 localhost:50051"
|
||||
},
|
||||
"dependencies": {
|
||||
"@grpc/grpc-js": "1.12.0",
|
||||
"@grpc/proto-loader": "0.7.10"
|
||||
}
|
||||
}
|
||||
@@ -3,9 +3,9 @@
|
||||
"scripts": {
|
||||
"deps": "exit 0",
|
||||
"build": "exit 0",
|
||||
"bench:bun": "bun bun.js",
|
||||
"bench:node": "node node.mjs",
|
||||
"bench:deno": "deno run -A --unstable deno.js",
|
||||
"bench:bun": "$BUN bun.js",
|
||||
"bench:node": "$NODE node.mjs",
|
||||
"bench:deno": "$DENO run -A --unstable deno.js",
|
||||
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
|
||||
},
|
||||
"dependencies": {
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
"scripts": {
|
||||
"deps": "exit 0",
|
||||
"build": "exit 0",
|
||||
"bench:bun": "bun bun.js | grep iter",
|
||||
"bench:node": "node node.mjs | grep iter",
|
||||
"bench:deno": "deno run -A --unstable deno.mjs | grep iter",
|
||||
"bench:bun": "$BUN bun.js | grep iter",
|
||||
"bench:node": "$NODE node.mjs | grep iter",
|
||||
"bench:deno": "$DENO run -A --unstable deno.mjs | grep iter",
|
||||
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,8 +3,8 @@
|
||||
"scripts": {
|
||||
"deps": "exit 0",
|
||||
"build": "exit 0",
|
||||
"bench:bun": "bun bun.js",
|
||||
"bench:node": "node node.mjs",
|
||||
"bench:bun": "$BUN bun.js",
|
||||
"bench:node": "$NODE node.mjs",
|
||||
"bench": "bun run bench:bun && bun run bench:node"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,15 +0,0 @@
|
||||
import { bench, run } from "./runner.mjs";
|
||||
|
||||
for (let size of [32, 2048, 1024 * 16, 1024 * 1024 * 2, 1024 * 1024 * 16]) {
|
||||
for (let fillSize of [4, 8, 16, 11]) {
|
||||
const buffer = Buffer.allocUnsafe(size);
|
||||
|
||||
const pattern = "x".repeat(fillSize);
|
||||
|
||||
bench(`Buffer.fill ${size} bytes with ${fillSize} byte value`, () => {
|
||||
buffer.fill(pattern);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
await run();
|
||||
@@ -5,10 +5,10 @@
|
||||
},
|
||||
"scripts": {
|
||||
"build": "exit 0",
|
||||
"bench:bun": "bun bun.js",
|
||||
"bench:node": "node node.mjs",
|
||||
"bench:bun": "$BUN bun.js",
|
||||
"bench:node": "$NODE node.mjs",
|
||||
"deps": "npm install && bash src/download.sh",
|
||||
"bench:deno": "deno run -A --unstable-ffi deno.js",
|
||||
"bench:deno": "$DENO run -A --unstable-ffi deno.js",
|
||||
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
|
||||
}
|
||||
}
|
||||
|
||||
65 build.zig
@@ -52,14 +52,14 @@ const BunBuildOptions = struct {
|
||||
/// instead of at compile time. This is disabled in release or if this flag
|
||||
/// is set (to allow CI to build a portable executable). Affected files:
|
||||
///
|
||||
/// - src/bake/runtime.ts (bundled)
|
||||
/// - src/kit/runtime.ts (bundled)
|
||||
/// - src/bun.js/api/FFI.h
|
||||
///
|
||||
/// A similar technique is used in C++ code for JavaScript builtins
|
||||
codegen_embed: bool = false,
|
||||
force_embed_code: bool = false,
|
||||
|
||||
/// `./build/codegen` or equivalent
|
||||
codegen_path: []const u8,
|
||||
generated_code_dir: []const u8,
|
||||
no_llvm: bool,
|
||||
|
||||
cached_options_module: ?*Module = null,
|
||||
@@ -71,7 +71,7 @@ const BunBuildOptions = struct {
|
||||
}
|
||||
|
||||
pub fn shouldEmbedCode(opts: *const BunBuildOptions) bool {
|
||||
return opts.optimize != .Debug or opts.codegen_embed;
|
||||
return opts.optimize != .Debug or opts.force_embed_code;
|
||||
}
|
||||
|
||||
pub fn buildOptionsModule(this: *BunBuildOptions, b: *Build) *Module {
|
||||
@@ -83,10 +83,10 @@ const BunBuildOptions = struct {
|
||||
opts.addOption([]const u8, "base_path", b.pathFromRoot("."));
|
||||
opts.addOption([]const u8, "codegen_path", std.fs.path.resolve(b.graph.arena, &.{
|
||||
b.build_root.path.?,
|
||||
this.codegen_path,
|
||||
this.generated_code_dir,
|
||||
}) catch @panic("OOM"));
|
||||
|
||||
opts.addOption(bool, "codegen_embed", this.shouldEmbedCode());
|
||||
opts.addOption(bool, "embed_code", this.shouldEmbedCode());
|
||||
opts.addOption(u32, "canary_revision", this.canary_revision orelse 0);
|
||||
opts.addOption(bool, "is_canary", this.canary_revision != null);
|
||||
opts.addOption(Version, "version", this.version);
|
||||
@@ -195,13 +195,12 @@ pub fn build(b: *Build) !void {
|
||||
|
||||
const target = b.resolveTargetQuery(target_query);
|
||||
|
||||
const codegen_path = b.pathFromRoot(
|
||||
b.option([]const u8, "codegen_path", "Set the generated code directory") orelse
|
||||
const generated_code_dir = b.pathFromRoot(
|
||||
b.option([]const u8, "generated-code", "Set the generated code directory") orelse
|
||||
"build/debug/codegen",
|
||||
);
|
||||
const codegen_embed = b.option(bool, "codegen_embed", "If codegen files should be embedded in the binary") orelse false;
|
||||
|
||||
const bun_version = b.option([]const u8, "version", "Value of `Bun.version`") orelse "0.0.0";
|
||||
const force_embed_js_code = b.option(bool, "force_embed_js_code", "Always embed JavaScript builtins") orelse false;
|
||||
|
||||
b.reference_trace = ref_trace: {
|
||||
const trace = b.option(u32, "reference-trace", "Set the reference trace") orelse 16;
|
||||
@@ -219,8 +218,8 @@ pub fn build(b: *Build) !void {
|
||||
.os = os,
|
||||
.arch = arch,
|
||||
|
||||
.codegen_path = codegen_path,
|
||||
.codegen_embed = codegen_embed,
|
||||
.generated_code_dir = generated_code_dir,
|
||||
.force_embed_code = force_embed_js_code,
|
||||
.no_llvm = no_llvm,
|
||||
|
||||
.version = try Version.parse(bun_version),
|
||||
@@ -352,7 +351,7 @@ pub inline fn addMultiCheck(
|
||||
.tracy_callstack_depth = root_build_options.tracy_callstack_depth,
|
||||
.version = root_build_options.version,
|
||||
.reported_nodejs_version = root_build_options.reported_nodejs_version,
|
||||
.codegen_path = root_build_options.codegen_path,
|
||||
.generated_code_dir = root_build_options.generated_code_dir,
|
||||
.no_llvm = root_build_options.no_llvm,
|
||||
};
|
||||
|
||||
@@ -476,45 +475,13 @@ fn addInternalPackages(b: *Build, obj: *Compile, opts: *BunBuildOptions) void {
|
||||
.{ .file = "ZigGeneratedClasses.zig", .import = "ZigGeneratedClasses" },
|
||||
.{ .file = "ResolvedSourceTag.zig", .import = "ResolvedSourceTag" },
|
||||
.{ .file = "ErrorCode.zig", .import = "ErrorCode" },
|
||||
.{ .file = "runtime.out.js" },
|
||||
.{ .file = "bake.client.js", .import = "bake-codegen/bake.client.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "bake.error.js", .import = "bake-codegen/bake.error.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "bake.server.js", .import = "bake-codegen/bake.server.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "bun-error/index.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "bun-error/bun-error.css", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "fallback-decoder.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "node-fallbacks/assert.js" },
|
||||
.{ .file = "node-fallbacks/buffer.js" },
|
||||
.{ .file = "node-fallbacks/console.js" },
|
||||
.{ .file = "node-fallbacks/constants.js" },
|
||||
.{ .file = "node-fallbacks/crypto.js" },
|
||||
.{ .file = "node-fallbacks/domain.js" },
|
||||
.{ .file = "node-fallbacks/events.js" },
|
||||
.{ .file = "node-fallbacks/http.js" },
|
||||
.{ .file = "node-fallbacks/https.js" },
|
||||
.{ .file = "node-fallbacks/net.js" },
|
||||
.{ .file = "node-fallbacks/os.js" },
|
||||
.{ .file = "node-fallbacks/path.js" },
|
||||
.{ .file = "node-fallbacks/process.js" },
|
||||
.{ .file = "node-fallbacks/punycode.js" },
|
||||
.{ .file = "node-fallbacks/querystring.js" },
|
||||
.{ .file = "node-fallbacks/stream.js" },
|
||||
.{ .file = "node-fallbacks/string_decoder.js" },
|
||||
.{ .file = "node-fallbacks/sys.js" },
|
||||
.{ .file = "node-fallbacks/timers.js" },
|
||||
.{ .file = "node-fallbacks/tty.js" },
|
||||
.{ .file = "node-fallbacks/url.js" },
|
||||
.{ .file = "node-fallbacks/util.js" },
|
||||
.{ .file = "node-fallbacks/zlib.js" },
|
||||
.{ .file = "kit.client.js", .import = "kit-codegen/kit.client.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "kit.server.js", .import = "kit-codegen/kit.server.js", .enable = opts.shouldEmbedCode() },
|
||||
}) |entry| {
|
||||
if (!@hasField(@TypeOf(entry), "enable") or entry.enable) {
|
||||
const path = b.pathJoin(&.{ opts.codegen_path, entry.file });
|
||||
const path = b.pathJoin(&.{ opts.generated_code_dir, entry.file });
|
||||
validateGeneratedPath(path);
|
||||
const import_path = if (@hasField(@TypeOf(entry), "import"))
|
||||
entry.import
|
||||
else
|
||||
entry.file;
|
||||
obj.root_module.addAnonymousImport(import_path, .{
|
||||
obj.root_module.addAnonymousImport(entry.import, .{
|
||||
.root_source_file = .{ .cwd_relative = path },
|
||||
});
|
||||
}
|
||||
|
||||
84 ci/README.md
@@ -1,84 +0,0 @@
# CI

This directory contains scripts for building CI images for Bun.

## Building

### `macOS`

On macOS, images are built using [`tart`](https://tart.run/), a tool that abstracts over the [`Virtualization.Framework`](https://developer.apple.com/documentation/virtualization) APIs, to run macOS VMs.

To install the dependencies required, run:

```sh
$ cd ci
$ bun run bootstrap
```

To build a vanilla macOS VM, run:

```sh
$ bun run build:darwin-aarch64-vanilla
```

This builds a vanilla macOS VM with the current macOS release on your machine. It runs scripts to disable things like spotlight and siri, but it does not install any software.

> Note: The image size is 50GB, so make sure you have enough disk space.

If you want to build a specific macOS release, you can run:

```sh
$ bun run build:darwin-aarch64-vanilla-15
```

> Note: You cannot build a newer release of macOS on an older macOS machine.

To build a macOS VM with software installed to build and test Bun, run:

```sh
$ bun run build:darwin-aarch64
```

## Running

### `macOS`

## How To

### Support a new macOS release

1. Visit [`ipsw.me`](https://ipsw.me/VirtualMac2,1) and find the IPSW of the macOS release you want to build.

2. Add an entry to [`ci/darwin/variables.pkr.hcl`](/ci/darwin/variables.pkr.hcl) with the following format:

```hcl
sonoma = {
  distro = "sonoma"
  release = "15"
  ipsw = "https://updates.cdn-apple.com/..."
}
```

3. Add matching scripts to [`ci/package.json`](/ci/package.json) to build the image, then test it:

```sh
$ bun run build:darwin-aarch64-vanilla-15
```

> Note: If you need to troubleshoot the build, you can remove the `headless = true` property from [`ci/darwin/image-vanilla.pkr.hcl`](/ci/darwin/image-vanilla.pkr.hcl) and the VM's screen will be displayed.

4. Test and build the non-vanilla image:

```sh
$ bun run build:darwin-aarch64-15
```

This will use the vanilla image and run the [`scripts/bootstrap.sh`](/scripts/bootstrap.sh) script to install the required software to build and test Bun.

5. Publish the images:

```sh
$ bun run login
$ bun run publish:darwin-aarch64-vanilla-15
$ bun run publish:darwin-aarch64-15
```
@@ -1,46 +0,0 @@
|
||||
# Generates a vanilla macOS VM with optimized settings for virtualized environments.
|
||||
# See login.sh and optimize.sh for details.
|
||||
|
||||
data "external-raw" "boot-script" {
|
||||
program = ["sh", "-c", templatefile("scripts/boot-image.sh", var)]
|
||||
}
|
||||
|
||||
source "tart-cli" "bun-darwin-aarch64-vanilla" {
|
||||
vm_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
|
||||
from_ipsw = local.release.ipsw
|
||||
cpu_count = local.cpu_count
|
||||
memory_gb = local.memory_gb
|
||||
disk_size_gb = local.disk_size_gb
|
||||
ssh_username = local.username
|
||||
ssh_password = local.password
|
||||
ssh_timeout = "120s"
|
||||
create_grace_time = "30s"
|
||||
boot_command = split("\n", data.external-raw.boot-script.result)
|
||||
headless = true # Disable if you need to debug why the boot_command is not working
|
||||
}
|
||||
|
||||
build {
|
||||
sources = ["source.tart-cli.bun-darwin-aarch64-vanilla"]
|
||||
|
||||
provisioner "file" {
|
||||
content = file("scripts/setup-login.sh")
|
||||
destination = "/tmp/setup-login.sh"
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = ["echo \"${local.password}\" | sudo -S sh -c 'sh /tmp/setup-login.sh \"${local.username}\" \"${local.password}\"'"]
|
||||
}
|
||||
|
||||
provisioner "file" {
|
||||
content = file("scripts/optimize-machine.sh")
|
||||
destination = "/tmp/optimize-machine.sh"
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = ["sudo sh /tmp/optimize-machine.sh"]
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = ["sudo rm -rf /tmp/*"]
|
||||
}
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
# Generates a macOS VM with software installed to build and test Bun.
|
||||
|
||||
source "tart-cli" "bun-darwin-aarch64" {
|
||||
vm_name = "bun-darwin-aarch64-${local.release.distro}-${local.release.release}"
|
||||
vm_base_name = "bun-darwin-aarch64-vanilla-${local.release.distro}-${local.release.release}"
|
||||
cpu_count = local.cpu_count
|
||||
memory_gb = local.memory_gb
|
||||
disk_size_gb = local.disk_size_gb
|
||||
ssh_username = local.username
|
||||
ssh_password = local.password
|
||||
ssh_timeout = "120s"
|
||||
headless = true
|
||||
}
|
||||
|
||||
build {
|
||||
sources = ["source.tart-cli.bun-darwin-aarch64"]
|
||||
|
||||
provisioner "file" {
|
||||
content = file("../../scripts/bootstrap.sh")
|
||||
destination = "/tmp/bootstrap.sh"
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = ["CI=true sh /tmp/bootstrap.sh"]
|
||||
}
|
||||
|
||||
provisioner "file" {
|
||||
source = "darwin/plists/"
|
||||
destination = "/tmp/"
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = [
|
||||
"sudo ls /tmp/",
|
||||
"sudo mv /tmp/*.plist /Library/LaunchDaemons/",
|
||||
"sudo chown root:wheel /Library/LaunchDaemons/*.plist",
|
||||
"sudo chmod 644 /Library/LaunchDaemons/*.plist",
|
||||
]
|
||||
}
|
||||
|
||||
provisioner "shell" {
|
||||
inline = ["sudo rm -rf /tmp/*"]
|
||||
}
|
||||
}
|
||||
@@ -1,44 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.buildkite.buildkite-agent</string>
|
||||
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/usr/local/bin/buildkite-agent</string>
|
||||
<string>start</string>
|
||||
</array>
|
||||
|
||||
<key>KeepAlive</key>
|
||||
<dict>
|
||||
<key>SuccessfulExit</key>
|
||||
<false />
|
||||
</dict>
|
||||
|
||||
<key>RunAtLoad</key>
|
||||
<true />
|
||||
|
||||
<key>StandardOutPath</key>
|
||||
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
|
||||
|
||||
<key>StandardErrorPath</key>
|
||||
<string>/var/buildkite-agent/logs/buildkite-agent.log</string>
|
||||
|
||||
<key>EnvironmentVariables</key>
|
||||
<dict>
|
||||
<key>BUILDKITE_AGENT_CONFIG</key>
|
||||
<string>/etc/buildkite-agent/buildkite-agent.cfg</string>
|
||||
</dict>
|
||||
|
||||
<key>LimitLoadToSessionType</key>
|
||||
<array>
|
||||
<string>Aqua</string>
|
||||
<string>LoginWindow</string>
|
||||
<string>Background</string>
|
||||
<string>StandardIO</string>
|
||||
<string>System</string>
|
||||
</array>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,20 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.tailscale.tailscaled</string>
|
||||
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/usr/local/bin/tailscale</string>
|
||||
<string>up</string>
|
||||
<string>--ssh</string>
|
||||
<string>--authkey</string>
|
||||
<string>${TAILSCALE_AUTHKEY}</string>
|
||||
</array>
|
||||
|
||||
<key>RunAtLoad</key>
|
||||
<true />
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,16 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>Label</key>
|
||||
<string>com.tailscale.tailscaled</string>
|
||||
|
||||
<key>ProgramArguments</key>
|
||||
<array>
|
||||
<string>/usr/local/bin/tailscaled</string>
|
||||
</array>
|
||||
|
||||
<key>RunAtLoad</key>
|
||||
<true />
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,124 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This script generates the boot commands for the macOS installer GUI.
|
||||
# It is run on your local machine, not inside the VM.
|
||||
|
||||
# Sources:
|
||||
# - https://github.com/cirruslabs/macos-image-templates/blob/master/templates/vanilla-sequoia.pkr.hcl
|
||||
|
||||
if ! [ "${release}" ] || ! [ "${username}" ] || ! [ "${password}" ]; then
|
||||
echo "Script must be run with variables: release, username, and password" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Hello, hola, bonjour, etc.
|
||||
echo "<wait120s><spacebar>"
|
||||
|
||||
# Select Your Country and Region
|
||||
echo "<wait30s>italiano<esc>english<enter>"
|
||||
echo "<wait30s>united states<leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Written and Spoken Languages
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Accessibility
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Data & Privacy
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Migration Assistant
|
||||
echo "<wait30s><tab><tab><tab><spacebar>"
|
||||
|
||||
# Sign In with Your Apple ID
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Are you sure you want to skip signing in with an Apple ID?
|
||||
echo "<wait30s><tab><spacebar>"
|
||||
|
||||
# Terms and Conditions
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# I have read and agree to the macOS Software License Agreement
|
||||
echo "<wait30s><tab><spacebar>"
|
||||
|
||||
# Create a Computer Account
|
||||
echo "<wait30s>${username}<tab><tab>${password}<tab>${password}<tab><tab><tab><spacebar>"
|
||||
|
||||
# Enable Location Services
|
||||
echo "<wait60s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Are you sure you don't want to use Location Services?
|
||||
echo "<wait30s><tab><spacebar>"
|
||||
|
||||
# Select Your Time Zone
|
||||
echo "<wait30s><tab>UTC<enter><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Analytics
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Screen Time
|
||||
echo "<wait30s><tab><spacebar>"
|
||||
|
||||
# Siri
|
||||
echo "<wait30s><tab><spacebar><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Choose Your Look
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
if [ "${release}" = "13" ] || [ "${release}" = "14" ]; then
|
||||
# Enable Voice Over
|
||||
echo "<wait30s><leftAltOn><f5><leftAltOff><wait5s>v"
|
||||
else
|
||||
# Welcome to Mac
|
||||
echo "<wait30s><spacebar>"
|
||||
|
||||
# Enable Keyboard navigation
|
||||
echo "<wait30s><leftAltOn><spacebar><leftAltOff>Terminal<enter>"
|
||||
echo "<wait30s>defaults write NSGlobalDomain AppleKeyboardUIMode -int 3<enter>"
|
||||
echo "<wait30s><leftAltOn>q<leftAltOff>"
|
||||
fi
|
||||
|
||||
# Now that the installation is done, open "System Settings"
|
||||
echo "<wait30s><leftAltOn><spacebar><leftAltOff>System Settings<enter>"
|
||||
|
||||
# Navigate to "Sharing"
|
||||
echo "<wait30s><leftAltOn>f<leftAltOff>sharing<enter>"
|
||||
|
||||
if [ "${release}" = "13" ]; then
|
||||
# Navigate to "Screen Sharing" and enable it
|
||||
echo "<wait30s><tab><down><spacebar>"
|
||||
|
||||
# Navigate to "Remote Login" and enable it
|
||||
echo "<wait30s><tab><tab><tab><tab><tab><tab><spacebar>"
|
||||
|
||||
# Open "Remote Login" details
|
||||
echo "<wait30s><tab><spacebar>"
|
||||
|
||||
# Enable "Full Disk Access"
|
||||
echo "<wait30s><tab><spacebar>"
|
||||
|
||||
# Click "Done"
|
||||
echo "<wait30s><leftShiftOn><tab><leftShiftOff><leftShiftOn><tab><leftShiftOff><spacebar>"
|
||||
|
||||
# Disable Voice Over
|
||||
echo "<leftAltOn><f5><leftAltOff>"
|
||||
elif [ "${release}" = "14" ]; then
|
||||
# Navigate to "Screen Sharing" and enable it
|
||||
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
|
||||
|
||||
# Navigate to "Remote Login" and enable it
|
||||
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
|
||||
|
||||
# Disable Voice Over
|
||||
echo "<wait30s><leftAltOn><f5><leftAltOff>"
|
||||
elif [ "${release}" = "15" ]; then
|
||||
# Navigate to "Screen Sharing" and enable it
|
||||
echo "<wait30s><tab><tab><tab><tab><tab><spacebar>"
|
||||
|
||||
# Navigate to "Remote Login" and enable it
|
||||
echo "<wait30s><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><tab><spacebar>"
|
||||
fi
|
||||
|
||||
# Quit System Settings
|
||||
echo "<wait30s><leftAltOn>q<leftAltOff>"
|
||||
@@ -1,122 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This script optimizes macOS for virtualized environments.
|
||||
# It disables things like spotlight, screen saver, and sleep.
|
||||
|
||||
# Sources:
|
||||
# - https://github.com/sickcodes/osx-optimizer
|
||||
# - https://github.com/koding88/MacBook-Optimization-Script
|
||||
# - https://www.macstadium.com/blog/simple-optimizations-for-macos-and-ios-build-agents
|
||||
|
||||
if [ "$(id -u)" != "0" ]; then
|
||||
echo "This script must be run using sudo." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
execute() {
|
||||
echo "$ $@" >&2
|
||||
if ! "$@"; then
|
||||
echo "Command failed: $@" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
disable_software_update() {
|
||||
execute softwareupdate --schedule off
|
||||
execute defaults write com.apple.SoftwareUpdate AutomaticDownload -bool false
|
||||
execute defaults write com.apple.SoftwareUpdate AutomaticCheckEnabled -bool false
|
||||
execute defaults write com.apple.SoftwareUpdate ConfigDataInstall -int 0
|
||||
execute defaults write com.apple.SoftwareUpdate CriticalUpdateInstall -int 0
|
||||
execute defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 0
|
||||
execute defaults write com.apple.SoftwareUpdate AutomaticDownload -int 0
|
||||
execute defaults write com.apple.commerce AutoUpdate -bool false
|
||||
execute defaults write com.apple.commerce AutoUpdateRestartRequired -bool false
|
||||
}
|
||||
|
||||
disable_spotlight() {
|
||||
execute mdutil -i off -a
|
||||
execute mdutil -E /
|
||||
}
|
||||
|
||||
disable_siri() {
|
||||
execute launchctl unload -w /System/Library/LaunchAgents/com.apple.Siri.agent.plist
|
||||
execute defaults write com.apple.Siri StatusMenuVisible -bool false
|
||||
execute defaults write com.apple.Siri UserHasDeclinedEnable -bool true
|
||||
execute defaults write com.apple.assistant.support "Assistant Enabled" 0
|
||||
}
|
||||
|
||||
disable_sleep() {
|
||||
execute systemsetup -setsleep Never
|
||||
execute systemsetup -setcomputersleep Never
|
||||
execute systemsetup -setdisplaysleep Never
|
||||
execute systemsetup -setharddisksleep Never
|
||||
}
|
||||
|
||||
disable_screen_saver() {
|
||||
execute defaults write com.apple.screensaver loginWindowIdleTime 0
|
||||
execute defaults write com.apple.screensaver idleTime 0
|
||||
}
|
||||
|
||||
disable_screen_lock() {
|
||||
execute defaults write com.apple.loginwindow DisableScreenLock -bool true
|
||||
}
|
||||
|
||||
disable_wallpaper() {
|
||||
execute defaults write com.apple.loginwindow DesktopPicture ""
|
||||
}
|
||||
|
||||
disable_application_state() {
|
||||
execute defaults write com.apple.loginwindow TALLogoutSavesState -bool false
|
||||
}
|
||||
|
||||
disable_accessibility() {
|
||||
execute defaults write com.apple.Accessibility DifferentiateWithoutColor -int 1
|
||||
execute defaults write com.apple.Accessibility ReduceMotionEnabled -int 1
|
||||
execute defaults write com.apple.universalaccess reduceMotion -int 1
|
||||
execute defaults write com.apple.universalaccess reduceTransparency -int 1
|
||||
}
|
||||
|
||||
disable_dashboard() {
|
||||
execute defaults write com.apple.dashboard mcx-disabled -boolean YES
|
||||
execute killall Dock
|
||||
}
|
||||
|
||||
disable_animations() {
|
||||
execute defaults write NSGlobalDomain NSAutomaticWindowAnimationsEnabled -bool false
|
||||
execute defaults write -g QLPanelAnimationDuration -float 0
|
||||
execute defaults write com.apple.finder DisableAllAnimations -bool true
|
||||
}
|
||||
|
||||
disable_time_machine() {
|
||||
execute tmutil disable
|
||||
}
|
||||
|
||||
enable_performance_mode() {
|
||||
# https://support.apple.com/en-us/101992
|
||||
if ! [ $(nvram boot-args 2>/dev/null | grep -q serverperfmode) ]; then
|
||||
execute nvram boot-args="serverperfmode=1 $(nvram boot-args 2>/dev/null | cut -f 2-)"
|
||||
fi
|
||||
}
|
||||
|
||||
add_terminal_to_desktop() {
|
||||
execute ln -sf /System/Applications/Utilities/Terminal.app ~/Desktop/Terminal
|
||||
}
|
||||
|
||||
main() {
|
||||
disable_software_update
|
||||
disable_spotlight
|
||||
disable_siri
|
||||
disable_sleep
|
||||
disable_screen_saver
|
||||
disable_screen_lock
|
||||
disable_wallpaper
|
||||
disable_application_state
|
||||
disable_accessibility
|
||||
disable_dashboard
|
||||
disable_animations
|
||||
disable_time_machine
|
||||
enable_performance_mode
|
||||
add_terminal_to_desktop
|
||||
}
|
||||
|
||||
main
|
||||
@@ -1,78 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# This script generates a /etc/kcpassword file to enable auto-login on macOS.
|
||||
# Yes, this stores your password in plain text. Do NOT do this on your local machine.
|
||||
|
||||
# Sources:
|
||||
# - https://github.com/xfreebird/kcpassword/blob/master/kcpassword
|
||||
|
||||
if [ "$(id -u)" != "0" ]; then
|
||||
echo "This script must be run using sudo." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
execute() {
|
||||
echo "$ $@" >&2
|
||||
if ! "$@"; then
|
||||
echo "Command failed: $@" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
kcpassword() {
|
||||
passwd="$1"
|
||||
key="7d 89 52 23 d2 bc dd ea a3 b9 1f"
|
||||
passwd_hex=$(printf "%s" "$passwd" | xxd -p | tr -d '\n')
|
||||
|
||||
key_len=33
|
||||
passwd_len=${#passwd_hex}
|
||||
remainder=$((passwd_len % key_len))
|
||||
if [ $remainder -ne 0 ]; then
|
||||
padding=$((key_len - remainder))
|
||||
passwd_hex="${passwd_hex}$(printf '%0*x' $((padding / 2)) 0)"
|
||||
fi
|
||||
|
||||
result=""
|
||||
i=0
|
||||
while [ $i -lt ${#passwd_hex} ]; do
|
||||
for byte in $key; do
|
||||
[ $i -ge ${#passwd_hex} ] && break
|
||||
p="${passwd_hex:$i:2}"
|
||||
r=$(printf '%02x' $((0x$p ^ 0x$byte)))
|
||||
result="${result}${r}"
|
||||
i=$((i + 2))
|
||||
done
|
||||
done
|
||||
|
||||
echo "$result"
|
||||
}
|
||||
|
||||
login() {
|
||||
username="$1"
|
||||
password="$2"
|
||||
|
||||
enable_passwordless_sudo() {
|
||||
execute mkdir -p /etc/sudoers.d/
|
||||
echo "${username} ALL=(ALL) NOPASSWD: ALL" | EDITOR=tee execute visudo "/etc/sudoers.d/${username}-nopasswd"
|
||||
}
|
||||
|
||||
enable_auto_login() {
|
||||
echo "00000000: 1ced 3f4a bcbc ba2c caca 4e82" | execute xxd -r - /etc/kcpassword
|
||||
execute defaults write /Library/Preferences/com.apple.loginwindow autoLoginUser "${username}"
|
||||
}
|
||||
|
||||
disable_screen_lock() {
|
||||
execute sysadminctl -screenLock off -password "${password}"
|
||||
}
|
||||
|
||||
enable_passwordless_sudo
|
||||
enable_auto_login
|
||||
disable_screen_lock
|
||||
}
|
||||
|
||||
if [ $# -ne 2 ]; then
|
||||
echo "Usage: $0 <username> <password>" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
login "$@"
|
||||
@@ -1,78 +0,0 @@
|
||||
packer {
|
||||
required_plugins {
|
||||
tart = {
|
||||
version = ">= 1.12.0"
|
||||
source = "github.com/cirruslabs/tart"
|
||||
}
|
||||
external = {
|
||||
version = ">= 0.0.2"
|
||||
source = "github.com/joomcode/external"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable "release" {
|
||||
type = number
|
||||
default = 13
|
||||
}
|
||||
|
||||
variable "username" {
|
||||
type = string
|
||||
default = "admin"
|
||||
}
|
||||
|
||||
variable "password" {
|
||||
type = string
|
||||
default = "admin"
|
||||
}
|
||||
|
||||
variable "cpu_count" {
|
||||
type = number
|
||||
default = 2
|
||||
}
|
||||
|
||||
variable "memory_gb" {
|
||||
type = number
|
||||
default = 4
|
||||
}
|
||||
|
||||
variable "disk_size_gb" {
|
||||
type = number
|
||||
default = 50
|
||||
}
|
||||
|
||||
locals {
|
||||
sequoia = {
|
||||
tier = 1
|
||||
distro = "sequoia"
|
||||
release = "15"
|
||||
ipsw = "https://updates.cdn-apple.com/2024FallFCS/fullrestores/062-78489/BDA44327-C79E-4608-A7E0-455A7E91911F/UniversalMac_15.0_24A335_Restore.ipsw"
|
||||
}
|
||||
|
||||
sonoma = {
|
||||
tier = 2
|
||||
distro = "sonoma"
|
||||
release = "14"
|
||||
ipsw = "https://updates.cdn-apple.com/2023FallFCS/fullrestores/042-54934/0E101AD6-3117-4B63-9BF1-143B6DB9270A/UniversalMac_14.0_23A344_Restore.ipsw"
|
||||
}
|
||||
|
||||
ventura = {
|
||||
tier = 2
|
||||
distro = "ventura"
|
||||
release = "13"
|
||||
ipsw = "https://updates.cdn-apple.com/2022FallFCS/fullrestores/012-92188/2C38BCD1-2BFF-4A10-B358-94E8E28BE805/UniversalMac_13.0_22A380_Restore.ipsw"
|
||||
}
|
||||
|
||||
releases = {
|
||||
15 = local.sequoia
|
||||
14 = local.sonoma
|
||||
13 = local.ventura
|
||||
}
|
||||
|
||||
release = local.releases[var.release]
|
||||
username = var.username
|
||||
password = var.password
|
||||
cpu_count = var.cpu_count
|
||||
memory_gb = var.memory_gb
|
||||
disk_size_gb = var.disk_size_gb
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
{
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"bootstrap": "brew install gh jq cirruslabs/cli/tart cirruslabs/cli/sshpass hashicorp/tap/packer && packer init darwin",
|
||||
"login": "gh auth token | tart login ghcr.io --username $(gh api user --jq .login) --password-stdin",
|
||||
"fetch:image-name": "echo ghcr.io/oven-sh/bun-vm",
|
||||
"fetch:darwin-version": "echo 1",
|
||||
"fetch:macos-version": "sw_vers -productVersion | cut -d. -f1",
|
||||
"fetch:script-version": "cat ../scripts/bootstrap.sh | grep 'v=' | sed 's/v=\"//;s/\"//' | head -n 1",
|
||||
"build:darwin-aarch64-vanilla": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=$(bun fetch:macos-version) darwin/",
|
||||
"build:darwin-aarch64-vanilla-15": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=15 darwin/",
|
||||
"build:darwin-aarch64-vanilla-14": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=14 darwin/",
|
||||
"build:darwin-aarch64-vanilla-13": "packer build '-only=*.bun-darwin-aarch64-vanilla' -var release=13 darwin/",
|
||||
"build:darwin-aarch64": "packer build '-only=*.bun-darwin-aarch64' -var release=$(bun fetch:macos-version) darwin/",
|
||||
"build:darwin-aarch64-15": "packer build '-only=*.bun-darwin-aarch64' -var release=15 darwin/",
|
||||
"build:darwin-aarch64-14": "packer build '-only=*.bun-darwin-aarch64' -var release=14 darwin/",
|
||||
"build:darwin-aarch64-13": "packer build '-only=*.bun-darwin-aarch64' -var release=13 darwin/",
|
||||
"publish:darwin-aarch64-vanilla": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-vanilla-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:darwin-version)\"",
|
||||
"publish:darwin-aarch64-vanilla-15": "tart push bun-darwin-aarch64-vanilla-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sequoia-15-v$(bun fetch:darwin-version)\"",
|
||||
"publish:darwin-aarch64-vanilla-14": "tart push bun-darwin-aarch64-vanilla-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-vanilla-sonoma-14-v$(bun fetch:darwin-version)\"",
|
||||
"publish:darwin-aarch64-vanilla-13": "tart push bun-darwin-aarch64-vanilla-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-vanilla-ventura-13-v$(bun fetch:darwin-version)\"",
|
||||
"publish:darwin-aarch64": "image=$(tart list --format json | jq -r \".[] | select(.Name | test(\\\"^bun-darwin-aarch64-.*-$(bun fetch:macos-version)$\\\")) | .Name\" | head -n 1 | sed 's/bun-//'); tart push \"bun-$image\" \"ghcr.io/oven-sh/bun-vm:$image-v$(bun fetch:script-version)\"",
|
||||
"publish:darwin-aarch64-15": "tart push bun-darwin-aarch64-sequoia-15 \"$(bun fetch:image-name):darwin-aarch64-sequoia-15-v$(bun fetch:script-version)\"",
|
||||
"publish:darwin-aarch64-14": "tart push bun-darwin-aarch64-sonoma-14 \"$(bun fetch:image-name):darwin-aarch64-sonoma-14-v$(bun fetch:script-version)\"",
|
||||
"publish:darwin-aarch64-13": "tart push bun-darwin-aarch64-ventura-13 \"$(bun fetch:image-name):darwin-aarch64-ventura-13-v$(bun fetch:script-version)\""
|
||||
}
|
||||
}
|
||||
@@ -128,7 +128,6 @@ optionx(CACHE_PATH FILEPATH "The path to the cache directory" DEFAULT ${BUILD_PA
|
||||
optionx(CACHE_STRATEGY "read-write|read-only|write-only|none" "The strategy to use for caching" DEFAULT "read-write")
|
||||
|
||||
optionx(CI BOOL "If CI is enabled" DEFAULT OFF)
|
||||
optionx(ENABLE_ANALYSIS BOOL "If static analysis targets should be enabled" DEFAULT OFF)
|
||||
|
||||
if(CI)
|
||||
set(WARNING FATAL_ERROR)
|
||||
@@ -146,6 +145,9 @@ endif()
|
||||
optionx(VENDOR_PATH FILEPATH "The path to the vendor directory" DEFAULT ${CWD}/vendor)
|
||||
optionx(TMP_PATH FILEPATH "The path to the temporary directory" DEFAULT ${BUILD_PATH}/tmp)
|
||||
|
||||
optionx(FRESH BOOL "Set when --fresh is used" DEFAULT OFF)
|
||||
optionx(CLEAN BOOL "Set when --clean is used" DEFAULT OFF)
|
||||
|
||||
# --- Helper functions ---
|
||||
|
||||
# setenv()
|
||||
|
||||
@@ -79,7 +79,7 @@ endif()
|
||||
|
||||
optionx(CANARY_REVISION STRING "The canary revision of the build" DEFAULT ${DEFAULT_CANARY_REVISION})
|
||||
|
||||
if(RELEASE AND LINUX AND CI)
|
||||
if(RELEASE AND LINUX)
|
||||
set(DEFAULT_LTO ON)
|
||||
else()
|
||||
set(DEFAULT_LTO OFF)
|
||||
|
||||
@@ -1,11 +1,19 @@
|
||||
# https://clang.llvm.org/docs/ClangFormat.html
|
||||
|
||||
file(GLOB BUN_H_SOURCES LIST_DIRECTORIES false ${CONFIGURE_DEPENDS}
|
||||
${CWD}/src/bun.js/bindings/*.h
|
||||
${CWD}/src/bun.js/modules/*.h
|
||||
find_command(
|
||||
VARIABLE
|
||||
CLANG_FORMAT_PROGRAM
|
||||
VERSION_VARIABLE
|
||||
LLVM_VERSION
|
||||
COMMAND
|
||||
clang-format
|
||||
VERSION
|
||||
${LLVM_VERSION}
|
||||
REQUIRED
|
||||
OFF
|
||||
)
|
||||
|
||||
set(CLANG_FORMAT_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES} ${BUN_H_SOURCES})
|
||||
set(CLANG_FORMAT_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES})
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
|
||||
@@ -1,5 +1,18 @@
|
||||
# https://clang.llvm.org/extra/clang-tidy/
|
||||
|
||||
find_command(
|
||||
VARIABLE
|
||||
CLANG_TIDY_PROGRAM
|
||||
VERSION_VARIABLE
|
||||
LLVM_VERSION
|
||||
COMMAND
|
||||
clang-tidy
|
||||
VERSION
|
||||
${LLVM_VERSION}
|
||||
REQUIRED
|
||||
OFF
|
||||
)
|
||||
|
||||
set(CLANG_TIDY_SOURCES ${BUN_C_SOURCES} ${BUN_CXX_SOURCES})
|
||||
|
||||
set(CLANG_TIDY_COMMAND ${CLANG_TIDY_PROGRAM}
|
||||
|
||||
@@ -21,16 +21,10 @@ else()
|
||||
set(buns ${bun})
|
||||
endif()
|
||||
|
||||
# Some commands use this path, and some do not.
|
||||
# In the future, change those commands so that generated files are written to this path.
|
||||
optionx(CODEGEN_PATH FILEPATH "Path to the codegen directory" DEFAULT ${BUILD_PATH}/codegen)
|
||||
|
||||
if(RELEASE OR CI)
|
||||
set(DEFAULT_CODEGEN_EMBED ON)
|
||||
else()
|
||||
set(DEFAULT_CODEGEN_EMBED OFF)
|
||||
endif()
|
||||
|
||||
optionx(CODEGEN_EMBED BOOL "If codegen files should be embedded in the binary" DEFAULT ${DEFAULT_CODEGEN_EMBED})
|
||||
|
||||
if((NOT DEFINED CONFIGURE_DEPENDS AND NOT CI) OR CONFIGURE_DEPENDS)
|
||||
set(CONFIGURE_DEPENDS "CONFIGURE_DEPENDS")
|
||||
else()
|
||||
@@ -39,6 +33,39 @@ endif()
|
||||
|
||||
# --- Codegen ---
|
||||
|
||||
set(BUN_ZIG_IDENTIFIER_SOURCE ${CWD}/src/js_lexer)
|
||||
set(BUN_ZIG_IDENTIFIER_SCRIPT ${BUN_ZIG_IDENTIFIER_SOURCE}/identifier_data.zig)
|
||||
|
||||
file(GLOB BUN_ZIG_IDENTIFIER_SOURCES ${CONFIGURE_DEPENDS}
|
||||
${BUN_ZIG_IDENTIFIER_SCRIPT}
|
||||
${BUN_ZIG_IDENTIFIER_SOURCE}/*.zig
|
||||
)
|
||||
|
||||
set(BUN_ZIG_IDENTIFIER_OUTPUTS
|
||||
${BUN_ZIG_IDENTIFIER_SOURCE}/id_continue_bitset.blob
|
||||
${BUN_ZIG_IDENTIFIER_SOURCE}/id_continue_bitset.meta.blob
|
||||
${BUN_ZIG_IDENTIFIER_SOURCE}/id_start_bitset.blob
|
||||
${BUN_ZIG_IDENTIFIER_SOURCE}/id_start_bitset.meta.blob
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
bun-identifier-data
|
||||
COMMENT
|
||||
"Generating src/js_lexer/*.blob"
|
||||
COMMAND
|
||||
${ZIG_EXECUTABLE}
|
||||
run
|
||||
${CMAKE_ZIG_FLAGS}
|
||||
${BUN_ZIG_IDENTIFIER_SCRIPT}
|
||||
SOURCES
|
||||
${BUN_ZIG_IDENTIFIER_SOURCES}
|
||||
TARGETS
|
||||
clone-zig
|
||||
OUTPUTS
|
||||
${BUN_ZIG_IDENTIFIER_OUTPUTS}
|
||||
)
|
||||
|
||||
set(BUN_ERROR_SOURCE ${CWD}/packages/bun-error)
|
||||
|
||||
file(GLOB BUN_ERROR_SOURCES ${CONFIGURE_DEPENDS}
|
||||
@@ -49,7 +76,7 @@ file(GLOB BUN_ERROR_SOURCES ${CONFIGURE_DEPENDS}
|
||||
${BUN_ERROR_SOURCE}/img/*
|
||||
)
|
||||
|
||||
set(BUN_ERROR_OUTPUT ${CODEGEN_PATH}/bun-error)
|
||||
set(BUN_ERROR_OUTPUT ${BUN_ERROR_SOURCE}/dist)
|
||||
set(BUN_ERROR_OUTPUTS
|
||||
${BUN_ERROR_OUTPUT}/index.js
|
||||
${BUN_ERROR_OUTPUT}/bun-error.css
|
||||
@@ -87,13 +114,13 @@ register_command(
|
||||
)
|
||||
|
||||
set(BUN_FALLBACK_DECODER_SOURCE ${CWD}/src/fallback.ts)
|
||||
set(BUN_FALLBACK_DECODER_OUTPUT ${CODEGEN_PATH}/fallback-decoder.js)
|
||||
set(BUN_FALLBACK_DECODER_OUTPUT ${CWD}/src/fallback.out.js)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
bun-fallback-decoder
|
||||
COMMENT
|
||||
"Building fallback-decoder.js"
|
||||
"Building src/fallback.out.js"
|
||||
COMMAND
|
||||
${ESBUILD_EXECUTABLE} ${ESBUILD_ARGS}
|
||||
${BUN_FALLBACK_DECODER_SOURCE}
|
||||
@@ -110,7 +137,7 @@ register_command(
|
||||
)
|
||||
|
||||
set(BUN_RUNTIME_JS_SOURCE ${CWD}/src/runtime.bun.js)
|
||||
set(BUN_RUNTIME_JS_OUTPUT ${CODEGEN_PATH}/runtime.out.js)
|
||||
set(BUN_RUNTIME_JS_OUTPUT ${CWD}/src/runtime.out.js)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
@@ -140,7 +167,7 @@ file(GLOB BUN_NODE_FALLBACKS_SOURCES ${CONFIGURE_DEPENDS}
|
||||
${BUN_NODE_FALLBACKS_SOURCE}/*.js
|
||||
)
|
||||
|
||||
set(BUN_NODE_FALLBACKS_OUTPUT ${CODEGEN_PATH}/node-fallbacks)
|
||||
set(BUN_NODE_FALLBACKS_OUTPUT ${BUN_NODE_FALLBACKS_SOURCE}/out)
|
||||
set(BUN_NODE_FALLBACKS_OUTPUTS)
|
||||
foreach(source ${BUN_NODE_FALLBACKS_SOURCES})
|
||||
get_filename_component(filename ${source} NAME)
|
||||
@@ -160,7 +187,7 @@ register_command(
|
||||
TARGET
|
||||
bun-node-fallbacks
|
||||
COMMENT
|
||||
"Building node-fallbacks/*.js"
|
||||
"Building src/node-fallbacks/*.js"
|
||||
CWD
|
||||
${BUN_NODE_FALLBACKS_SOURCE}
|
||||
COMMAND
|
||||
@@ -297,41 +324,41 @@ register_command(
|
||||
${BUN_JAVASCRIPT_OUTPUTS}
|
||||
)
|
||||
|
||||
set(BUN_BAKE_RUNTIME_CODEGEN_SCRIPT ${CWD}/src/codegen/bake-codegen.ts)
|
||||
set(BUN_KIT_RUNTIME_CODEGEN_SCRIPT ${CWD}/src/codegen/kit-codegen.ts)
|
||||
|
||||
file(GLOB_RECURSE BUN_BAKE_RUNTIME_SOURCES ${CONFIGURE_DEPENDS}
|
||||
${CWD}/src/bake/*.ts
|
||||
${CWD}/src/bake/*/*.ts
|
||||
${CWD}/src/bake/*/*.css
|
||||
file(GLOB_RECURSE BUN_KIT_RUNTIME_SOURCES ${CONFIGURE_DEPENDS}
|
||||
${CWD}/src/kit/*.ts
|
||||
${CWD}/src/kit/*/*.ts
|
||||
${CWD}/src/kit/*/*.css
|
||||
)
|
||||
|
||||
list(APPEND BUN_BAKE_RUNTIME_CODEGEN_SOURCES
|
||||
list(APPEND BUN_KIT_RUNTIME_CODEGEN_SOURCES
|
||||
${CWD}/src/bun.js/bindings/InternalModuleRegistry.cpp
|
||||
)
|
||||
|
||||
set(BUN_BAKE_RUNTIME_OUTPUTS
|
||||
${CODEGEN_PATH}/bake.client.js
|
||||
${CODEGEN_PATH}/bake.server.js
|
||||
set(BUN_KIT_RUNTIME_OUTPUTS
|
||||
${CODEGEN_PATH}/kit_empty_file
|
||||
${CODEGEN_PATH}/kit.client.js
|
||||
${CODEGEN_PATH}/kit.server.js
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
bun-bake-codegen
|
||||
bun-kit-codegen
|
||||
COMMENT
|
||||
"Bundling Kit Runtime"
|
||||
COMMAND
|
||||
${BUN_EXECUTABLE}
|
||||
run
|
||||
${BUN_BAKE_RUNTIME_CODEGEN_SCRIPT}
|
||||
${BUN_KIT_RUNTIME_CODEGEN_SCRIPT}
|
||||
--debug=${DEBUG}
|
||||
--codegen_root=${CODEGEN_PATH}
|
||||
SOURCES
|
||||
${BUN_BAKE_RUNTIME_SOURCES}
|
||||
${BUN_BAKE_RUNTIME_CODEGEN_SOURCES}
|
||||
${BUN_BAKE_RUNTIME_CODEGEN_SCRIPT}
|
||||
${BUN_KIT_RUNTIME_SOURCES}
|
||||
${BUN_KIT_RUNTIME_CODEGEN_SOURCES}
|
||||
${BUN_KIT_RUNTIME_CODEGEN_SCRIPT}
|
||||
OUTPUTS
|
||||
${CODEGEN_PATH}/bake_empty_file
|
||||
${BUN_BAKE_RUNTIME_OUTPUTS}
|
||||
${BUN_KIT_RUNTIME_OUTPUTS}
|
||||
)
|
||||
|
||||
set(BUN_JS_SINK_SCRIPT ${CWD}/src/codegen/generate-jssink.ts)
|
||||
@@ -464,6 +491,7 @@ list(APPEND BUN_ZIG_SOURCES
|
||||
)
|
||||
|
||||
set(BUN_ZIG_GENERATED_SOURCES
|
||||
${BUN_ZIG_IDENTIFIER_OUTPUTS}
|
||||
${BUN_ERROR_OUTPUTS}
|
||||
${BUN_FALLBACK_DECODER_OUTPUT}
|
||||
${BUN_RUNTIME_JS_OUTPUT}
|
||||
@@ -475,9 +503,9 @@ set(BUN_ZIG_GENERATED_SOURCES
|
||||
|
||||
# In debug builds, these are not embedded, but rather referenced at runtime.
|
||||
if (DEBUG)
|
||||
list(APPEND BUN_ZIG_GENERATED_SOURCES ${CODEGEN_PATH}/bake_empty_file)
|
||||
list(APPEND BUN_ZIG_GENERATED_SOURCES ${CODEGEN_PATH}/kit_empty_file)
|
||||
else()
|
||||
list(APPEND BUN_ZIG_GENERATED_SOURCES ${BUN_BAKE_RUNTIME_OUTPUTS})
|
||||
list(APPEND BUN_ZIG_GENERATED_SOURCES ${BUN_KIT_RUNTIME_OUTPUTS})
|
||||
endif()
|
||||
|
||||
set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
|
||||
@@ -519,8 +547,7 @@ register_command(
|
||||
-Dsha=${REVISION}
|
||||
-Dreported_nodejs_version=${NODEJS_VERSION}
|
||||
-Dcanary=${CANARY_REVISION}
|
||||
-Dcodegen_path=${CODEGEN_PATH}
|
||||
-Dcodegen_embed=$<IF:$<BOOL:${CODEGEN_EMBED}>,true,false>
|
||||
-Dgenerated-code=${CODEGEN_PATH}
|
||||
ARTIFACTS
|
||||
${BUN_ZIG_OUTPUT}
|
||||
TARGETS
|
||||
@@ -547,7 +574,7 @@ file(GLOB BUN_CXX_SOURCES ${CONFIGURE_DEPENDS}
|
||||
${CWD}/src/bun.js/bindings/webcrypto/*/*.cpp
|
||||
${CWD}/src/bun.js/bindings/v8/*.cpp
|
||||
${CWD}/src/bun.js/bindings/v8/shim/*.cpp
|
||||
${CWD}/src/bake/*.cpp
|
||||
${CWD}/src/kit/*.cpp
|
||||
${CWD}/src/deps/*.cpp
|
||||
${BUN_USOCKETS_SOURCE}/src/crypto/*.cpp
|
||||
)
|
||||
@@ -856,33 +883,26 @@ else()
|
||||
-Wl,--as-needed
|
||||
-Wl,--gc-sections
|
||||
-Wl,-z,stack-size=12800000
|
||||
-Wl,--wrap=cosf
|
||||
-Wl,--wrap=exp
|
||||
-Wl,--wrap=expf
|
||||
-Wl,--wrap=fcntl
|
||||
-Wl,--wrap=fcntl64
|
||||
-Wl,--wrap=fmod
|
||||
-Wl,--wrap=fmodf
|
||||
-Wl,--wrap=fstat
|
||||
-Wl,--wrap=fstat64
|
||||
-Wl,--wrap=fstatat
|
||||
-Wl,--wrap=fstatat64
|
||||
-Wl,--wrap=stat64
|
||||
-Wl,--wrap=pow
|
||||
-Wl,--wrap=exp
|
||||
-Wl,--wrap=expf
|
||||
-Wl,--wrap=log
|
||||
-Wl,--wrap=log10f
|
||||
-Wl,--wrap=log2
|
||||
-Wl,--wrap=log2f
|
||||
-Wl,--wrap=logf
|
||||
-Wl,--wrap=lstat
|
||||
-Wl,--wrap=stat64
|
||||
-Wl,--wrap=stat
|
||||
-Wl,--wrap=fstat
|
||||
-Wl,--wrap=fstatat
|
||||
-Wl,--wrap=lstat64
|
||||
-Wl,--wrap=fstat64
|
||||
-Wl,--wrap=fstatat64
|
||||
-Wl,--wrap=mknod
|
||||
-Wl,--wrap=mknodat
|
||||
-Wl,--wrap=pow
|
||||
-Wl,--wrap=sincosf
|
||||
-Wl,--wrap=sinf
|
||||
-Wl,--wrap=stat
|
||||
-Wl,--wrap=stat64
|
||||
-Wl,--wrap=statx
|
||||
-Wl,--wrap=tanf
|
||||
-Wl,--wrap=fmod
|
||||
-Wl,--compress-debug-sections=zlib
|
||||
-Wl,-z,lazy
|
||||
-Wl,-z,norelro
|
||||
|
||||
@@ -18,7 +18,6 @@ register_cmake_command(
|
||||
-DCMAKE_POSITION_INDEPENDENT_CODE=ON
|
||||
-DCARES_SHARED=OFF
|
||||
-DCARES_BUILD_TOOLS=OFF # this was set to ON?
|
||||
-DCMAKE_INSTALL_LIBDIR=lib
|
||||
LIB_PATH
|
||||
lib
|
||||
LIBRARIES
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
oven-sh/mimalloc
|
||||
COMMIT
|
||||
82b2c2277a4d570187c07b376557dc5bde81d848
|
||||
4c283af60cdae205df5a872530c77e2a6a307d43
|
||||
)
|
||||
|
||||
set(MIMALLOC_CMAKE_ARGS
|
||||
|
||||
@@ -11,13 +11,9 @@ find_command(
|
||||
COMMAND
|
||||
ccache
|
||||
REQUIRED
|
||||
${CI}
|
||||
ON
|
||||
)
|
||||
|
||||
if(NOT CCACHE_PROGRAM)
|
||||
return()
|
||||
endif()
|
||||
|
||||
set(CCACHE_ARGS CMAKE_C_COMPILER_LAUNCHER CMAKE_CXX_COMPILER_LAUNCHER)
|
||||
foreach(arg ${CCACHE_ARGS})
|
||||
setx(${arg} ${CCACHE_PROGRAM})
|
||||
|
||||
@@ -1,9 +1,3 @@
|
||||
optionx(ENABLE_LLVM BOOL "If LLVM should be used for compilation" DEFAULT ON)
|
||||
|
||||
if(NOT ENABLE_LLVM)
|
||||
return()
|
||||
endif()
|
||||
|
||||
if(CMAKE_HOST_WIN32 OR CMAKE_HOST_APPLE)
|
||||
set(DEFAULT_LLVM_VERSION "18.1.8")
|
||||
else()
|
||||
@@ -12,13 +6,14 @@ endif()
|
||||
|
||||
optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION})
|
||||
|
||||
string(REGEX MATCH "([0-9]+)\\.([0-9]+)\\.([0-9]+)" USE_LLVM_VERSION ${LLVM_VERSION})
|
||||
if(USE_LLVM_VERSION)
|
||||
set(LLVM_VERSION_MAJOR ${CMAKE_MATCH_1})
|
||||
set(LLVM_VERSION_MINOR ${CMAKE_MATCH_2})
|
||||
set(LLVM_VERSION_PATCH ${CMAKE_MATCH_3})
|
||||
string(REGEX MATCH "([0-9]+)\\.([0-9]+)\\.([0-9]+)" match ${LLVM_VERSION})
|
||||
if(NOT match)
|
||||
return()
|
||||
endif()
|
||||
|
||||
|
||||
set(LLVM_VERSION_MAJOR ${CMAKE_MATCH_1})
|
||||
set(LLVM_VERSION_MINOR ${CMAKE_MATCH_2})
|
||||
set(LLVM_VERSION_PATCH ${CMAKE_MATCH_3})
|
||||
set(LLVM_PATHS)
|
||||
|
||||
if(APPLE)
|
||||
@@ -37,40 +32,26 @@ if(APPLE)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
list(APPEND LLVM_PATHS ${HOMEBREW_PREFIX}/opt/llvm/bin)
|
||||
|
||||
if(USE_LLVM_VERSION)
|
||||
list(APPEND LLVM_PATHS ${HOMEBREW_PREFIX}/opt/llvm@${LLVM_VERSION_MAJOR}/bin)
|
||||
endif()
|
||||
list(APPEND LLVM_PATHS
|
||||
${HOMEBREW_PREFIX}/opt/llvm@${LLVM_VERSION_MAJOR}/bin
|
||||
${HOMEBREW_PREFIX}/opt/llvm/bin
|
||||
)
|
||||
endif()
|
||||
|
||||
if(UNIX)
|
||||
list(APPEND LLVM_PATHS /usr/lib/llvm/bin)
|
||||
|
||||
if(USE_LLVM_VERSION)
|
||||
list(APPEND LLVM_PATHS
|
||||
/usr/lib/llvm-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}.${LLVM_VERSION_PATCH}/bin
|
||||
/usr/lib/llvm-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}/bin
|
||||
/usr/lib/llvm-${LLVM_VERSION_MAJOR}/bin
|
||||
)
|
||||
endif()
|
||||
list(APPEND LLVM_PATHS
|
||||
/usr/lib/llvm-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}.${LLVM_VERSION_PATCH}/bin
|
||||
/usr/lib/llvm-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}/bin
|
||||
/usr/lib/llvm-${LLVM_VERSION_MAJOR}/bin
|
||||
/usr/lib/llvm/bin
|
||||
)
|
||||
endif()
|
||||
|
||||
macro(find_llvm_command variable command)
|
||||
set(commands ${command})
|
||||
|
||||
if(USE_LLVM_VERSION)
|
||||
list(APPEND commands
|
||||
${command}-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}.${LLVM_VERSION_PATCH}
|
||||
${command}-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}
|
||||
${command}-${LLVM_VERSION_MAJOR}
|
||||
)
|
||||
endif()
|
||||
|
||||
find_command(
|
||||
VARIABLE ${variable}
|
||||
VERSION_VARIABLE LLVM_VERSION
|
||||
COMMAND ${commands}
|
||||
COMMAND ${command} ${command}-${LLVM_VERSION_MAJOR}
|
||||
PATHS ${LLVM_PATHS}
|
||||
VERSION ${LLVM_VERSION}
|
||||
)
|
||||
@@ -78,21 +59,12 @@ macro(find_llvm_command variable command)
|
||||
endmacro()
|
||||
|
||||
macro(find_llvm_command_no_version variable command)
|
||||
set(commands ${command})
|
||||
|
||||
if(USE_LLVM_VERSION)
|
||||
list(APPEND commands
|
||||
${command}-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}.${LLVM_VERSION_PATCH}
|
||||
${command}-${LLVM_VERSION_MAJOR}.${LLVM_VERSION_MINOR}
|
||||
${command}-${LLVM_VERSION_MAJOR}
|
||||
)
|
||||
endif()
|
||||
|
||||
find_command(
|
||||
VARIABLE ${variable}
|
||||
VERSION_VARIABLE LLVM_VERSION
|
||||
COMMAND ${commands}
|
||||
COMMAND ${command} ${command}-${LLVM_VERSION_MAJOR}
|
||||
PATHS ${LLVM_PATHS}
|
||||
REQUIRED ON
|
||||
)
|
||||
list(APPEND CMAKE_ARGS -D${variable}=${${variable}})
|
||||
endmacro()
|
||||
@@ -114,8 +86,3 @@ else()
|
||||
find_llvm_command(CMAKE_DSYMUTIL dsymutil)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(ENABLE_ANALYSIS)
|
||||
find_llvm_command(CLANG_FORMAT_PROGRAM clang-format)
|
||||
find_llvm_command(CLANG_TIDY_PROGRAM clang-tidy)
|
||||
endif()
|
||||
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION 9b84f43643eff64ab46daec9b860de262c80f5e2)
|
||||
set(WEBKIT_VERSION 76798f7b2fb287ee9f1ecce98bae895a2d026d93)
|
||||
endif()
|
||||
|
||||
if(WEBKIT_LOCAL)
|
||||
|
||||
@@ -14,7 +14,7 @@ In Bun v1.1.9, we added support for DNS caching. This cache makes repeated conne
|
||||
|
||||
At the time of writing, we cache up to 255 entries for a maximum of 30 seconds (each). If any connections to a host fail, we remove the entry from the cache. When multiple connections are made to the same host simultaneously, DNS lookups are deduplicated to avoid making multiple requests for the same host.
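To see how the cache is behaving at runtime, you can inspect its counters. The sketch below assumes the `dns` helpers exported from the `bun` module (`prefetch` and `getCacheStats`); the hostname is only a placeholder.

```ts
import { dns } from "bun";

// Optionally warm the cache before the first request (placeholder hostname)
dns.prefetch("registry.npmjs.org", 443);

await fetch("https://registry.npmjs.org/");

// Inspect cache counters (hits, misses, current size, ...)
console.log(dns.getCacheStats());
```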
|
||||
|
||||
This cache is automatically used by:
|
||||
This cache is automatically used by:
|
||||
|
||||
- `bun install`
|
||||
- `fetch()`
|
||||
@@ -99,7 +99,7 @@ console.log(stats);
|
||||
|
||||
### Configuring DNS cache TTL
|
||||
|
||||
Bun defaults to 30 seconds for the TTL of DNS cache entries. To change this, you can set the environment variable `$BUN_CONFIG_DNS_TIME_TO_LIVE_SECONDS`. For example, to set the TTL to 5 seconds:
|
||||
Bun defaults to 30 seconds for the TTL of DNS cache entries. To change this, you can set the environment variable `$BUN_CONFIG_DNS_TIME_TO_LIVE_SECONDS`. For example, to set the TTL to 5 seconds:
|
||||
|
||||
```sh
|
||||
BUN_CONFIG_DNS_TIME_TO_LIVE_SECONDS=5 bun run my-script.ts
|
||||
|
||||
@@ -402,7 +402,7 @@ Bun.serve({
|
||||
});
|
||||
```
|
||||
|
||||
### Server name indication (SNI)
|
||||
### Server name indication (SNI)
|
||||
|
||||
To configure the server name indication (SNI) for the server, set the `serverName` field in the `tls` object.
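For illustration, a minimal sketch; the certificate paths and hostname below are placeholders.

```ts
Bun.serve({
  port: 443,
  tls: {
    key: Bun.file("./key.pem"),    // placeholder path
    cert: Bun.file("./cert.pem"),  // placeholder path
    serverName: "app.example.com", // placeholder hostname
  },
  fetch() {
    return new Response("Hello over TLS");
  },
});
```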
|
||||
|
||||
|
||||
@@ -179,7 +179,7 @@ proc.kill(); // specify an exit code
|
||||
|
||||
The parent `bun` process will not terminate until all child processes have exited. Use `proc.unref()` to detach the child process from the parent.
|
||||
|
||||
```ts
|
||||
const proc = Bun.spawn(["bun", "--version"]);
|
||||
proc.unref();
|
||||
```
|
||||
|
||||
@@ -325,28 +325,6 @@ As a performance optimization, the class constructor is not called, default init
|
||||
|
||||
The database columns are set as properties on the class instance.
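A minimal sketch of what this looks like in practice, assuming Bun's `Statement.as(Class)` helper, which this passage appears to describe:

```ts
import { Database } from "bun:sqlite";

class Movie {
  title!: string;
  year!: number;
}

const db = new Database(":memory:");
db.run("CREATE TABLE movies (title TEXT, year INTEGER)");
db.run("INSERT INTO movies VALUES ('Heat', 1995)");

// Rows come back as Movie instances; the constructor is not called and
// the columns are assigned directly as properties.
const movie = db.query("SELECT * FROM movies").as(Movie).get();
console.log(movie?.title); // => "Heat"
```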
|
||||
|
||||
### `.iterate()` (`@@iterator`)
|
||||
|
||||
Use `.iterate()` to run a query and incrementally return results. This is useful for large result sets that you want to process one row at a time without loading all the results into memory.
|
||||
|
||||
```ts
|
||||
const query = db.query("SELECT * FROM foo");
|
||||
for (const row of query.iterate()) {
|
||||
console.log(row);
|
||||
}
|
||||
```
|
||||
|
||||
You can also use the `@@iterator` protocol:
|
||||
|
||||
```ts
|
||||
const query = db.query("SELECT * FROM foo");
|
||||
for (const row of query) {
|
||||
console.log(row);
|
||||
}
|
||||
```
|
||||
|
||||
This feature was added in Bun v1.1.31.
|
||||
|
||||
### `.values()`
|
||||
|
||||
Use `values()` to run a query and get back all results as an array of arrays.
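A small sketch of the shape of the return value:

```ts
import { Database } from "bun:sqlite";

const db = new Database(":memory:");
db.run("CREATE TABLE foo (id INTEGER, name TEXT)");
db.run("INSERT INTO foo VALUES (1, 'a'), (2, 'b')");

const query = db.query("SELECT * FROM foo");
console.log(query.values());
// => [ [ 1, "a" ], [ 2, "b" ] ]
```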
|
||||
|
||||
@@ -580,65 +580,6 @@ const foo = new Foo();
|
||||
console.log(foo); // => "foo"
|
||||
```
|
||||
|
||||
## `Bun.inspect.table(tabularData, properties, options)`
|
||||
|
||||
Format tabular data into a string. Like [`console.table`](https://developer.mozilla.org/en-US/docs/Web/API/console/table_static), except it returns a string rather than printing to the console.
|
||||
|
||||
```ts
|
||||
console.log(
|
||||
Bun.inspect.table([
|
||||
{ a: 1, b: 2, c: 3 },
|
||||
{ a: 4, b: 5, c: 6 },
|
||||
{ a: 7, b: 8, c: 9 },
|
||||
]),
|
||||
);
|
||||
//
|
||||
// ┌───┬───┬───┬───┐
|
||||
// │ │ a │ b │ c │
|
||||
// ├───┼───┼───┼───┤
|
||||
// │ 0 │ 1 │ 2 │ 3 │
|
||||
// │ 1 │ 4 │ 5 │ 6 │
|
||||
// │ 2 │ 7 │ 8 │ 9 │
|
||||
// └───┴───┴───┴───┘
|
||||
```
|
||||
|
||||
Additionally, you can pass an array of property names to display only a subset of properties.
|
||||
|
||||
```ts
|
||||
console.log(
|
||||
Bun.inspect.table(
|
||||
[
|
||||
{ a: 1, b: 2, c: 3 },
|
||||
{ a: 4, b: 5, c: 6 },
|
||||
],
|
||||
["a", "c"],
|
||||
),
|
||||
);
|
||||
//
|
||||
// ┌───┬───┬───┐
|
||||
// │ │ a │ c │
|
||||
// ├───┼───┼───┤
|
||||
// │ 0 │ 1 │ 3 │
|
||||
// │ 1 │ 4 │ 6 │
|
||||
// └───┴───┴───┘
|
||||
```
|
||||
|
||||
You can also conditionally enable ANSI colors by passing `{ colors: true }`.
|
||||
|
||||
```ts
|
||||
console.log(
|
||||
Bun.inspect.table(
|
||||
[
|
||||
{ a: 1, b: 2, c: 3 },
|
||||
{ a: 4, b: 5, c: 6 },
|
||||
],
|
||||
{
|
||||
colors: true,
|
||||
},
|
||||
),
|
||||
);
|
||||
```
|
||||
|
||||
## `Bun.nanoseconds()`
|
||||
|
||||
Returns the number of nanoseconds since the current `bun` process started, as a `number`. Useful for high-precision timing and benchmarking.
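For example, a rough timing sketch (the URL is a placeholder):

```ts
const start = Bun.nanoseconds();

await fetch("https://example.com"); // placeholder work to measure

const elapsedNs = Bun.nanoseconds() - start;
console.log(`took ${(elapsedNs / 1e6).toFixed(2)} ms`);
```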
|
||||
|
||||
@@ -751,7 +751,7 @@ $ bun build ./index.tsx --outdir ./out --external '*'
|
||||
|
||||
### `packages`
|
||||
|
||||
Control whether package dependencies are included in the bundle or not. Possible values: `bundle` (default), `external`. Bun treats any import whose path does not start with `.`, `..`, or `/` as a package.
|
||||
Control whether package dependencies are included in the bundle or not. Possible values: `bundle` (default), `external`. Bun treats any import whose path does not start with `.`, `..`, or `/` as a package.
|
||||
|
||||
{% codetabs group="a" %}
|
||||
|
||||
@@ -1090,84 +1090,6 @@ $ bun build ./index.tsx --outdir ./out --loader .png:dataurl --loader .txt:file
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### `banner`
|
||||
|
||||
A banner to be added to the final bundle; this can be a directive like `"use client"` for React, or a comment block such as a license for the code.
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```ts#JavaScript
|
||||
await Bun.build({
|
||||
entrypoints: ['./index.tsx'],
|
||||
outdir: './out',
|
||||
banner: '"use client";'
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
$ bun build ./index.tsx --outdir ./out --banner "\"use client\";"
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### `footer`
|
||||
|
||||
A footer to be added to the final bundle; this can be something like a comment block for a license, or just a fun easter egg.
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```ts#JavaScript
|
||||
await Bun.build({
|
||||
entrypoints: ['./index.tsx'],
|
||||
outdir: './out',
|
||||
footer: '// built with love in SF'
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
$ bun build ./index.tsx --outdir ./out --footer="// built with love in SF"
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### `drop`
|
||||
|
||||
Remove function calls from a bundle. For example, `--drop=console` will remove all calls to `console.log`. Arguments to those calls will also be removed, regardless of whether they may have side effects. Dropping `debugger` will remove all `debugger` statements.
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```ts#JavaScript
|
||||
await Bun.build({
|
||||
entrypoints: ['./index.tsx'],
|
||||
outdir: './out',
|
||||
drop: ["console", "debugger", "anyIdentifier.or.propertyAccess"],
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
$ bun build ./index.tsx --outdir ./out --drop=console --drop=debugger --drop=anyIdentifier.or.propertyAccess
|
||||
```
|
||||
|
||||
{% /codetabs %}
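To make the effect concrete, here is a hedged sketch of what dropping does to input code; the `analytics` global is hypothetical and the output shown in comments is only approximate.

```ts
declare const analytics: { track(event: string): void }; // hypothetical global

console.log("starting up");   // removed by --drop=console
debugger;                     // removed by --drop=debugger
analytics.track("page_view"); // removed by --drop=analytics.track
export const answer = 42;     // kept

// After `bun build --drop=console --drop=debugger --drop=analytics.track`,
// the output is (approximately) just:
//   export const answer = 42;
```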
|
||||
|
||||
### `experimentalCss`
|
||||
|
||||
Whether to enable _experimental_ support for bundling CSS files. Defaults to `false`.
|
||||
|
||||
This supports bundling CSS files imported from JS, as well as CSS entrypoints.
|
||||
|
||||
{% codetabs group="a" %}
|
||||
|
||||
```ts#JavaScript
|
||||
const result = await Bun.build({
|
||||
entrypoints: ["./index.ts"],
|
||||
experimentalCss: true,
|
||||
});
|
||||
// => { success: boolean, outputs: BuildArtifact[], logs: BuildMessage[] }
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
## Outputs
|
||||
|
||||
The `Bun.build` function returns a `Promise<BuildOutput>`, defined as:
|
||||
|
||||
@@ -154,14 +154,8 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
|
||||
---
|
||||
|
||||
- `--banner`
|
||||
- `--banner`
|
||||
- Only applies to js bundles
|
||||
|
||||
---
|
||||
|
||||
- `--footer`
|
||||
- `--footer`
|
||||
- Only applies to js bundles
|
||||
- n/a
|
||||
- Not supported
|
||||
|
||||
---
|
||||
|
||||
@@ -190,7 +184,8 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
|
||||
---
|
||||
|
||||
- `--drop`
|
||||
- `--drop`
|
||||
- n/a
|
||||
- Not supported
|
||||
|
||||
---
|
||||
|
||||
@@ -200,6 +195,12 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
|
||||
|
||||
---
|
||||
|
||||
- `--footer`
|
||||
- n/a
|
||||
- Not supported
|
||||
|
||||
---
|
||||
|
||||
- `--global-name`
|
||||
- n/a
|
||||
- Not applicable, Bun does not support `iife` output at this time
|
||||
|
||||
@@ -1,22 +1,5 @@
|
||||
The `bun pm` command group provides a set of utilities for working with Bun's package manager.
|
||||
|
||||
## pack
|
||||
|
||||
To create a tarball of the current workspace:
|
||||
|
||||
```bash
|
||||
$ bun pm pack
|
||||
```
|
||||
|
||||
Options for the `pack` command:
|
||||
|
||||
- `--dry-run`: Perform all tasks except writing the tarball to disk.
|
||||
- `--destination`: Specify the directory where the tarball will be saved.
|
||||
- `--ignore-scripts`: Skip running pre/postpack and prepare scripts.
|
||||
- `--gzip-level`: Set a custom compression level for gzip, ranging from 0 to 9 (default is 9).
|
||||
|
||||
## bin
|
||||
|
||||
To print the path to the `bin` directory for the local project:
|
||||
|
||||
```bash
|
||||
@@ -31,8 +14,6 @@ $ bun pm bin -g
|
||||
<$HOME>/.bun/bin
|
||||
```
|
||||
|
||||
## ls
|
||||
|
||||
To print a list of installed dependencies in the current project and their resolved versions, excluding their dependencies:
|
||||
|
||||
```bash
|
||||
@@ -64,36 +45,6 @@ $ bun pm ls --all
|
||||
├── ...
|
||||
```
|
||||
|
||||
## whoami
|
||||
|
||||
Print your npm username. Requires you to be logged in (`bunx npm login`) with credentials in either `bunfig.toml` or `.npmrc`:
|
||||
|
||||
```bash
|
||||
$ bun pm whoami
|
||||
```
|
||||
|
||||
## hash
|
||||
|
||||
To generate and print the hash of the current lockfile:
|
||||
|
||||
```bash
|
||||
$ bun pm hash
|
||||
```
|
||||
|
||||
To print the string used to hash the lockfile:
|
||||
|
||||
```bash
|
||||
$ bun pm hash-string
|
||||
```
|
||||
|
||||
To print the hash stored in the current lockfile:
|
||||
|
||||
```bash
|
||||
$ bun pm hash-print
|
||||
```
|
||||
|
||||
## cache
|
||||
|
||||
To print the path to Bun's global module cache:
|
||||
|
||||
```bash
|
||||
@@ -106,45 +57,16 @@ To clear Bun's global module cache:
|
||||
$ bun pm cache rm
|
||||
```
|
||||
|
||||
## migrate
|
||||
## List global installs
|
||||
|
||||
To migrate another package manager's lockfile without installing anything:
|
||||
To list all globally installed packages:
|
||||
|
||||
```bash
|
||||
$ bun pm migrate
|
||||
$ bun pm ls -g
|
||||
```
|
||||
|
||||
## untrusted
|
||||
|
||||
To print current untrusted dependencies with scripts:
|
||||
To list all globally installed packages, including nth-order dependencies:
|
||||
|
||||
```bash
|
||||
$ bun pm untrusted
|
||||
|
||||
./node_modules/@biomejs/biome @1.8.3
|
||||
» [postinstall]: node scripts/postinstall.js
|
||||
|
||||
These dependencies had their lifecycle scripts blocked during install.
|
||||
$ bun pm ls -g --all
|
||||
```
|
||||
|
||||
## trust
|
||||
|
||||
To run scripts for untrusted dependencies and add to `trustedDependencies`:
|
||||
|
||||
```bash
|
||||
$ bun pm trust <names>
|
||||
```
|
||||
|
||||
Options for the `trust` command:
|
||||
|
||||
- `--all`: Trust all untrusted dependencies.
|
||||
|
||||
## default-trusted
|
||||
|
||||
To print the default trusted dependencies list:
|
||||
|
||||
```bash
|
||||
$ bun pm default-trusted
|
||||
```
|
||||
|
||||
See the current list on GitHub [here](https://github.com/oven-sh/bun/blob/main/src/install/default-trusted-dependencies.txt).
|
||||
|
||||
@@ -1,107 +0,0 @@
|
||||
Use `bun publish` to publish a package to the npm registry.
|
||||
|
||||
`bun publish` will automatically pack your package into a tarball, strip workspace protocols from the `package.json` (resolving versions if necessary), and publish to the registry specified in your configuration files. Both `bunfig.toml` and `.npmrc` files are supported.
|
||||
|
||||
```sh
|
||||
## Publishing the package from the current working directory
|
||||
$ bun publish
|
||||
|
||||
## Output
|
||||
bun publish v1.1.30 (ca7428e9)
|
||||
|
||||
packed 203B package.json
|
||||
packed 224B README.md
|
||||
packed 30B index.ts
|
||||
packed 0.64KB tsconfig.json
|
||||
|
||||
Total files: 4
|
||||
Shasum: 79e2b4377b63f4de38dc7ea6e5e9dbee08311a69
|
||||
Integrity: sha512-6QSNlDdSwyG/+[...]X6wXHriDWr6fA==
|
||||
Unpacked size: 1.1KB
|
||||
Packed size: 0.76KB
|
||||
Tag: latest
|
||||
Access: default
|
||||
Registry: http://localhost:4873/
|
||||
|
||||
+ publish-1@1.0.0
|
||||
```
|
||||
|
||||
Alternatively, you can pack and publish your package separately by using `bun pm pack` followed by `bun publish` with the path to the output tarball.
|
||||
|
||||
```sh
|
||||
$ bun pm pack
|
||||
...
|
||||
$ bun publish ./package.tgz
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
**Note** - `bun publish` will not run lifecycle scripts (`prepublishOnly/prepack/prepare/postpack/publish/postpublish`) if a tarball path is provided. Scripts will only be run if the package is packed by `bun publish`.
|
||||
{% /callout %}
|
||||
|
||||
### `--access`
|
||||
|
||||
The `--access` flag can be used to set the access level of the package being published. The access level can be one of `public` or `restricted`. Unscoped packages are always public, and attempting to publish an unscoped package with `--access restricted` will result in an error.
|
||||
|
||||
```sh
|
||||
$ bun publish --access public
|
||||
```
|
||||
|
||||
`--access` can also be set in the `publishConfig` field of your `package.json`.
|
||||
|
||||
```json
|
||||
{
|
||||
"publishConfig": {
|
||||
"access": "restricted"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### `--tag`
|
||||
|
||||
Set the tag of the package version being published. By default, the tag is `latest`. The initial version of a package is always given the `latest` tag in addition to the specified tag.
|
||||
|
||||
```sh
|
||||
$ bun publish --tag alpha
|
||||
```
|
||||
|
||||
`--tag` can also be set in the `publishConfig` field of your `package.json`.
|
||||
|
||||
```json
|
||||
{
|
||||
"publishConfig": {
|
||||
"tag": "next"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### `--dry-run`
|
||||
|
||||
The `--dry-run` flag can be used to simulate the publish process without actually publishing the package. This is useful for verifying the contents of the package before it is published.
|
||||
|
||||
```sh
|
||||
$ bun publish --dry-run
|
||||
```
|
||||
|
||||
### `--auth-type`
|
||||
|
||||
If you have 2FA enabled for your npm account, `bun publish` will prompt you for a one-time password. This can be done through a browser or the CLI. The `--auth-type` flag can be used to tell the npm registry which method you prefer. The possible values are `web` and `legacy`, with `web` being the default.
|
||||
|
||||
```sh
|
||||
$ bun publish --auth-type legacy
|
||||
...
|
||||
This operation requires a one-time password.
|
||||
Enter OTP: 123456
|
||||
...
|
||||
```
|
||||
|
||||
### `--otp`
|
||||
|
||||
Provide a one-time password directly to the CLI. If the password is valid, this will skip the extra prompt for a one-time password before publishing. Example usage:
|
||||
|
||||
```sh
|
||||
$ bun publish --otp 123456
|
||||
```
|
||||
|
||||
### `--gzip-level`
|
||||
|
||||
Specify the level of gzip compression to use when packing the package. Only applies to `bun publish` without a tarball path argument. Values range from `0` to `9` (default is `9`).
|
||||
@@ -2,7 +2,13 @@
|
||||
name: Build an app with Next.js and Bun
|
||||
---
|
||||
|
||||
Initialize a Next.js app with `create-next-app`. This will scaffold a new Next.js project and automatically install dependencies.
|
||||
{% callout %}
|
||||
The Next.js [App Router](https://nextjs.org/docs/app) currently relies on Node.js APIs that Bun does not yet implement. The guide below uses Bun to initialize a project and install dependencies, but it uses Node.js to run the dev server.
|
||||
{% /callout %}
|
||||
|
||||
---
|
||||
|
||||
Initialize a Next.js app with `create-next-app`. This automatically installs dependencies using `npm`.
|
||||
|
||||
```sh
|
||||
$ bun create next-app
|
||||
|
||||
@@ -37,10 +37,7 @@ Alternatively, you can create a PM2 configuration file. Create a file named `pm2
|
||||
module.exports = {
|
||||
name: "app", // Name of your application
|
||||
script: "index.ts", // Entry point of your application
|
||||
interpreter: "bun", // Bun interpreter
|
||||
env: {
|
||||
PATH: `${process.env.HOME}/.bun/bin:${process.env.PATH}`, // Add "~/.bun/bin/bun" to PATH
|
||||
}
|
||||
interpreter: "~/.bun/bin/bun", // Path to the Bun interpreter
|
||||
};
|
||||
```
|
||||
|
||||
|
||||
@@ -2,62 +2,56 @@
|
||||
name: Build an app with SvelteKit and Bun
|
||||
---
|
||||
|
||||
Use `sv create my-app` to create a SvelteKit project with SvelteKit CLI. Answer the prompts to select a template and set up your development environment.
|
||||
Use `bun create` to scaffold your app with the `svelte` package. Answer the prompts to select a template and set up your development environment.
|
||||
|
||||
```sh
|
||||
$ bunx sv create my-app
|
||||
┌ Welcome to the Svelte CLI! (v0.5.7)
|
||||
$ bun create svelte@latest my-app
|
||||
┌ Welcome to SvelteKit!
|
||||
│
|
||||
◇ Which template would you like?
|
||||
│ SvelteKit demo
|
||||
◇ Which Svelte app template?
|
||||
│ SvelteKit demo app
|
||||
│
|
||||
◇ Add type checking with Typescript?
|
||||
│ Yes, using Typescript syntax
|
||||
◇ Add type checking with TypeScript?
|
||||
│ Yes, using TypeScript syntax
|
||||
│
|
||||
◆ Project created
|
||||
◇ Select additional options (use arrow keys/space bar)
|
||||
│ None
|
||||
│
|
||||
◇ What would you like to add to your project?
|
||||
│ none
|
||||
│
|
||||
◇ Which package manager do you want to install dependencies with?
|
||||
│ bun
|
||||
│
|
||||
◇ Successfully installed dependencies
|
||||
│
|
||||
◇ Project next steps ─────────────────────────────────────────────────────╮
|
||||
│ │
|
||||
│ 1: cd my-app │
|
||||
│ 2: git init && git add -A && git commit -m "Initial commit" (optional) │
|
||||
│ 3: bun run dev -- --open │
|
||||
│ │
|
||||
│ To close the dev server, hit Ctrl-C │
|
||||
│ │
|
||||
│ Stuck? Visit us at https://svelte.dev/chat │
|
||||
│ │
|
||||
├──────────────────────────────────────────────────────────────────────────╯
|
||||
│
|
||||
└ You're all set!
|
||||
└ Your project is ready!
|
||||
|
||||
✔ Typescript
|
||||
Inside Svelte components, use <script lang="ts">
|
||||
|
||||
Install community-maintained integrations:
|
||||
https://github.com/svelte-add/svelte-add
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Once the project is initialized, `cd` into the new project. You don't need to run `bun install` since the dependencies are already installed.
|
||||
Once the project is initialized, `cd` into the new project and install dependencies.
|
||||
|
||||
```sh
|
||||
$ cd my-app
|
||||
$ bun install
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Then start the development server with `bun --bun run dev`.
|
||||
|
||||
To run the dev server with Node.js instead of Bun, you can omit the `--bun` flag.
|
||||
|
||||
```sh
|
||||
$ cd my-app
|
||||
$ bun --bun run dev
|
||||
$ vite dev
|
||||
|
||||
Forced re-optimization of dependencies
|
||||
|
||||
VITE v5.4.10 ready in 424 ms
|
||||
|
||||
|
||||
VITE v4.4.9 ready in 895 ms
|
||||
|
||||
➜ Local: http://localhost:5173/
|
||||
➜ Network: use --host to expose
|
||||
➜ press h + enter to show help
|
||||
➜ press h to show help
|
||||
```
|
||||
|
||||
---
|
||||
@@ -81,22 +75,16 @@ Now, make the following changes to your `svelte.config.js`.
|
||||
```ts-diff
|
||||
- import adapter from "@sveltejs/adapter-auto";
|
||||
+ import adapter from "svelte-adapter-bun";
|
||||
import { vitePreprocess } from '@sveltejs/vite-plugin-svelte';
|
||||
import { vitePreprocess } from "@sveltejs/kit/vite";
|
||||
|
||||
/** @type {import('@sveltejs/kit').Config} */
|
||||
const config = {
|
||||
// Consult https://svelte.dev/docs/kit/integrations#preprocessors
|
||||
// for more information about preprocessors
|
||||
preprocess: vitePreprocess(),
|
||||
|
||||
kit: {
|
||||
// adapter-auto only supports some environments, see https://svelte.dev/docs/kit/adapter-auto for a list.
|
||||
// If your environment is not supported, or you settled on a specific environment, switch out the adapter.
|
||||
// See https://svelte.dev/docs/kit/adapters for more information about adapters.
|
||||
adapter: adapter()
|
||||
}
|
||||
kit: {
|
||||
adapter: adapter(),
|
||||
},
|
||||
preprocess: vitePreprocess(),
|
||||
};
|
||||
|
||||
|
||||
export default config;
|
||||
```
|
||||
|
||||
@@ -105,21 +93,28 @@ Now, make the following changes to your `svelte.config.js`.
|
||||
To build a production bundle:
|
||||
|
||||
```sh
|
||||
$ bun --bun run build
|
||||
$ vite build
|
||||
vite v5.4.10 building SSR bundle for production...
|
||||
"confetti" is imported from external module "@neoconfetti/svelte" but never used in "src/routes/sverdle/+page.svelte".
|
||||
✓ 130 modules transformed.
|
||||
vite v5.4.10 building for production...
|
||||
✓ 148 modules transformed.
|
||||
...
|
||||
✓ built in 231ms
|
||||
...
|
||||
✓ built in 899ms
|
||||
|
||||
Run npm run preview to preview your production build locally.
|
||||
|
||||
> Using svelte-adapter-bun
|
||||
✔ Start server with: bun ./build/index.js
|
||||
✔ done
|
||||
$ bun run build
|
||||
$ vite build
|
||||
|
||||
vite v4.4.9 building SSR bundle for production...
|
||||
transforming (60) node_modules/@sveltejs/kit/src/utils/escape.js
|
||||
|
||||
✓ 98 modules transformed.
|
||||
Generated an empty chunk: "entries/endpoints/waitlist/_server.ts".
|
||||
|
||||
vite v4.4.9 building for production...
|
||||
✓ 92 modules transformed.
|
||||
Generated an empty chunk: "7".
|
||||
.svelte-kit/output/client/_app/version.json 0.03 kB │ gzip: 0.05 kB
|
||||
|
||||
...
|
||||
|
||||
.svelte-kit/output/server/index.js 86.47 kB
|
||||
|
||||
Run npm run preview to preview your production build locally.
|
||||
|
||||
> Using svelte-adapter-bun
|
||||
✔ Start server with: bun ./build/index.js
|
||||
✔ done
|
||||
✓ built in 7.81s
|
||||
```
|
||||
|
||||
@@ -17,7 +17,7 @@ If you are seeing one of the following errors, you are probably trying to use a
|
||||
|
||||
---
|
||||
|
||||
To allow Bun to execute lifecycle scripts for a specific package, add the package to `trustedDependencies` in your package.json file. You can do this automatically by running the command `bun pm trust <pkg>`.
|
||||
To tell Bun to allow lifecycle scripts for a particular package, add the package to `trustedDependencies` in your package.json.
|
||||
|
||||
{% callout %}
|
||||
Note that this only allows lifecycle scripts for the specific package listed in `trustedDependencies`, _not_ the dependencies of that dependency!
|
||||
|
||||
@@ -27,6 +27,16 @@ data.version; // => "1.0.0"
|
||||
data.author.name; // => "John Dough"
|
||||
```
|
||||
|
||||
Bun also supports [Import Attributes](https://github.com/tc39/proposal-import-attributes/) and [JSON modules](https://github.com/tc39/proposal-json-modules) syntax.
|
||||
|
||||
```ts
|
||||
import data from "./package.json" with { type: "json" };
|
||||
|
||||
data.name; // => "bun"
|
||||
data.version; // => "1.0.0"
|
||||
data.author.name; // => "John Dough"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Bun also supports [Import Attributes](https://github.com/tc39/proposal-import-attributes/) and [JSON modules](https://github.com/tc39/proposal-json-modules) syntax.
|
||||
|
||||
@@ -16,7 +16,7 @@ Add the following to your local or global `.gitattributes` file:
|
||||
*.lockb binary diff=lockb
|
||||
```
|
||||
|
||||
Then add the following to your local git config with:
|
||||
Then add the following to your local git config with:
|
||||
|
||||
```sh
|
||||
$ git config diff.lockb.textconv bun
|
||||
|
||||
@@ -72,9 +72,8 @@ There are also image variants for different operating systems.
|
||||
```bash
|
||||
$ docker pull oven/bun:debian
|
||||
$ docker pull oven/bun:slim
|
||||
$ docker pull oven/bun:alpine
|
||||
$ docker pull oven/bun:distroless
|
||||
# alpine not recommended until #918 is fixed
|
||||
# $ docker pull oven/bun:alpine
|
||||
```
|
||||
|
||||
## Checking installation
|
||||
|
||||
@@ -164,9 +164,6 @@ export default {
|
||||
page("cli/update", "`bun update`", {
|
||||
description: "Update your project's dependencies.",
|
||||
}),
|
||||
page("cli/publish", "`bun publish`", {
|
||||
description: "Publish your package to an npm registry.",
|
||||
}),
|
||||
page("cli/outdated", "`bun outdated`", {
|
||||
description: "Check for outdated dependencies.",
|
||||
}),
|
||||
|
||||
@@ -370,19 +370,6 @@ myorg = { username = "myusername", password = "$npm_password", url = "https://re
|
||||
myorg = { token = "$npm_token", url = "https://registry.myorg.com/" }
|
||||
```
|
||||
|
||||
### `install.ca` and `install.cafile`
|
||||
|
||||
To configure a CA certificate, use `install.ca` or `install.cafile` to specify a path to a CA certificate file.
|
||||
|
||||
```toml
|
||||
[install]
|
||||
# The CA certificate as a string
|
||||
ca = "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----"
|
||||
|
||||
# A path to a CA certificate file. The file can contain multiple certificates.
|
||||
cafile = "path/to/cafile"
|
||||
```
|
||||
|
||||
### `install.cache`
|
||||
|
||||
To configure the cache behavior:
|
||||
|
||||
@@ -238,30 +238,6 @@ If `exports` is not defined, Bun falls back to `"module"` (ESM imports only) the
|
||||
}
|
||||
```
|
||||
|
||||
### Custom conditions
|
||||
|
||||
The `--conditions` flag allows you to specify a list of conditions to use when resolving packages from package.json `"exports"`.
|
||||
|
||||
This flag is supported in both `bun build` and Bun's runtime.
|
||||
|
||||
```sh
|
||||
# Use it with bun build:
|
||||
$ bun build --conditions="react-server" --target=bun ./app/foo/route.js
|
||||
|
||||
# Use it with bun's runtime:
|
||||
$ bun --conditions="react-server" ./app/foo/route.js
|
||||
```
|
||||
|
||||
You can also use `conditions` programmatically with `Bun.build`:
|
||||
|
||||
```js
|
||||
await Bun.build({
|
||||
conditions: ["react-server"],
|
||||
target: "bun",
|
||||
entryPoints: ["./app/foo/route.js"],
|
||||
});
|
||||
```
|
||||
|
||||
## Path re-mapping
|
||||
|
||||
In the spirit of treating TypeScript as a first-class citizen, the Bun runtime will re-map import paths according to the [`compilerOptions.paths`](https://www.typescriptlang.org/tsconfig#paths) field in `tsconfig.json`. This is a major divergence from Node.js, which doesn't support any form of import path re-mapping.
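As a quick illustration (the alias and file layout below are hypothetical):

```ts
// tsconfig.json (hypothetical excerpt):
//   "compilerOptions": {
//     "baseUrl": ".",
//     "paths": { "@utils/*": ["./src/utils/*"] }
//   }

// Bun resolves this to ./src/utils/math.ts at runtime, with no bundler step:
import { add } from "@utils/math";

console.log(add(1, 2));
```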
|
||||
|
||||
@@ -65,7 +65,7 @@ Some methods are not optimized yet.
|
||||
|
||||
### [`node:http2`](https://nodejs.org/api/http2.html)
|
||||
|
||||
🟡 Client & server are implemented (95.25% of gRPC's test suite passes). Missing `options.allowHTTP1`, `options.enableConnectProtocol`, ALTSVC extension, and `http2stream.pushStream`.
|
||||
🟡 Client is supported, but server isn't yet.
|
||||
|
||||
### [`node:https`](https://nodejs.org/api/https.html)
|
||||
|
||||
|
||||
package.json
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "bun",
|
||||
"version": "1.1.34",
|
||||
"version": "1.1.30",
|
||||
"workspaces": [
|
||||
"./packages/bun-types"
|
||||
],
|
||||
@@ -43,7 +43,6 @@
|
||||
"build:release:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DWEBKIT_LOCAL=ON -B build/release",
|
||||
"build:release:with_logs": "cmake . -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=true -GNinja -Bbuild-release && ninja -Cbuild-release",
|
||||
"build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
|
||||
"css-properties": "bun run src/css/properties/generate_properties.ts",
|
||||
"bump": "bun ./scripts/bump.ts",
|
||||
"typecheck": "tsc --noEmit && cd test && bun run typecheck",
|
||||
"fmt": "bun run prettier",
|
||||
@@ -59,20 +58,19 @@
|
||||
"zig:check": "bun run zig build check --summary new",
|
||||
"zig:check-all": "bun run zig build check-all --summary new",
|
||||
"zig:check-windows": "bun run zig build check-windows --summary new",
|
||||
"analysis": "bun ./scripts/build.mjs -DCMAKE_BUILD_TYPE=Debug -DENABLE_ANALYSIS=ON -DENABLE_CCACHE=OFF -B build/analysis",
|
||||
"analysis:no-llvm": "bun run analysis -DENABLE_LLVM=OFF",
|
||||
"clang-format": "bun run analysis --target clang-format",
|
||||
"clang-format:check": "bun run analysis --target clang-format-check",
|
||||
"clang-format:diff": "bun run analysis --target clang-format-diff",
|
||||
"clang-tidy": "bun run analysis --target clang-tidy",
|
||||
"clang-tidy:check": "bun run analysis --target clang-tidy-check",
|
||||
"clang-tidy:diff": "bun run analysis --target clang-tidy-diff",
|
||||
"zig-format": "bun run analysis:no-llvm --target zig-format",
|
||||
"zig-format:check": "bun run analysis:no-llvm --target zig-format-check",
|
||||
"zig-format:diff": "bun run analysis:no-llvm --target zig-format-diff",
|
||||
"prettier": "bun run analysis:no-llvm --target prettier",
|
||||
"prettier:check": "bun run analysis:no-llvm --target prettier-check",
|
||||
"prettier:extra": "bun run analysis:no-llvm --target prettier-extra",
|
||||
"prettier:diff": "bun run analysis:no-llvm --target prettier-diff"
|
||||
"cmake": "bun ./scripts/build.mjs -DCMAKE_BUILD_TYPE=Debug -DENABLE_ANALYSIS=ON -B build/debug",
|
||||
"clang-format": "bun run cmake --target clang-format",
|
||||
"clang-format:check": "bun run cmake --target clang-format-check",
|
||||
"clang-format:diff": "bun run cmake --target clang-format-diff",
|
||||
"clang-tidy": "bun run cmake --target clang-tidy",
|
||||
"clang-tidy:check": "bun run cmake --target clang-tidy-check",
|
||||
"clang-tidy:diff": "bun run cmake --target clang-tidy-diff",
|
||||
"zig-format": "bun run cmake --target zig-format",
|
||||
"zig-format:check": "bun run cmake --target zig-format-check",
|
||||
"zig-format:diff": "bun run cmake --target zig-format-diff",
|
||||
"prettier": "bun run cmake --target prettier",
|
||||
"prettier:check": "bun run cmake --target prettier-check",
|
||||
"prettier:extra": "bun run cmake --target prettier-extra",
|
||||
"prettier:diff": "bun run cmake --target prettier-diff"
|
||||
}
|
||||
}
|
||||
|
||||
packages/bun-types/bun.d.ts
@@ -1595,28 +1595,6 @@ declare module "bun" {
|
||||
* @default false
|
||||
*/
|
||||
bytecode?: boolean;
|
||||
/**
|
||||
* Add a banner to the bundled code such as "use client";
|
||||
*/
|
||||
banner?: string;
|
||||
/**
|
||||
* Add a footer to the bundled code such as a comment block like
|
||||
*
|
||||
* `// made with bun!`
|
||||
*/
|
||||
footer?: string;
|
||||
|
||||
/**
|
||||
* **Experimental**
|
||||
*
|
||||
* Enable CSS support.
|
||||
*/
|
||||
experimentalCss?: boolean;
|
||||
|
||||
/**
|
||||
* Drop function calls to matching property accesses.
|
||||
*/
|
||||
drop?: string[];
|
||||
}
|
||||
|
||||
namespace Password {
|
||||
@@ -1650,7 +1628,7 @@ declare module "bun" {
|
||||
* automatically run in a worker thread.
|
||||
*
|
||||
* The underlying implementation of these functions are provided by the Zig
|
||||
* Standard Library. Thanks to @jedisct1 and other Zig contributors for their
|
||||
* Standard Library. Thanks to @jedisct1 and other Zig contributors for their
|
||||
* work on this.
|
||||
*
|
||||
* ### Example with argon2
|
||||
@@ -1753,7 +1731,7 @@ declare module "bun" {
|
||||
* instead which runs in a worker thread.
|
||||
*
|
||||
* The underlying implementation of these functions are provided by the Zig
|
||||
* Standard Library. Thanks to @jedisct1 and other Zig contributors for their
|
||||
* Standard Library. Thanks to @jedisct1 and other Zig contributors for their
|
||||
* work on this.
|
||||
*
|
||||
* ### Example with argon2
|
||||
@@ -1792,7 +1770,7 @@ declare module "bun" {
|
||||
* instead which runs in a worker thread.
|
||||
*
|
||||
* The underlying implementation of these functions are provided by the Zig
|
||||
* Standard Library. Thanks to @jedisct1 and other Zig contributors for their
|
||||
* Standard Library. Thanks to @jedisct1 and other Zig contributors for their
|
||||
* work on this.
|
||||
*
|
||||
* ### Example with argon2
|
||||
@@ -3023,7 +3001,6 @@ declare module "bun" {
|
||||
colors?: boolean;
|
||||
depth?: number;
|
||||
sorted?: boolean;
|
||||
compact?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -3039,14 +3016,6 @@ declare module "bun" {
|
||||
* That can be used to declare custom inspect functions.
|
||||
*/
|
||||
const custom: typeof import("util").inspect.custom;
|
||||
|
||||
/**
|
||||
* Pretty-print an object or array as a table
|
||||
*
|
||||
* Like {@link console.table}, except it returns a string
|
||||
*/
|
||||
function table(tabularData: object | unknown[], properties?: string[], options?: { colors?: boolean }): string;
|
||||
function table(tabularData: object | unknown[], options?: { colors?: boolean }): string;
|
||||
}
|
||||
|
||||
interface MMapOptions {
|
||||
@@ -3113,50 +3082,32 @@ declare module "bun" {
|
||||
* @example \x1b[38;2;100;200;200m
|
||||
*/
|
||||
| "ansi"
|
||||
| "ansi-16"
|
||||
| "ansi-16m"
|
||||
/**
|
||||
* 256 color ANSI color string, for use in terminals which don't support true color
|
||||
*
|
||||
* Tries to match closest 24-bit color to 256 color palette
|
||||
*/
|
||||
| "ansi-256"
|
||||
/**
|
||||
* Picks the format that produces the shortest output
|
||||
*/
|
||||
| "css"
|
||||
| "ansi256"
|
||||
/**
|
||||
* Lowercase hex color string without alpha
|
||||
* @example #ff9800
|
||||
* @example #aabb11
|
||||
*/
|
||||
| "hex"
|
||||
/**
|
||||
* Uppercase hex color string without alpha
|
||||
* @example #FF9800
|
||||
*/
|
||||
| "HEX"
|
||||
/**
|
||||
* @example hsl(35.764706, 1, 0.5)
|
||||
*/
|
||||
| "hsl"
|
||||
/**
|
||||
* @example lab(0.72732764, 33.938198, -25.311619)
|
||||
*/
|
||||
| "lab"
|
||||
/**
|
||||
* @example 16750592
|
||||
*/
|
||||
| "number"
|
||||
/**
|
||||
* RGB color string without alpha
|
||||
* @example rgb(255, 152, 0)
|
||||
* rgb(100, 200, 200)
|
||||
*/
|
||||
| "rgb"
|
||||
/**
|
||||
* RGB color string with alpha
|
||||
* @example rgba(255, 152, 0, 1)
|
||||
* rgba(100, 200, 200, 0.5)
|
||||
*/
|
||||
| "rgba",
|
||||
| "rgba"
|
||||
| "hsl"
|
||||
| "lab"
|
||||
| "css"
|
||||
| "lab"
|
||||
| "HEX",
|
||||
): string | null;
|
||||
|
||||
function color(
|
||||
@@ -3249,7 +3200,7 @@ declare module "bun" {
|
||||
}
|
||||
const unsafe: Unsafe;
|
||||
|
||||
type DigestEncoding = "utf8" | "ucs2" | "utf16le" | "latin1" | "ascii" | "base64" | "base64url" | "hex";
|
||||
type DigestEncoding = "hex" | "base64";
|
||||
|
||||
/**
|
||||
* Are ANSI colors enabled for stdin and stdout?
|
||||
@@ -3993,7 +3944,7 @@ declare module "bun" {
|
||||
*
|
||||
* In a future version of Bun, this will be used in error messages.
|
||||
*/
|
||||
name: string;
|
||||
name?: string;
|
||||
|
||||
/**
|
||||
* The target JavaScript environment the plugin should be applied to.
|
||||
@@ -4466,18 +4417,15 @@ declare module "bun" {
|
||||
hostname: string;
|
||||
port: number;
|
||||
tls?: TLSOptions;
|
||||
exclusive?: boolean;
|
||||
}
|
||||
|
||||
interface TCPSocketConnectOptions<Data = undefined> extends SocketOptions<Data> {
|
||||
hostname: string;
|
||||
port: number;
|
||||
tls?: boolean;
|
||||
exclusive?: boolean;
|
||||
}
|
||||
|
||||
interface UnixSocketOptions<Data = undefined> extends SocketOptions<Data> {
|
||||
tls?: TLSOptions;
|
||||
unix: string;
|
||||
}
|
||||
|
||||
|
||||
packages/bun-types/globals.d.ts
@@ -1673,36 +1673,7 @@ declare global {
|
||||
groupEnd(): void;
|
||||
info(...data: any[]): void;
|
||||
log(...data: any[]): void;
|
||||
/**
|
||||
* Try to construct a table with the columns of the properties of `tabularData` (or use `properties`) and rows of `tabularData` and log it. Falls back to just
|
||||
* logging the argument if it can't be parsed as tabular.
|
||||
*
|
||||
* ```js
|
||||
* // These can't be parsed as tabular data
|
||||
* console.table(Symbol());
|
||||
* // Symbol()
|
||||
*
|
||||
* console.table(undefined);
|
||||
* // undefined
|
||||
*
|
||||
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }]);
|
||||
* // ┌────┬─────┬─────┐
|
||||
* // │ │ a │ b │
|
||||
* // ├────┼─────┼─────┤
|
||||
* // │ 0 │ 1 │ 'Y' │
|
||||
* // │ 1 │ 'Z' │ 2 │
|
||||
* // └────┴─────┴─────┘
|
||||
*
|
||||
* console.table([{ a: 1, b: 'Y' }, { a: 'Z', b: 2 }], ['a']);
|
||||
* // ┌────┬─────┐
|
||||
* // │ │ a │
|
||||
* // ├────┼─────┤
|
||||
* // │ 0 │ 1 │
|
||||
* // │ 1 │ 'Z' │
|
||||
* // └────┴─────┘
|
||||
* ```
|
||||
* @param properties Alternate properties for constructing the table.
|
||||
*/
|
||||
/** Does nothing currently */
|
||||
table(tabularData?: any, properties?: string[]): void;
|
||||
/**
|
||||
* Begin a timer to log with {@link console.timeEnd}
|
||||
@@ -1867,6 +1838,14 @@ declare global {
|
||||
withCredentials?: boolean;
|
||||
}
|
||||
|
||||
interface EventSource extends Bun.EventSource {}
|
||||
var EventSource: typeof globalThis extends {
|
||||
onerror: any;
|
||||
EventSource: infer T;
|
||||
}
|
||||
? T
|
||||
: EventSource;
|
||||
|
||||
interface PromiseConstructor {
|
||||
/**
|
||||
* Create a deferred promise, with exposed `resolve` and `reject` methods which can be called
|
||||
|
||||
packages/bun-types/sqlite.d.ts
@@ -579,15 +579,6 @@ declare module "bun:sqlite" {
|
||||
*/
|
||||
get(...params: ParamsType): ReturnType | null;
|
||||
|
||||
/**
|
||||
* Execute the prepared statement and return an iterable iterator over the results.
|
||||
*
|
||||
* @param params optional values to bind to the statement. If omitted, the statement is run with the last bound values or no parameters if there are none.
|
||||
*
|
||||
*/
|
||||
iterate(...params: ParamsType): IterableIterator<ReturnType>;
|
||||
[Symbol.iterator](): IterableIterator<ReturnType>;
|
||||
|
||||
/**
|
||||
* Execute the prepared statement. This returns `undefined`.
|
||||
*
|
||||
|
||||
@@ -208,6 +208,10 @@ const writableStream = new WritableStream();
|
||||
const a = new ResolveError();
|
||||
a.level;
|
||||
}
|
||||
{
|
||||
const a = new EventSource("asdf");
|
||||
a.CLOSED;
|
||||
}
|
||||
{
|
||||
const a = new AbortController();
|
||||
a;
|
||||
|
||||
@@ -212,13 +212,12 @@ void us_socket_context_add_server_name(int ssl, struct us_socket_context_t *cont
|
||||
}
|
||||
#endif
|
||||
}
|
||||
int us_bun_socket_context_add_server_name(int ssl, struct us_socket_context_t *context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user) {
|
||||
void us_bun_socket_context_add_server_name(int ssl, struct us_socket_context_t *context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user) {
|
||||
#ifndef LIBUS_NO_SSL
|
||||
if (ssl) {
|
||||
return us_bun_internal_ssl_socket_context_add_server_name((struct us_internal_ssl_socket_context_t *) context, hostname_pattern, options, user);
|
||||
us_bun_internal_ssl_socket_context_add_server_name((struct us_internal_ssl_socket_context_t *) context, hostname_pattern, options, user);
|
||||
}
|
||||
#endif
|
||||
return 0;
|
||||
}
|
||||
|
||||
/* Remove SNI context */
|
||||
@@ -279,11 +278,11 @@ struct us_socket_context_t *us_create_socket_context(int ssl, struct us_loop_t *
|
||||
return context;
|
||||
}
|
||||
|
||||
struct us_socket_context_t *us_create_bun_socket_context(int ssl, struct us_loop_t *loop, int context_ext_size, struct us_bun_socket_context_options_t options, enum create_bun_socket_error_t *err) {
|
||||
struct us_socket_context_t *us_create_bun_socket_context(int ssl, struct us_loop_t *loop, int context_ext_size, struct us_bun_socket_context_options_t options) {
|
||||
#ifndef LIBUS_NO_SSL
|
||||
if (ssl) {
|
||||
/* This function will call us, again, with SSL = false and a bigger ext_size */
|
||||
return (struct us_socket_context_t *) us_internal_bun_create_ssl_socket_context(loop, context_ext_size, options, err);
|
||||
return (struct us_socket_context_t *) us_internal_bun_create_ssl_socket_context(loop, context_ext_size, options);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
||||
@@ -855,11 +855,6 @@ create_ssl_context_from_options(struct us_socket_context_options_t options) {
|
||||
}
|
||||
}
|
||||
|
||||
if (ERR_peek_error() != 0) {
|
||||
free_ssl_context(ssl_context);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
/* This must be free'd with free_ssl_context, not SSL_CTX_free */
|
||||
return ssl_context;
|
||||
}
|
||||
@@ -1109,10 +1104,7 @@ int us_verify_callback(int preverify_ok, X509_STORE_CTX *ctx) {
|
||||
}
|
||||
|
||||
SSL_CTX *create_ssl_context_from_bun_options(
|
||||
struct us_bun_socket_context_options_t options,
|
||||
enum create_bun_socket_error_t *err) {
|
||||
ERR_clear_error();
|
||||
|
||||
struct us_bun_socket_context_options_t options) {
|
||||
/* Create the context */
|
||||
SSL_CTX *ssl_context = SSL_CTX_new(TLS_method());
|
||||
|
||||
@@ -1182,7 +1174,6 @@ SSL_CTX *create_ssl_context_from_bun_options(
|
||||
STACK_OF(X509_NAME) * ca_list;
|
||||
ca_list = SSL_load_client_CA_file(options.ca_file_name);
|
||||
if (ca_list == NULL) {
|
||||
*err = CREATE_BUN_SOCKET_ERROR_LOAD_CA_FILE;
|
||||
free_ssl_context(ssl_context);
|
||||
return NULL;
|
||||
}
|
||||
@@ -1190,7 +1181,6 @@ SSL_CTX *create_ssl_context_from_bun_options(
|
||||
SSL_CTX_set_client_CA_list(ssl_context, ca_list);
|
||||
if (SSL_CTX_load_verify_locations(ssl_context, options.ca_file_name,
|
||||
NULL) != 1) {
|
||||
*err = CREATE_BUN_SOCKET_ERROR_INVALID_CA_FILE;
|
||||
free_ssl_context(ssl_context);
|
||||
return NULL;
|
||||
}
|
||||
@@ -1213,14 +1203,10 @@ SSL_CTX *create_ssl_context_from_bun_options(
|
||||
}
|
||||
|
||||
if (!add_ca_cert_to_ctx_store(ssl_context, options.ca[i], cert_store)) {
|
||||
*err = CREATE_BUN_SOCKET_ERROR_INVALID_CA;
|
||||
free_ssl_context(ssl_context);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// It may return spurious errors here.
|
||||
ERR_clear_error();
|
||||
|
||||
if (options.reject_unauthorized) {
|
||||
SSL_CTX_set_verify(ssl_context,
|
||||
SSL_VERIFY_PEER | SSL_VERIFY_FAIL_IF_NO_PEER_CERT,
|
||||
@@ -1346,17 +1332,13 @@ void us_internal_ssl_socket_context_add_server_name(
|
||||
}
|
||||
}
|
||||
|
||||
int us_bun_internal_ssl_socket_context_add_server_name(
|
||||
void us_bun_internal_ssl_socket_context_add_server_name(
|
||||
struct us_internal_ssl_socket_context_t *context,
|
||||
const char *hostname_pattern,
|
||||
struct us_bun_socket_context_options_t options, void *user) {
|
||||
|
||||
/* Try and construct an SSL_CTX from options */
|
||||
enum create_bun_socket_error_t err = CREATE_BUN_SOCKET_ERROR_NONE;
|
||||
SSL_CTX *ssl_context = create_ssl_context_from_bun_options(options, &err);
|
||||
if (ssl_context == NULL) {
|
||||
return -1;
|
||||
}
|
||||
SSL_CTX *ssl_context = create_ssl_context_from_bun_options(options);
|
||||
|
||||
/* Attach the user data to this context */
|
||||
if (1 != SSL_CTX_set_ex_data(ssl_context, 0, user)) {
|
||||
@@ -1364,15 +1346,15 @@ int us_bun_internal_ssl_socket_context_add_server_name(
|
||||
printf("CANNOT SET EX DATA!\n");
|
||||
abort();
|
||||
#endif
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (sni_add(context->sni, hostname_pattern, ssl_context)) {
|
||||
/* If we already had that name, ignore */
|
||||
free_ssl_context(ssl_context);
|
||||
/* We do not want to hold any nullptr's in our SNI tree */
|
||||
if (ssl_context) {
|
||||
if (sni_add(context->sni, hostname_pattern, ssl_context)) {
|
||||
/* If we already had that name, ignore */
|
||||
free_ssl_context(ssl_context);
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
void us_internal_ssl_socket_context_on_server_name(
|
||||
@@ -1486,15 +1468,14 @@ struct us_internal_ssl_socket_context_t *us_internal_create_ssl_socket_context(
|
||||
struct us_internal_ssl_socket_context_t *
|
||||
us_internal_bun_create_ssl_socket_context(
|
||||
struct us_loop_t *loop, int context_ext_size,
|
||||
struct us_bun_socket_context_options_t options,
|
||||
enum create_bun_socket_error_t *err) {
|
||||
struct us_bun_socket_context_options_t options) {
|
||||
/* If we haven't initialized the loop data yet, do so .
|
||||
* This is needed because loop data holds shared OpenSSL data and
|
||||
* the function is also responsible for initializing OpenSSL */
|
||||
us_internal_init_loop_ssl_data(loop);
|
||||
|
||||
/* First of all we try and create the SSL context from options */
|
||||
SSL_CTX *ssl_context = create_ssl_context_from_bun_options(options, err);
|
||||
SSL_CTX *ssl_context = create_ssl_context_from_bun_options(options);
|
||||
if (!ssl_context) {
|
||||
/* We simply fail early if we cannot even create the OpenSSL context */
|
||||
return NULL;
|
||||
@@ -1506,7 +1487,7 @@ us_internal_bun_create_ssl_socket_context(
|
||||
(struct us_internal_ssl_socket_context_t *)us_create_bun_socket_context(
|
||||
0, loop,
|
||||
sizeof(struct us_internal_ssl_socket_context_t) + context_ext_size,
|
||||
options, err);
|
||||
options);
|
||||
|
||||
/* I guess this is the only optional callback */
|
||||
context->on_server_name = NULL;
|
||||
@@ -2002,10 +1983,9 @@ struct us_internal_ssl_socket_t *us_internal_ssl_socket_wrap_with_tls(
|
||||
struct us_socket_context_t *old_context = us_socket_context(0, s);
|
||||
us_socket_context_ref(0,old_context);
|
||||
|
||||
enum create_bun_socket_error_t err = CREATE_BUN_SOCKET_ERROR_NONE;
|
||||
struct us_socket_context_t *context = us_create_bun_socket_context(
|
||||
1, old_context->loop, sizeof(struct us_wrapped_socket_context_t),
|
||||
options, &err);
|
||||
options);
|
||||
|
||||
// Handle SSL context creation failure
|
||||
if (UNLIKELY(!context)) {
|
||||
|
||||
@@ -302,7 +302,7 @@ void us_internal_ssl_socket_context_add_server_name(
|
||||
us_internal_ssl_socket_context_r context,
|
||||
const char *hostname_pattern, struct us_socket_context_options_t options,
|
||||
void *user);
|
||||
int us_bun_internal_ssl_socket_context_add_server_name(
|
||||
void us_bun_internal_ssl_socket_context_add_server_name(
|
||||
us_internal_ssl_socket_context_r context,
|
||||
const char *hostname_pattern,
|
||||
struct us_bun_socket_context_options_t options, void *user);
|
||||
@@ -330,8 +330,7 @@ struct us_internal_ssl_socket_context_t *us_internal_create_ssl_socket_context(
|
||||
struct us_internal_ssl_socket_context_t *
|
||||
us_internal_bun_create_ssl_socket_context(
|
||||
struct us_loop_t *loop, int context_ext_size,
|
||||
struct us_bun_socket_context_options_t options,
|
||||
enum create_bun_socket_error_t *err);
|
||||
struct us_bun_socket_context_options_t options);
|
||||
|
||||
void us_internal_ssl_socket_context_free(
|
||||
us_internal_ssl_socket_context_r context);
|
||||
|
||||
@@ -234,7 +234,7 @@ unsigned short us_socket_context_timestamp(int ssl, us_socket_context_r context)
|
||||
|
||||
/* Adds SNI domain and cert in asn1 format */
|
||||
void us_socket_context_add_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern, struct us_socket_context_options_t options, void *user);
|
||||
int us_bun_socket_context_add_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user);
|
||||
void us_bun_socket_context_add_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern, struct us_bun_socket_context_options_t options, void *user);
|
||||
void us_socket_context_remove_server_name(int ssl, us_socket_context_r context, const char *hostname_pattern);
|
||||
void us_socket_context_on_server_name(int ssl, us_socket_context_r context, void (*cb)(us_socket_context_r context, const char *hostname));
|
||||
void *us_socket_server_name_userdata(int ssl, us_socket_r s);
|
||||
@@ -246,16 +246,8 @@ void *us_socket_context_get_native_handle(int ssl, us_socket_context_r context);
|
||||
/* A socket context holds shared callbacks and user data extension for associated sockets */
|
||||
struct us_socket_context_t *us_create_socket_context(int ssl, us_loop_r loop,
|
||||
int ext_size, struct us_socket_context_options_t options) nonnull_fn_decl;
|
||||
|
||||
enum create_bun_socket_error_t {
|
||||
CREATE_BUN_SOCKET_ERROR_NONE = 0,
|
||||
CREATE_BUN_SOCKET_ERROR_LOAD_CA_FILE,
|
||||
CREATE_BUN_SOCKET_ERROR_INVALID_CA_FILE,
|
||||
CREATE_BUN_SOCKET_ERROR_INVALID_CA,
|
||||
};
|
||||
|
||||
struct us_socket_context_t *us_create_bun_socket_context(int ssl, struct us_loop_t *loop,
|
||||
int ext_size, struct us_bun_socket_context_options_t options, enum create_bun_socket_error_t *err);
|
||||
int ext_size, struct us_bun_socket_context_options_t options);
|
||||
|
||||
/* Delete resources allocated at creation time (will call unref now and only free when ref count == 0). */
|
||||
void us_socket_context_free(int ssl, us_socket_context_r context) nonnull_fn_decl;
|
||||
|
||||
@@ -16,7 +16,8 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
// clang-format off
|
||||
|
||||
#ifndef UWS_APP_H
|
||||
#define UWS_APP_H
|
||||
|
||||
#include <string>
|
||||
#include <charconv>
|
||||
@@ -105,17 +106,14 @@ public:
|
||||
|
||||
|
||||
/* Server name */
|
||||
TemplatedApp &&addServerName(std::string hostname_pattern, SocketContextOptions options = {}, bool *success = nullptr) {
|
||||
TemplatedApp &&addServerName(std::string hostname_pattern, SocketContextOptions options = {}) {
|
||||
|
||||
/* Do nothing if not even on SSL */
|
||||
if constexpr (SSL) {
|
||||
/* First we create a new router for this domain */
|
||||
auto *domainRouter = new HttpRouter<typename HttpContextData<SSL>::RouterData>();
|
||||
|
||||
int result = us_bun_socket_context_add_server_name(SSL, (struct us_socket_context_t *) httpContext, hostname_pattern.c_str(), options, domainRouter);
|
||||
if (success) {
|
||||
*success = result == 0;
|
||||
}
|
||||
us_bun_socket_context_add_server_name(SSL, (struct us_socket_context_t *) httpContext, hostname_pattern.c_str(), options, domainRouter);
|
||||
}
|
||||
|
||||
return std::move(*this);
|
||||
@@ -240,18 +238,6 @@ public:
|
||||
httpContext = HttpContext<SSL>::create(Loop::get(), options);
|
||||
}
|
||||
|
||||
TemplatedApp(HttpContext<SSL> &context) {
|
||||
httpContext = &context;
|
||||
}
|
||||
|
||||
static TemplatedApp<SSL>* create(SocketContextOptions options = {}) {
|
||||
auto* httpContext = HttpContext<SSL>::create(Loop::get(), options);
|
||||
if (!httpContext) {
|
||||
return nullptr;
|
||||
}
|
||||
return new TemplatedApp<SSL>(*httpContext);
|
||||
}
|
||||
|
||||
bool constructorFailed() {
|
||||
return !httpContext;
|
||||
}
|
||||
@@ -618,3 +604,4 @@ typedef TemplatedApp<true> SSLApp;
|
||||
|
||||
}
|
||||
|
||||
#endif // UWS_APP_H
|
||||
@@ -16,7 +16,8 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
#ifndef UWS_HTTPCONTEXT_H
|
||||
#define UWS_HTTPCONTEXT_H
|
||||
|
||||
/* This class defines the main behavior of HTTP and emits various events */
|
||||
|
||||
@@ -26,8 +27,6 @@
|
||||
#include "AsyncSocket.h"
|
||||
#include "WebSocketData.h"
|
||||
|
||||
#include <string>
|
||||
#include <map>
|
||||
#include <string_view>
|
||||
#include <iostream>
|
||||
#include "MoveOnlyFunction.h"
|
||||
@@ -172,7 +171,7 @@ private:
|
||||
#endif
|
||||
|
||||
/* The return value is entirely up to us to interpret. The HttpParser only care for whether the returned value is DIFFERENT or not from passed user */
|
||||
auto [err, returnedSocket] = httpResponseData->consumePostPadded(data, (unsigned int) length, s, proxyParser, [httpContextData](void *s, HttpRequest *httpRequest) -> void * {
|
||||
void *returnedSocket = httpResponseData->consumePostPadded(data, (unsigned int) length, s, proxyParser, [httpContextData](void *s, HttpRequest *httpRequest) -> void * {
|
||||
/* For every request we reset the timeout and hang until user makes action */
|
||||
/* Warning: if we are in shutdown state, resetting the timer is a security issue! */
|
||||
us_socket_timeout(SSL, (us_socket_t *) s, 0);
|
||||
@@ -181,9 +180,7 @@ private:
|
||||
HttpResponseData<SSL> *httpResponseData = (HttpResponseData<SSL> *) us_socket_ext(SSL, (us_socket_t *) s);
|
||||
httpResponseData->offset = 0;
|
||||
|
||||
/* Are we not ready for another request yet? Terminate the connection.
|
||||
* Important for denying async pipelining until, if ever, we want to support it.
|
||||
* Otherwise requests can get mixed up on the same connection. We still support sync pipelining. */
|
||||
/* Are we not ready for another request yet? Terminate the connection. */
|
||||
if (httpResponseData->state & HttpResponseData<SSL>::HTTP_RESPONSE_PENDING) {
|
||||
us_socket_close(SSL, (us_socket_t *) s, 0, nullptr);
|
||||
return nullptr;
|
||||
@@ -283,6 +280,10 @@ private:
|
||||
}
|
||||
}
|
||||
return user;
|
||||
}, [](void *user) {
|
||||
/* Close any socket on HTTP errors */
|
||||
us_socket_close(SSL, (us_socket_t *) user, 0, nullptr);
|
||||
return nullptr;
|
||||
});
|
||||
|
||||
/* Mark that we are no longer parsing Http */
|
||||
@@ -290,9 +291,6 @@ private:
|
||||
|
||||
/* If we got fullptr that means the parser wants us to close the socket from error (same as calling the errorHandler) */
|
||||
if (returnedSocket == FULLPTR) {
|
||||
/* For errors, we only deliver them "at most once". We don't care if they get halfways delivered or not. */
|
||||
us_socket_write(SSL, s, httpErrorResponses[err].data(), (int) httpErrorResponses[err].length(), false);
|
||||
us_socket_shutdown(SSL, s);
|
||||
/* Close any socket on HTTP errors */
|
||||
us_socket_close(SSL, s, 0, nullptr);
|
||||
/* This just makes the following code act as if the socket was closed from error inside the parser. */
|
||||
@@ -301,8 +299,9 @@ private:
|
||||
|
||||
/* We need to uncork in all cases, except for nullptr (closed socket, or upgraded socket) */
|
||||
if (returnedSocket != nullptr) {
|
||||
us_socket_t* returnedSocketPtr = (us_socket_t*) returnedSocket;
|
||||
/* We don't want open sockets to keep the event loop alive between HTTP requests */
|
||||
us_socket_unref((us_socket_t *) returnedSocket);
|
||||
us_socket_unref(returnedSocketPtr);
|
||||
|
||||
/* Timeout on uncork failure */
|
||||
auto [written, failed] = ((AsyncSocket<SSL> *) returnedSocket)->uncork();
|
||||
@@ -322,7 +321,7 @@ private:
|
||||
}
|
||||
}
|
||||
}
|
||||
return (us_socket_t *) returnedSocket;
|
||||
return returnedSocketPtr;
|
||||
}
|
||||
|
||||
/* If we upgraded, check here (differ between nullptr close and nullptr upgrade) */
|
||||
@@ -434,8 +433,7 @@ public:
|
||||
static HttpContext *create(Loop *loop, us_bun_socket_context_options_t options = {}) {
|
||||
HttpContext *httpContext;
|
||||
|
||||
enum create_bun_socket_error_t err = CREATE_BUN_SOCKET_ERROR_NONE;
|
||||
httpContext = (HttpContext *) us_create_bun_socket_context(SSL, (us_loop_t *) loop, sizeof(HttpContextData<SSL>), options, &err);
|
||||
httpContext = (HttpContext *) us_create_bun_socket_context(SSL, (us_loop_t *) loop, sizeof(HttpContextData<SSL>), options);
|
||||
|
||||
if (!httpContext) {
|
||||
return nullptr;
|
||||
@@ -484,27 +482,10 @@ public:
|
||||
return;
|
||||
}
|
||||
|
||||
/* Record this route's parameter offsets */
|
||||
std::map<std::string, unsigned short, std::less<>> parameterOffsets;
|
||||
unsigned short offset = 0;
|
||||
for (unsigned int i = 0; i < pattern.length(); i++) {
|
||||
if (pattern[i] == ':') {
|
||||
i++;
|
||||
unsigned int start = i;
|
||||
while (i < pattern.length() && pattern[i] != '/') {
|
||||
i++;
|
||||
}
|
||||
parameterOffsets[std::string(pattern.data() + start, i - start)] = offset;
|
||||
//std::cout << "<" << std::string(pattern.data() + start, i - start) << "> is offset " << offset;
|
||||
offset++;
|
||||
}
|
||||
}
|
||||
|
||||
httpContextData->currentRouter->add(methods, pattern, [handler = std::move(handler), parameterOffsets = std::move(parameterOffsets)](auto *r) mutable {
|
||||
httpContextData->currentRouter->add(methods, pattern, [handler = std::move(handler)](auto *r) mutable {
|
||||
auto user = r->getUserData();
|
||||
user.httpRequest->setYield(false);
|
||||
user.httpRequest->setParameters(r->getParameters());
|
||||
user.httpRequest->setParameterOffsets(¶meterOffsets);
|
||||
|
||||
/* Middleware? Automatically respond to expectations */
|
||||
std::string_view expect = user.httpRequest->getHeader("expect");
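// Illustration only (not part of the diff): a standalone sketch of the
// parameter-offset scan shown in this hunk, using a hypothetical route
// pattern. Each ":name" segment is recorded with its positional index.
#include <functional>
#include <iostream>
#include <map>
#include <string>
#include <string_view>

int main() {
    std::string_view pattern = "/users/:id/books/:bookId"; // hypothetical pattern
    std::map<std::string, unsigned short, std::less<>> parameterOffsets;
    unsigned short offset = 0;
    for (unsigned int i = 0; i < pattern.length(); i++) {
        if (pattern[i] == ':') {
            i++;
            unsigned int start = i;
            while (i < pattern.length() && pattern[i] != '/') i++;
            parameterOffsets[std::string(pattern.data() + start, i - start)] = offset++;
        }
    }
    for (const auto &[name, off] : parameterOffsets)
        std::cout << name << " -> " << off << "\n"; // prints "bookId -> 1" then "id -> 0"
}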
|
||||
@@ -546,4 +527,4 @@ public:
|
||||
|
||||
}
|
||||
|
||||
|
||||
#endif // UWS_HTTPCONTEXT_H
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
/*
|
||||
* Authored by Alex Hultman, 2018-2023.
|
||||
* Intellectual property of third-party.
|
||||
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#ifndef UWS_HTTP_ERRORS
|
||||
#define UWS_HTTP_ERRORS
|
||||
|
||||
#include <string_view>
|
||||
|
||||
namespace uWS {
|
||||
/* Possible errors from http parsing */
|
||||
enum HttpError {
|
||||
HTTP_ERROR_505_HTTP_VERSION_NOT_SUPPORTED = 1,
|
||||
HTTP_ERROR_431_REQUEST_HEADER_FIELDS_TOO_LARGE = 2,
|
||||
HTTP_ERROR_400_BAD_REQUEST = 3
|
||||
};
|
||||
|
||||
#ifndef UWS_HTTPRESPONSE_NO_WRITEMARK
|
||||
|
||||
/* Returned parser errors match this LUT. */
|
||||
static const std::string_view httpErrorResponses[] = {
|
||||
"", /* Zeroth place is no error so don't use it */
|
||||
"HTTP/1.1 505 HTTP Version Not Supported\r\nConnection: close\r\n\r\n<h1>HTTP Version Not Supported</h1><p>This server does not support HTTP/1.0.</p><hr><i>uWebSockets/20 Server</i>",
|
||||
"HTTP/1.1 431 Request Header Fields Too Large\r\nConnection: close\r\n\r\n<h1>Request Header Fields Too Large</h1><hr><i>uWebSockets/20 Server</i>",
|
||||
"HTTP/1.1 400 Bad Request\r\nConnection: close\r\n\r\n<h1>Bad Request</h1><hr><i>uWebSockets/20 Server</i>",
|
||||
};
|
||||
|
||||
#else
|
||||
/* Anonymized pages */
|
||||
static const std::string_view httpErrorResponses[] = {
|
||||
"", /* Zeroth place is no error so don't use it */
|
||||
"HTTP/1.1 505 HTTP Version Not Supported\r\nConnection: close\r\n\r\n",
|
||||
"HTTP/1.1 431 Request Header Fields Too Large\r\nConnection: close\r\n\r\n",
|
||||
"HTTP/1.1 400 Bad Request\r\nConnection: close\r\n\r\n"
|
||||
};
|
||||
#endif
|
||||
|
||||
}
|
||||
|
||||
#endif
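// Illustration only (not part of the diff): the HttpError enum value doubles as
// an index into the httpErrorResponses LUT above (see httpErrorResponses[err]
// in HttpContext.h). A minimal standalone sketch of that lookup pattern:
#include <iostream>
#include <string_view>

enum SketchHttpError { SKETCH_505 = 1, SKETCH_431 = 2, SKETCH_400 = 3 };

static const std::string_view sketchResponses[] = {
    "", // index 0 is unused, mirroring the LUT above
    "HTTP/1.1 505 HTTP Version Not Supported\r\nConnection: close\r\n\r\n",
    "HTTP/1.1 431 Request Header Fields Too Large\r\nConnection: close\r\n\r\n",
    "HTTP/1.1 400 Bad Request\r\nConnection: close\r\n\r\n",
};

int main() {
    SketchHttpError err = SKETCH_400;
    std::cout << sketchResponses[err]; // writes the canned 400 response
}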
|
||||
@@ -1,42 +0,0 @@
|
||||
/*
|
||||
* Authored by Alex Hultman, 2018-2023.
|
||||
* Intellectual property of third-party.
|
||||
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <string_view>
|
||||
|
||||
namespace uWS {
|
||||
/* Possible errors from http parsing */
|
||||
enum HttpError {
|
||||
HTTP_ERROR_505_HTTP_VERSION_NOT_SUPPORTED = 1,
|
||||
HTTP_ERROR_431_REQUEST_HEADER_FIELDS_TOO_LARGE = 2,
|
||||
HTTP_ERROR_400_BAD_REQUEST = 3
|
||||
};
|
||||
|
||||
|
||||
/* Anonymized pages */
|
||||
static const std::string_view httpErrorResponses[] = {
|
||||
"", /* Zeroth place is no error so don't use it */
|
||||
"HTTP/1.1 505 HTTP Version Not Supported\r\nConnection: close\r\n\r\n",
|
||||
"HTTP/1.1 431 Request Header Fields Too Large\r\nConnection: close\r\n\r\n",
|
||||
"HTTP/1.1 400 Bad Request\r\nConnection: close\r\n\r\n"
|
||||
};
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -15,7 +15,8 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
// clang-format off
|
||||
#pragma once
|
||||
#ifndef UWS_HTTPRESPONSEDATA_H
|
||||
#define UWS_HTTPRESPONSEDATA_H
|
||||
|
||||
/* This data belongs to the HttpResponse */
|
||||
|
||||
@@ -105,4 +106,4 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
|
||||
|
||||
}
|
||||
|
||||
|
||||
#endif // UWS_HTTPRESPONSEDATA_H
|
||||
|
||||
@@ -15,7 +15,9 @@
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
#ifndef UWS_TOPICTREE_H
|
||||
#define UWS_TOPICTREE_H
|
||||
|
||||
#include <map>
|
||||
#include <list>
|
||||
#include <iostream>
|
||||
@@ -364,4 +366,4 @@ public:
|
||||
|
||||
}
|
||||
|
||||
|
||||
#endif
|
||||
|
||||
root.zig
@@ -3,6 +3,16 @@ pub usingnamespace @import("./src/main.zig");
|
||||
/// These functions are used throughout Bun's codebase.
|
||||
pub const bun = @import("./src/bun.zig");
|
||||
|
||||
pub const content = struct {
|
||||
pub const error_js_path = "packages/bun-error/dist/index.js";
|
||||
pub const error_js = @embedFile(error_js_path);
|
||||
|
||||
pub const error_css_path = "packages/bun-error/dist/bun-error.css";
|
||||
pub const error_css_path_dev = "packages/bun-error/bun-error.css";
|
||||
|
||||
pub const error_css = @embedFile(error_css_path);
|
||||
};
|
||||
|
||||
pub const completions = struct {
|
||||
pub const bash = @embedFile("./completions/bun.bash");
|
||||
pub const zsh = @embedFile("./completions/bun.zsh");
|
||||
|
||||
@@ -2,6 +2,16 @@ pub usingnamespace @import("src/main_wasm.zig");
|
||||
|
||||
pub const bun = @import("src/bun.zig");
|
||||
|
||||
pub const content = struct {
|
||||
pub const error_js_path = "packages/bun-error/dist/index.js";
|
||||
pub const error_js = @embedFile(error_js_path);
|
||||
|
||||
pub const error_css_path = "packages/bun-error/dist/bun-error.css";
|
||||
pub const error_css_path_dev = "packages/bun-error/bun-error.css";
|
||||
|
||||
pub const error_css = @embedFile(error_css_path);
|
||||
};
|
||||
|
||||
pub const completions = struct {};
|
||||
pub const is_bindgen = true;
|
||||
pub const JavaScriptCore = struct {
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
const body = process.env.GITHUB_ISSUE_BODY;
|
||||
const SENTRY_AUTH_TOKEN = process.env.SENTRY_AUTH_TOKEN;
|
||||
|
||||
if (!body || !SENTRY_AUTH_TOKEN) {
|
||||
throw new Error("Missing environment variables");
|
||||
}
|
||||
|
||||
const id = body.indexOf("<!-- sentry_id: ");
|
||||
const endIdLine = body.indexOf(" -->", id + 1);
|
||||
if (!(id > -1 && endIdLine > -1)) {
|
||||
throw new Error("Missing sentry_id");
|
||||
}
|
||||
const sentryId = body.slice(id + "<!-- sentry_id: ".length, endIdLine).trim();
|
||||
if (!sentryId) {
|
||||
throw new Error("Missing sentry_id");
|
||||
}
|
||||
|
||||
const response = await fetch(`https://sentry.io/api/0/organizations/4507155222364160/eventids/${sentryId}/`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${SENTRY_AUTH_TOKEN}`,
|
||||
},
|
||||
});
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch Sentry event: ${response.statusText}`);
|
||||
}
|
||||
const json = await response.json();
|
||||
const groupId = json?.groupId;
|
||||
if (!groupId) {
|
||||
throw new Error("Missing groupId");
|
||||
}
|
||||
|
||||
const issueResponse = await fetch(`https://sentry.io/api/0/issues/${groupId}/`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${SENTRY_AUTH_TOKEN}`,
|
||||
},
|
||||
});
|
||||
if (!issueResponse.ok) {
|
||||
throw new Error(`Failed to fetch Sentry issue: ${issueResponse.statusText}`);
|
||||
}
|
||||
const { shortId, permalink } = await issueResponse.json();
|
||||
if (!shortId || !permalink) {
|
||||
throw new Error("Missing shortId or permalink");
|
||||
}
|
||||
|
||||
console.log(`Sentry ID: ${shortId}`);
|
||||
console.log(`Sentry permalink: ${permalink}`);
|
||||
|
||||
await Bun.write("sentry-id.txt", shortId);
|
||||
await Bun.write("sentry-link.txt", permalink);
|
||||
|
||||
export {};
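// Illustration only (not part of the diff): how the marker extraction above
// behaves on a hypothetical issue body.
const exampleBody = "Bun crashed while running tests.\n<!-- sentry_id: abc123def456 -->";
const exampleStart = exampleBody.indexOf("<!-- sentry_id: ");
const exampleEnd = exampleBody.indexOf(" -->", exampleStart + 1);
const exampleId = exampleBody.slice(exampleStart + "<!-- sentry_id: ".length, exampleEnd).trim();
console.log(exampleId); // "abc123def456"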
|
||||
@@ -1,714 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
# A script that installs the dependencies needed to build and test Bun.
|
||||
# This should work on macOS and Linux with a POSIX shell.
|
||||
|
||||
# If this script does not work on your machine, please open an issue:
|
||||
# https://github.com/oven-sh/bun/issues
|
||||
|
||||
# If you need to make a change to this script, such as upgrading a dependency,
|
||||
# increment the version number to indicate that a new image should be built.
|
||||
# Otherwise, the existing image will be retroactively updated.
|
||||
v="3"
|
||||
pid=$$
|
||||
script="$(realpath "$0")"
|
||||
|
||||
print() {
|
||||
echo "$@"
|
||||
}
|
||||
|
||||
error() {
|
||||
echo "error: $@" >&2
|
||||
kill -s TERM "$pid"
|
||||
exit 1
|
||||
}
|
||||
|
||||
execute() {
|
||||
print "$ $@" >&2
|
||||
if ! "$@"; then
|
||||
error "Command failed: $@"
|
||||
fi
|
||||
}
|
||||
|
||||
execute_sudo() {
|
||||
if [ "$sudo" = "1" ]; then
|
||||
execute "$@"
|
||||
else
|
||||
execute sudo "$@"
|
||||
fi
|
||||
}
|
||||
|
||||
execute_non_root() {
|
||||
if [ "$sudo" = "1" ]; then
|
||||
execute sudo -u "$user" "$@"
|
||||
else
|
||||
execute "$@"
|
||||
fi
|
||||
}
|
||||
|
||||
which() {
|
||||
command -v "$1"
|
||||
}
|
||||
|
||||
require() {
|
||||
path="$(which "$1")"
|
||||
if ! [ -f "$path" ]; then
|
||||
error "Command \"$1\" is required, but is not installed."
|
||||
fi
|
||||
echo "$path"
|
||||
}
|
||||
|
||||
fetch() {
|
||||
curl=$(which curl)
|
||||
if [ -f "$curl" ]; then
|
||||
execute "$curl" -fsSL "$1"
|
||||
else
|
||||
wget=$(which wget)
|
||||
if [ -f "$wget" ]; then
|
||||
execute "$wget" -qO- "$1"
|
||||
else
|
||||
error "Command \"curl\" or \"wget\" is required, but is not installed."
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
download_file() {
|
||||
url="$1"
|
||||
filename="${2:-$(basename "$url")}"
|
||||
path="$(mktemp -d)/$filename"
|
||||
|
||||
fetch "$url" > "$path"
|
||||
print "$path"
|
||||
}
|
||||
|
||||
compare_version() {
|
||||
if [ "$1" = "$2" ]; then
|
||||
echo "0"
|
||||
elif [ "$1" = "$(echo -e "$1\n$2" | sort -V | head -n1)" ]; then
|
||||
echo "-1"
|
||||
else
|
||||
echo "1"
|
||||
fi
|
||||
}
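# Illustration only (not part of the original script): expected behavior of
# compare_version, e.g. for the glibc check in install_nodejs below.
#   compare_version "2.31" "2.27"   # prints "1"  (first argument is newer)
#   compare_version "2.17" "2.27"   # prints "-1" (first argument is older)
#   compare_version "2.27" "2.27"   # prints "0"  (arguments are equal)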
|
||||
|
||||
append_to_file() {
|
||||
file="$1"
|
||||
content="$2"
|
||||
|
||||
if ! [ -f "$file" ]; then
|
||||
execute mkdir -p "$(dirname "$file")"
|
||||
execute touch "$file"
|
||||
fi
|
||||
|
||||
echo "$content" | while read -r line; do
|
||||
if ! grep -q "$line" "$file"; then
|
||||
echo "$line" >> "$file"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
append_to_profile() {
|
||||
content="$1"
|
||||
profiles=".profile .zprofile .bash_profile .bashrc .zshrc"
|
||||
for profile in $profiles; do
|
||||
file="$HOME/$profile"
|
||||
if [ "$ci" = "1" ] || [ -f "$file" ]; then
|
||||
append_to_file "$file" "$content"
|
||||
fi
|
||||
done
|
||||
}
|
||||
|
||||
append_to_path() {
|
||||
path="$1"
|
||||
if ! [ -d "$path" ]; then
|
||||
error "Could not find directory: \"$path\""
|
||||
fi
|
||||
|
||||
append_to_profile "export PATH=\"$path:\$PATH\""
|
||||
export PATH="$path:$PATH"
|
||||
}
|
||||
|
||||
check_system() {
|
||||
uname="$(require uname)"
|
||||
|
||||
os="$($uname -s)"
|
||||
case "$os" in
|
||||
Linux*) os="linux" ;;
|
||||
Darwin*) os="darwin" ;;
|
||||
*) error "Unsupported operating system: $os" ;;
|
||||
esac
|
||||
|
||||
arch="$($uname -m)"
|
||||
case "$arch" in
|
||||
x86_64 | x64 | amd64) arch="x64" ;;
|
||||
aarch64 | arm64) arch="aarch64" ;;
|
||||
*) error "Unsupported architecture: $arch" ;;
|
||||
esac
|
||||
|
||||
kernel="$(uname -r)"
|
||||
|
||||
if [ "$os" = "darwin" ]; then
|
||||
sw_vers="$(which sw_vers)"
|
||||
if [ -f "$sw_vers" ]; then
|
||||
distro="$($sw_vers -productName)"
|
||||
release="$($sw_vers -productVersion)"
|
||||
fi
|
||||
|
||||
if [ "$arch" = "x64" ]; then
|
||||
sysctl="$(which sysctl)"
|
||||
if [ -f "$sysctl" ] && [ "$($sysctl -n sysctl.proc_translated 2>/dev/null)" = "1" ]; then
|
||||
arch="aarch64"
|
||||
rosetta="1"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$os" = "linux" ] && [ -f /etc/os-release ]; then
|
||||
. /etc/os-release
|
||||
if [ -n "$ID" ]; then
|
||||
distro="$ID"
|
||||
fi
|
||||
if [ -n "$VERSION_ID" ]; then
|
||||
release="$VERSION_ID"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$os" = "linux" ]; then
|
||||
rpm="$(which rpm)"
|
||||
if [ -f "$rpm" ]; then
|
||||
glibc="$($rpm -q glibc --queryformat '%{VERSION}\n')"
|
||||
else
|
||||
ldd="$(which ldd)"
|
||||
awk="$(which awk)"
|
||||
if [ -f "$ldd" ] && [ -f "$awk" ]; then
|
||||
glibc="$($ldd --version | $awk 'NR==1{print $NF}')"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$os" = "darwin" ]; then
|
||||
brew="$(which brew)"
|
||||
pm="brew"
|
||||
fi
|
||||
|
||||
if [ "$os" = "linux" ]; then
|
||||
apt="$(which apt-get)"
|
||||
if [ -f "$apt" ]; then
|
||||
pm="apt"
|
||||
else
|
||||
dnf="$(which dnf)"
|
||||
if [ -f "$dnf" ]; then
|
||||
pm="dnf"
|
||||
else
|
||||
yum="$(which yum)"
|
||||
if [ -f "$yum" ]; then
|
||||
pm="yum"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$pm" ]; then
|
||||
error "No package manager found. (apt, dnf, yum)"
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -n "$SUDO_USER" ]; then
|
||||
user="$SUDO_USER"
|
||||
else
|
||||
whoami="$(which whoami)"
|
||||
if [ -f "$whoami" ]; then
|
||||
user="$($whoami)"
|
||||
else
|
||||
error "Could not determine the current user, set \$USER."
|
||||
fi
|
||||
fi
|
||||
|
||||
id="$(which id)"
|
||||
if [ -f "$id" ] && [ "$($id -u)" = "0" ]; then
|
||||
sudo=1
|
||||
fi
|
||||
|
||||
if [ "$CI" = "true" ]; then
|
||||
ci=1
|
||||
fi
|
||||
|
||||
print "System information:"
|
||||
if [ -n "$distro" ]; then
|
||||
print "| Distro: $distro $release"
|
||||
fi
|
||||
print "| Operating system: $os"
|
||||
print "| Architecture: $arch"
|
||||
if [ -n "$rosetta" ]; then
|
||||
print "| Rosetta: true"
|
||||
fi
|
||||
if [ -n "$glibc" ]; then
|
||||
print "| Glibc: $glibc"
|
||||
fi
|
||||
print "| Package manager: $pm"
|
||||
print "| User: $user"
|
||||
if [ -n "$sudo" ]; then
|
||||
print "| Sudo: true"
|
||||
fi
|
||||
if [ -n "$ci" ]; then
|
||||
print "| CI: true"
|
||||
fi
|
||||
}
|
||||
|
||||
package_manager() {
|
||||
case "$pm" in
|
||||
apt) DEBIAN_FRONTEND=noninteractive \
|
||||
execute "$apt" "$@" ;;
|
||||
dnf) execute dnf "$@" ;;
|
||||
yum) execute "$yum" "$@" ;;
|
||||
brew)
|
||||
if ! [ -f "$(which brew)" ]; then
|
||||
install_brew
|
||||
fi
|
||||
execute_non_root brew "$@"
|
||||
;;
|
||||
*) error "Unsupported package manager: $pm" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
update_packages() {
|
||||
case "$pm" in
|
||||
apt)
|
||||
package_manager update
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
check_package() {
|
||||
case "$pm" in
|
||||
apt)
|
||||
apt-cache policy "$1"
|
||||
;;
|
||||
dnf | yum | brew)
|
||||
package_manager info "$1"
|
||||
;;
|
||||
*)
|
||||
error "Unsupported package manager: $pm"
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
install_packages() {
|
||||
case "$pm" in
|
||||
apt)
|
||||
package_manager install --yes --no-install-recommends "$@"
|
||||
;;
|
||||
dnf)
|
||||
package_manager install --assumeyes --nodocs --noautoremove --allowerasing "$@"
|
||||
;;
|
||||
yum)
|
||||
package_manager install -y "$@"
|
||||
;;
|
||||
brew)
|
||||
package_manager install --force --formula "$@"
|
||||
package_manager link --force --overwrite "$@"
|
||||
;;
|
||||
*)
|
||||
error "Unsupported package manager: $pm"
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
get_version() {
|
||||
command="$1"
|
||||
path="$(which "$command")"
|
||||
|
||||
if [ -f "$path" ]; then
|
||||
case "$command" in
|
||||
go | zig) "$path" version ;;
|
||||
*) "$path" --version ;;
|
||||
esac
|
||||
else
|
||||
print "not found"
|
||||
fi
|
||||
}
|
||||
|
||||
install_brew() {
|
||||
bash="$(require bash)"
|
||||
script=$(download_file "https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh")
|
||||
NONINTERACTIVE=1 execute_non_root "$bash" "$script"
|
||||
|
||||
case "$arch" in
|
||||
x64)
|
||||
append_to_path "/usr/local/bin"
|
||||
;;
|
||||
aarch64)
|
||||
append_to_path "/opt/homebrew/bin"
|
||||
;;
|
||||
esac
|
||||
|
||||
case "$ci" in
|
||||
1)
|
||||
append_to_profile "export HOMEBREW_NO_INSTALL_CLEANUP=1"
|
||||
append_to_profile "export HOMEBREW_NO_AUTO_UPDATE=1"
|
||||
append_to_profile "export HOMEBREW_NO_ANALYTICS=1"
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
install_common_software() {
|
||||
case "$pm" in
|
||||
apt) install_packages \
|
||||
apt-transport-https \
|
||||
software-properties-common
|
||||
;;
|
||||
dnf) install_packages \
|
||||
dnf-plugins-core \
|
||||
tar
|
||||
;;
|
||||
esac
|
||||
|
||||
install_packages \
|
||||
bash \
|
||||
ca-certificates \
|
||||
curl \
|
||||
jq \
|
||||
htop \
|
||||
gnupg \
|
||||
git \
|
||||
unzip \
|
||||
wget \
|
||||
zip
|
||||
|
||||
install_rosetta
|
||||
install_nodejs
|
||||
install_bun
|
||||
}
|
||||
|
||||
install_nodejs() {
|
||||
version="${1:-"22"}"
|
||||
|
||||
if ! [ "$(compare_version "$glibc" "2.27")" = "1" ]; then
|
||||
version="16"
|
||||
fi
|
||||
|
||||
case "$pm" in
|
||||
dnf | yum)
|
||||
bash="$(require bash)"
|
||||
script=$(download_file "https://rpm.nodesource.com/setup_$version.x")
|
||||
execute "$bash" "$script"
|
||||
;;
|
||||
apt)
|
||||
bash="$(require bash)"
|
||||
script=$(download_file "https://deb.nodesource.com/setup_$version.x")
|
||||
execute "$bash" "$script"
|
||||
;;
|
||||
esac
|
||||
|
||||
install_packages nodejs
|
||||
}
|
||||
|
||||
install_bun() {
|
||||
bash="$(require bash)"
|
||||
script=$(download_file "https://bun.sh/install")
|
||||
|
||||
version="${1:-"latest"}"
|
||||
case "$version" in
|
||||
latest)
|
||||
execute "$bash" "$script"
|
||||
;;
|
||||
*)
|
||||
execute "$bash" "$script" -s "$version"
|
||||
;;
|
||||
esac
|
||||
|
||||
append_to_path "$HOME/.bun/bin"
|
||||
}
|
||||
|
||||
install_rosetta() {
|
||||
case "$os" in
|
||||
darwin)
|
||||
if ! [ "$(which arch)" ]; then
|
||||
execute softwareupdate \
|
||||
--install-rosetta \
|
||||
--agree-to-license
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
install_build_essentials() {
|
||||
case "$pm" in
|
||||
apt) install_packages \
|
||||
build-essential \
|
||||
ninja-build \
|
||||
xz-utils
|
||||
;;
|
||||
dnf | yum) install_packages \
|
||||
ninja-build \
|
||||
gcc-c++ \
|
||||
xz
|
||||
;;
|
||||
brew) install_packages \
|
||||
ninja
|
||||
;;
|
||||
esac
|
||||
|
||||
install_packages \
|
||||
make \
|
||||
cmake \
|
||||
pkg-config \
|
||||
python3 \
|
||||
libtool \
|
||||
ruby \
|
||||
perl \
|
||||
golang
|
||||
|
||||
install_llvm
|
||||
install_ccache
|
||||
install_rust
|
||||
install_docker
|
||||
}
|
||||
|
||||
llvm_version_exact() {
|
||||
case "$os" in
|
||||
linux)
|
||||
print "16.0.6"
|
||||
;;
|
||||
darwin | windows)
|
||||
print "18.1.8"
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
llvm_version() {
|
||||
echo "$(llvm_version_exact)" | cut -d. -f1
|
||||
}
|
||||
|
||||
install_llvm() {
|
||||
case "$pm" in
|
||||
apt)
|
||||
bash="$(require bash)"
|
||||
script=$(download_file "https://apt.llvm.org/llvm.sh")
|
||||
execute "$bash" "$script" "$(llvm_version)" all
|
||||
;;
|
||||
brew)
|
||||
install_packages "llvm@$(llvm_version)"
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
install_ccache() {
|
||||
case "$pm" in
|
||||
apt | brew)
|
||||
install_packages ccache
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
install_rust() {
|
||||
sh="$(require sh)"
|
||||
script=$(download_file "https://sh.rustup.rs")
|
||||
execute "$sh" "$script" -y
|
||||
append_to_path "$HOME/.cargo/bin"
|
||||
}
|
||||
|
||||
install_docker() {
|
||||
case "$pm" in
|
||||
brew)
|
||||
if ! [ -d "/Applications/Docker.app" ]; then
|
||||
package_manager install docker --cask
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
case "$distro-$release" in
|
||||
amzn-2 | amzn-1)
|
||||
execute amazon-linux-extras install docker
|
||||
;;
|
||||
amzn-*)
|
||||
install_packages docker
|
||||
;;
|
||||
*)
|
||||
sh="$(require sh)"
|
||||
script=$(download_file "https://get.docker.com")
|
||||
execute "$sh" "$script"
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
|
||||
systemctl="$(which systemctl)"
|
||||
if [ -f "$systemctl" ]; then
|
||||
execute "$systemctl" enable docker
|
||||
fi
|
||||
}
|
||||
|
||||
install_ci_dependencies() {
|
||||
if ! [ "$ci" = "1" ]; then
|
||||
return
|
||||
fi
|
||||
|
||||
install_tailscale
|
||||
install_buildkite
|
||||
}
|
||||
|
||||
install_tailscale() {
|
||||
case "$os" in
|
||||
linux)
|
||||
sh="$(require sh)"
|
||||
script=$(download_file "https://tailscale.com/install.sh")
|
||||
execute "$sh" "$script"
|
||||
;;
|
||||
darwin)
|
||||
install_packages go
|
||||
execute_non_root go install tailscale.com/cmd/tailscale{,d}@latest
|
||||
append_to_path "$HOME/go/bin"
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
install_buildkite() {
|
||||
home_dir="/var/lib/buildkite-agent"
|
||||
config_dir="/etc/buildkite-agent"
|
||||
config_file="$config_dir/buildkite-agent.cfg"
|
||||
|
||||
if ! [ -d "$home_dir" ]; then
|
||||
execute_sudo mkdir -p "$home_dir"
|
||||
fi
|
||||
|
||||
if ! [ -d "$config_dir" ]; then
|
||||
execute_sudo mkdir -p "$config_dir"
|
||||
fi
|
||||
|
||||
case "$os" in
|
||||
linux)
|
||||
getent="$(require getent)"
|
||||
if [ -z "$("$getent" passwd buildkite-agent)" ]; then
|
||||
useradd="$(require useradd)"
|
||||
execute "$useradd" buildkite-agent \
|
||||
--system \
|
||||
--no-create-home \
|
||||
--home-dir "$home_dir"
|
||||
fi
|
||||
|
||||
if [ -n "$("$getent" group docker)" ]; then
|
||||
usermod="$(require usermod)"
|
||||
execute "$usermod" -aG docker buildkite-agent
|
||||
fi
|
||||
|
||||
execute chown -R buildkite-agent:buildkite-agent "$home_dir"
|
||||
execute chown -R buildkite-agent:buildkite-agent "$config_dir"
|
||||
;;
|
||||
darwin)
|
||||
execute_sudo chown -R "$user:admin" "$home_dir"
|
||||
execute_sudo chown -R "$user:admin" "$config_dir"
|
||||
;;
|
||||
esac
|
||||
|
||||
if ! [ -f "$config_file" ]; then
|
||||
cat <<EOF >"$config_file"
|
||||
# This is generated by scripts/bootstrap.sh
|
||||
# https://buildkite.com/docs/agent/v3/configuration
|
||||
|
||||
name="%hostname-%random"
|
||||
tags="v=$v,os=$os,arch=$arch,distro=$distro,release=$release,kernel=$kernel,glibc=$glibc"
|
||||
|
||||
build-path="$home_dir/builds"
|
||||
git-mirrors-path="$home_dir/git"
|
||||
job-log-path="$home_dir/logs"
|
||||
plugins-path="$config_dir/plugins"
|
||||
hooks-path="$config_dir/hooks"
|
||||
|
||||
no-ssh-keyscan=true
|
||||
cancel-grace-period=3600000 # 1 hour
|
||||
enable-job-log-tmpfile=true
|
||||
experiment="normalised-upload-paths,resolve-commit-after-checkout,agent-api"
|
||||
EOF
|
||||
fi
|
||||
|
||||
bash="$(require bash)"
|
||||
script=$(download_file "https://raw.githubusercontent.com/buildkite/agent/main/install.sh")
|
||||
execute "$bash" "$script"
|
||||
|
||||
out_dir="$HOME/.buildkite-agent"
|
||||
execute_sudo mv -f "$out_dir/bin/buildkite-agent" "/usr/local/bin/buildkite-agent"
|
||||
execute rm -rf "$out_dir"
|
||||
}
|
||||
|
||||
install_chrome_dependencies() {
|
||||
# https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#chrome-doesnt-launch-on-linux
|
||||
# https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#running-puppeteer-in-the-cloud
|
||||
case "$pm" in
|
||||
apt)
|
||||
install_packages \
|
||||
fonts-liberation \
|
||||
libatk-bridge2.0-0 \
|
||||
libatk1.0-0 \
|
||||
libc6 \
|
||||
libcairo2 \
|
||||
libcups2 \
|
||||
libdbus-1-3 \
|
||||
libexpat1 \
|
||||
libfontconfig1 \
|
||||
libgbm1 \
|
||||
libgcc1 \
|
||||
libglib2.0-0 \
|
||||
libgtk-3-0 \
|
||||
libnspr4 \
|
||||
libnss3 \
|
||||
libpango-1.0-0 \
|
||||
libpangocairo-1.0-0 \
|
||||
libstdc++6 \
|
||||
libx11-6 \
|
||||
libx11-xcb1 \
|
||||
libxcb1 \
|
||||
libxcomposite1 \
|
||||
libxcursor1 \
|
||||
libxdamage1 \
|
||||
libxext6 \
|
||||
libxfixes3 \
|
||||
libxi6 \
|
||||
libxrandr2 \
|
||||
libxrender1 \
|
||||
libxss1 \
|
||||
libxtst6 \
|
||||
xdg-utils
|
||||
|
||||
# Fixes issue in newer version of Ubuntu:
|
||||
# Package 'libasound2' has no installation candidate
|
||||
if [ "$(check_package "libasound2t64")" ]; then
|
||||
install_packages libasound2t64
|
||||
else
|
||||
install_packages libasound2
|
||||
fi
|
||||
;;
|
||||
dnf | yum)
|
||||
install_packages \
|
||||
alsa-lib \
|
||||
atk \
|
||||
cups-libs \
|
||||
gtk3 \
|
||||
ipa-gothic-fonts \
|
||||
libXcomposite \
|
||||
libXcursor \
|
||||
libXdamage \
|
||||
libXext \
|
||||
libXi \
|
||||
libXrandr \
|
||||
libXScrnSaver \
|
||||
libXtst \
|
||||
pango \
|
||||
xorg-x11-fonts-100dpi \
|
||||
xorg-x11-fonts-75dpi \
|
||||
xorg-x11-fonts-cyrillic \
|
||||
xorg-x11-fonts-misc \
|
||||
xorg-x11-fonts-Type1 \
|
||||
xorg-x11-utils
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
main() {
|
||||
check_system
|
||||
update_packages
|
||||
install_common_software
|
||||
install_build_essentials
|
||||
install_chrome_dependencies
|
||||
install_ci_dependencies
|
||||
}
|
||||
|
||||
main
|
||||
@@ -130,10 +130,7 @@ function getCachePath(branch) {
|
||||
const repository = process.env.BUILDKITE_REPO;
|
||||
const fork = process.env.BUILDKITE_PULL_REQUEST_REPO;
|
||||
const repositoryKey = (fork || repository).replace(/[^a-z0-9]/gi, "-");
|
||||
const branchName = (branch || process.env.BUILDKITE_BRANCH).replace(/[^a-z0-9]/gi, "-");
|
||||
const branchKey = branchName.startsWith("gh-readonly-queue-")
|
||||
? branchName.slice(18, branchName.indexOf("-pr-"))
|
||||
: branchName;
|
||||
const branchKey = (branch || process.env.BUILDKITE_BRANCH).replace(/[^a-z0-9]/gi, "-");
|
||||
const stepKey = process.env.BUILDKITE_STEP_KEY.replace(/[^a-z0-9]/gi, "-");
|
||||
return resolve(buildPath, "..", "cache", repositoryKey, branchKey, stepKey);
|
||||
}
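// Illustration only (not from the diff): how the merge-queue handling shown in
// this hunk maps a hypothetical branch name to a cache key.
const exampleBranch = "gh-readonly-queue-main-pr-1234-abcdef".replace(/[^a-z0-9]/gi, "-");
const exampleKey = exampleBranch.startsWith("gh-readonly-queue-")
  ? exampleBranch.slice(18, exampleBranch.indexOf("-pr-")) // 18 === "gh-readonly-queue-".length
  : exampleBranch;
console.log(exampleKey); // "main"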
|
||||
|
||||
@@ -233,13 +233,8 @@ async function runTests() {
|
||||
reportOutputToGitHubAction("failing_tests", markdown);
|
||||
}
|
||||
|
||||
if (!isCI) {
|
||||
console.log("-------");
|
||||
console.log("passing", results.length - failedTests.length, "/", results.length);
|
||||
for (const { testPath } of failedTests) {
|
||||
console.log("-", testPath);
|
||||
}
|
||||
}
|
||||
if (!isCI) console.log("-------");
|
||||
if (!isCI) console.log("passing", results.length - failedTests.length, "/", results.length);
|
||||
return results;
|
||||
}
|
||||
|
||||
|
||||
@@ -676,7 +676,7 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
|
||||
}
|
||||
|
||||
// There's two parts to this.
|
||||
// 1. Storing the underlying string.
|
||||
// 1. Storing the underyling string.
|
||||
// 2. Making the key accessible at the index.
|
||||
pub fn putKey(self: *Self, key: anytype, result: *Result) !void {
|
||||
self.map.mutex.lock();
|
||||
|
||||
@@ -1635,8 +1635,6 @@ pub const Api = struct {
|
||||
/// define
|
||||
define: ?StringMap = null,
|
||||
|
||||
drop: []const []const u8 = &.{},
|
||||
|
||||
/// preserve_symlinks
|
||||
preserve_symlinks: ?bool = null,
|
||||
|
||||
@@ -2976,13 +2974,6 @@ pub const Api = struct {
|
||||
/// concurrent_scripts
|
||||
concurrent_scripts: ?u32 = null,
|
||||
|
||||
cafile: ?[]const u8 = null,
|
||||
|
||||
ca: ?union(enum) {
|
||||
str: []const u8,
|
||||
list: []const []const u8,
|
||||
} = null,
|
||||
|
||||
pub fn decode(reader: anytype) anyerror!BunInstall {
|
||||
var this = std.mem.zeroes(BunInstall);
|
||||
|
||||
|
||||
@@ -47,8 +47,8 @@ pub const Index = packed struct(u32) {
|
||||
pub const invalid = Index{ .value = std.math.maxInt(Int) };
|
||||
pub const runtime = Index{ .value = 0 };
|
||||
|
||||
pub const bake_server_data = Index{ .value = 1 };
|
||||
pub const bake_client_data = Index{ .value = 2 };
|
||||
pub const kit_server_data = Index{ .value = 1 };
|
||||
pub const kit_client_data = Index{ .value = 2 };
|
||||
|
||||
pub const Int = u32;
|
||||
|
||||
|
||||
@@ -14,29 +14,6 @@ pub fn BabyList(comptime Type: type) type {
|
||||
|
||||
pub const Elem = Type;
|
||||
|
||||
pub fn parse(input: *bun.css.Parser) bun.css.Result(ListType) {
|
||||
return switch (input.parseCommaSeparated(Type, bun.css.generic.parseFor(Type))) {
|
||||
.result => |v| return .{ .result = ListType{
|
||||
.ptr = v.items.ptr,
|
||||
.len = @intCast(v.items.len),
|
||||
.cap = @intCast(v.capacity),
|
||||
} },
|
||||
.err => |e| return .{ .err = e },
|
||||
};
|
||||
}
|
||||
|
||||
pub fn toCss(this: *const ListType, comptime W: type, dest: *bun.css.Printer(W)) bun.css.PrintErr!void {
|
||||
return bun.css.to_css.fromBabyList(Type, this, W, dest);
|
||||
}
|
||||
|
||||
pub fn eql(lhs: *const ListType, rhs: *const ListType) bool {
|
||||
if (lhs.len != rhs.len) return false;
|
||||
for (lhs.sliceConst(), rhs.sliceConst()) |*a, *b| {
|
||||
if (!bun.css.generic.eql(Type, a, b)) return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
pub fn set(this: *@This(), slice_: []Type) void {
|
||||
this.ptr = slice_.ptr;
|
||||
this.len = @as(u32, @truncate(slice_.len));
|
||||
@@ -52,12 +29,6 @@ pub fn BabyList(comptime Type: type) type {
|
||||
this.* = .{};
|
||||
}
|
||||
|
||||
pub fn shrinkAndFree(this: *@This(), allocator: std.mem.Allocator, size: usize) void {
|
||||
var list_ = this.listManaged(allocator);
|
||||
list_.shrinkAndFree(size);
|
||||
this.update(list_);
|
||||
}
|
||||
|
||||
pub fn orderedRemove(this: *@This(), index: usize) Type {
|
||||
var l = this.list();
|
||||
defer this.update(l);
|
||||
@@ -70,12 +41,6 @@ pub fn BabyList(comptime Type: type) type {
|
||||
return l.swapRemove(index);
|
||||
}
|
||||
|
||||
pub fn sortAsc(
|
||||
this: *@This(),
|
||||
) void {
|
||||
bun.strings.sortAsc(this.slice());
|
||||
}
|
||||
|
||||
pub fn contains(this: @This(), item: []const Type) bool {
|
||||
return this.len > 0 and @intFromPtr(item.ptr) >= @intFromPtr(this.ptr) and @intFromPtr(item.ptr) < @intFromPtr(this.ptr) + this.len;
|
||||
}
|
||||
@@ -112,17 +77,8 @@ pub fn BabyList(comptime Type: type) type {
|
||||
};
|
||||
}
|
||||
|
||||
fn assertValidDeepClone(comptime T: type) void {
|
||||
return switch (T) {
|
||||
bun.JSAst.Expr, bun.JSAst.G.Property, bun.css.ImportConditions => {},
|
||||
else => {
|
||||
@compileError("Unsupported type for BabyList.deepClone(): " ++ @typeName(Type));
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deepClone(this: @This(), allocator: std.mem.Allocator) !@This() {
|
||||
assertValidDeepClone(Type);
|
||||
if (comptime Type != bun.JSAst.Expr and Type != bun.JSAst.G.Property) @compileError("Unsupported type for BabyList.deepClone()");
|
||||
var list_ = try initCapacity(allocator, this.len);
|
||||
for (this.slice()) |item| {
|
||||
list_.appendAssumeCapacity(try item.deepClone(allocator));
|
||||
@@ -131,17 +87,6 @@ pub fn BabyList(comptime Type: type) type {
|
||||
return list_;
|
||||
}
|
||||
|
||||
/// Same as `deepClone` but doesn't return an error
|
||||
pub fn deepClone2(this: @This(), allocator: std.mem.Allocator) @This() {
|
||||
assertValidDeepClone(Type);
|
||||
var list_ = initCapacity(allocator, this.len) catch bun.outOfMemory();
|
||||
for (this.slice()) |item| {
|
||||
list_.appendAssumeCapacity(item.deepClone(allocator));
|
||||
}
|
||||
|
||||
return list_;
|
||||
}
|
||||
|
||||
pub fn clearRetainingCapacity(this: *@This()) void {
|
||||
this.len = 0;
|
||||
}
|
||||
@@ -319,11 +264,6 @@ pub fn BabyList(comptime Type: type) type {
|
||||
return this.ptr[0..this.len];
|
||||
}
|
||||
|
||||
pub fn sliceConst(this: *const ListType) callconv(bun.callconv_inline) []const Type {
|
||||
@setRuntimeSafety(false);
|
||||
return this.ptr[0..this.len];
|
||||
}
|
||||
|
||||
pub fn write(this: *@This(), allocator: std.mem.Allocator, str: []const u8) !u32 {
|
||||
if (comptime Type != u8)
|
||||
@compileError("Unsupported for type " ++ @typeName(Type));
|
||||
|
||||
@@ -1,166 +0,0 @@
|
||||
#include "BakeGlobalObject.h"
|
||||
#include "JSNextTickQueue.h"
|
||||
#include "JavaScriptCore/GlobalObjectMethodTable.h"
|
||||
#include "JavaScriptCore/JSInternalPromise.h"
|
||||
#include "ProcessIdentifier.h"
|
||||
#include "headers-handwritten.h"
|
||||
|
||||
namespace Bake {
|
||||
|
||||
extern "C" void BakeInitProcessIdentifier()
|
||||
{
|
||||
// assert is on main thread
|
||||
WebCore::Process::identifier();
|
||||
}
|
||||
|
||||
JSC::JSInternalPromise*
|
||||
bakeModuleLoaderImportModule(JSC::JSGlobalObject* jsGlobalObject,
|
||||
JSC::JSModuleLoader*, JSC::JSString* moduleNameValue,
|
||||
JSC::JSValue parameters,
|
||||
const JSC::SourceOrigin& sourceOrigin)
|
||||
{
|
||||
// TODO: forward this to the runtime?
|
||||
JSC::VM& vm = jsGlobalObject->vm();
|
||||
WTF::String keyString = moduleNameValue->getString(jsGlobalObject);
|
||||
auto err = JSC::createTypeError(
|
||||
jsGlobalObject,
|
||||
WTF::makeString(
|
||||
"Dynamic import to '"_s, keyString,
|
||||
"' should have been replaced with a hook into the module runtime"_s));
|
||||
auto* promise = JSC::JSInternalPromise::create(
|
||||
vm, jsGlobalObject->internalPromiseStructure());
|
||||
promise->reject(jsGlobalObject, err);
|
||||
return promise;
|
||||
}
|
||||
|
||||
extern "C" BunString BakeProdResolve(JSC::JSGlobalObject*, BunString a, BunString b);
|
||||
|
||||
JSC::Identifier bakeModuleLoaderResolve(JSC::JSGlobalObject* jsGlobal,
|
||||
JSC::JSModuleLoader* loader, JSC::JSValue key,
|
||||
JSC::JSValue referrer, JSC::JSValue origin)
|
||||
{
|
||||
Bake::GlobalObject* global = jsCast<Bake::GlobalObject*>(jsGlobal);
|
||||
JSC::VM& vm = global->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
|
||||
if (global->isProduction()) {
|
||||
WTF::String keyString = key.toWTFString(global);
|
||||
RETURN_IF_EXCEPTION(scope, vm.propertyNames->emptyIdentifier);
|
||||
|
||||
ASSERT(referrer.isString());
|
||||
auto refererString = jsCast<JSC::JSString*>(referrer)->value(global);
|
||||
|
||||
BunString result = BakeProdResolve(global, Bun::toString(referrer.getString(global)), Bun::toString(keyString));
|
||||
return JSC::Identifier::fromString(vm, result.toWTFString(BunString::ZeroCopy));
|
||||
} else {
|
||||
JSC::throwTypeError(global, scope, "External imports are not allowed in Bun Bake's dev server. This is a bug in Bun's bundler."_s);
|
||||
return vm.propertyNames->emptyIdentifier;
|
||||
}
|
||||
}
|
||||
|
||||
#define INHERIT_HOOK_METHOD(name) \
|
||||
Zig::GlobalObject::s_globalObjectMethodTable.name
|
||||
|
||||
const JSC::GlobalObjectMethodTable GlobalObject::s_globalObjectMethodTable = {
|
||||
INHERIT_HOOK_METHOD(supportsRichSourceInfo),
|
||||
INHERIT_HOOK_METHOD(shouldInterruptScript),
|
||||
INHERIT_HOOK_METHOD(javaScriptRuntimeFlags),
|
||||
INHERIT_HOOK_METHOD(queueMicrotaskToEventLoop),
|
||||
INHERIT_HOOK_METHOD(shouldInterruptScriptBeforeTimeout),
|
||||
bakeModuleLoaderImportModule,
|
||||
bakeModuleLoaderResolve,
|
||||
INHERIT_HOOK_METHOD(moduleLoaderFetch),
|
||||
INHERIT_HOOK_METHOD(moduleLoaderCreateImportMetaProperties),
|
||||
INHERIT_HOOK_METHOD(moduleLoaderEvaluate),
|
||||
INHERIT_HOOK_METHOD(promiseRejectionTracker),
|
||||
INHERIT_HOOK_METHOD(reportUncaughtExceptionAtEventLoop),
|
||||
INHERIT_HOOK_METHOD(currentScriptExecutionOwner),
|
||||
INHERIT_HOOK_METHOD(scriptExecutionStatus),
|
||||
INHERIT_HOOK_METHOD(reportViolationForUnsafeEval),
|
||||
INHERIT_HOOK_METHOD(defaultLanguage),
|
||||
INHERIT_HOOK_METHOD(compileStreaming),
|
||||
INHERIT_HOOK_METHOD(instantiateStreaming),
|
||||
INHERIT_HOOK_METHOD(deriveShadowRealmGlobalObject),
|
||||
INHERIT_HOOK_METHOD(codeForEval),
|
||||
INHERIT_HOOK_METHOD(canCompileStrings),
|
||||
};
|
||||
|
||||
GlobalObject* GlobalObject::create(JSC::VM& vm, JSC::Structure* structure,
|
||||
const JSC::GlobalObjectMethodTable* methodTable)
|
||||
{
|
||||
GlobalObject* ptr = new (NotNull, JSC::allocateCell<GlobalObject>(vm))
|
||||
GlobalObject(vm, structure, methodTable);
|
||||
ptr->finishCreation(vm);
|
||||
return ptr;
|
||||
}
|
||||
|
||||
void GlobalObject::finishCreation(JSC::VM& vm)
|
||||
{
|
||||
Base::finishCreation(vm);
|
||||
ASSERT(inherits(info()));
|
||||
}
|
||||
|
||||
extern "C" BunVirtualMachine* Bun__getVM();
|
||||
|
||||
// A lot of this function is taken from 'Zig__GlobalObject__create'
|
||||
// TODO: remove this entire method
|
||||
extern "C" GlobalObject* BakeCreateDevGlobal(DevServer* owner,
|
||||
void* console)
|
||||
{
|
||||
JSC::VM& vm = JSC::VM::create(JSC::HeapType::Large).leakRef();
|
||||
vm.heap.acquireAccess();
|
||||
JSC::JSLockHolder locker(vm);
|
||||
BunVirtualMachine* bunVM = Bun__getVM();
|
||||
WebCore::JSVMClientData::create(&vm, bunVM);
|
||||
|
||||
JSC::Structure* structure = GlobalObject::createStructure(vm);
|
||||
GlobalObject* global = GlobalObject::create(
|
||||
vm, structure, &GlobalObject::s_globalObjectMethodTable);
|
||||
if (!global)
|
||||
BUN_PANIC("Failed to create BakeGlobalObject");
|
||||
|
||||
global->m_devServer = owner;
|
||||
global->m_bunVM = bunVM;
|
||||
|
||||
JSC::gcProtect(global);
|
||||
|
||||
global->setConsole(console);
|
||||
global->setStackTraceLimit(10); // Node.js defaults to 10
|
||||
|
||||
// TODO: it segfaults! process.nextTick is scoped out for now i guess!
|
||||
// vm.setOnComputeErrorInfo(computeErrorInfoWrapper);
|
||||
// vm.setOnEachMicrotaskTick([global](JSC::VM &vm) -> void {
|
||||
// if (auto nextTickQueue = global->m_nextTickQueue.get()) {
|
||||
// global->resetOnEachMicrotaskTick();
|
||||
// // Bun::JSNextTickQueue *queue =
|
||||
// // jsCast<Bun::JSNextTickQueue *>(nextTickQueue);
|
||||
// // queue->drain(vm, global);
|
||||
// return;
|
||||
// }
|
||||
// });
|
||||
|
||||
return global;
|
||||
}
|
||||
|
||||
extern "C" GlobalObject* BakeCreateProdGlobal(JSC::VM* vm, void* console)
|
||||
{
|
||||
JSC::JSLockHolder locker(vm);
|
||||
BunVirtualMachine* bunVM = Bun__getVM();
|
||||
|
||||
JSC::Structure* structure = GlobalObject::createStructure(*vm);
|
||||
GlobalObject* global = GlobalObject::create(*vm, structure, &GlobalObject::s_globalObjectMethodTable);
|
||||
if (!global)
|
||||
BUN_PANIC("Failed to create BakeGlobalObject");
|
||||
|
||||
global->m_devServer = nullptr;
|
||||
global->m_bunVM = bunVM;
|
||||
|
||||
JSC::gcProtect(global);
|
||||
|
||||
global->setConsole(console);
|
||||
global->setStackTraceLimit(10); // Node.js defaults to 10
|
||||
|
||||
return global;
|
||||
}
|
||||
|
||||
}; // namespace Bake
|
||||
@@ -1,39 +0,0 @@
|
||||
#include "BakeProduction.h"
|
||||
#include "BunBuiltinNames.h"
|
||||
#include "WebCoreJSBuiltins.h"
|
||||
#include "JavaScriptCore/JSPromise.h"
|
||||
#include "JavaScriptCore/Exception.h"
|
||||
|
||||
namespace Bake {
|
||||
|
||||
extern "C" JSC::JSPromise* BakeRenderRoutesForProd(
|
||||
JSC::JSGlobalObject* global,
|
||||
BunString outbase,
|
||||
JSC::JSValue renderStaticCallback,
|
||||
JSC::JSValue clientEntryUrl,
|
||||
JSC::JSValue files,
|
||||
JSC::JSValue patterns,
|
||||
JSC::JSValue styles)
|
||||
{
|
||||
JSC::VM& vm = global->vm();
|
||||
JSC::JSFunction* cb = JSC::JSFunction::create(vm, global, WebCore::bakeRenderRoutesForProdCodeGenerator(vm), global);
|
||||
JSC::CallData callData = JSC::getCallData(cb);
|
||||
|
||||
JSC::MarkedArgumentBuffer args;
|
||||
args.append(JSC::jsString(vm, outbase.toWTFString()));
|
||||
args.append(renderStaticCallback);
|
||||
args.append(clientEntryUrl);
|
||||
args.append(files);
|
||||
args.append(patterns);
|
||||
args.append(styles);
|
||||
|
||||
NakedPtr<JSC::Exception> returnedException = nullptr;
|
||||
auto result = JSC::call(global, cb, callData, JSC::jsUndefined(), args, returnedException);
|
||||
if (UNLIKELY(returnedException)) {
|
||||
// This should be impossible because it returns a promise.
|
||||
return JSC::JSPromise::rejectedPromise(global, returnedException->value());
|
||||
}
|
||||
return JSC::jsCast<JSC::JSPromise*>(result);
|
||||
}
|
||||
|
||||
} // namespace Bake
|
||||
@@ -1,5 +0,0 @@
|
||||
#include "root.h"
|
||||
#include "headers-handwritten.h"
|
||||
|
||||
namespace Bake {
|
||||
} // namespace Bake
|
||||
@@ -1,127 +0,0 @@
|
||||
// clang-format off
|
||||
#include "BakeSourceProvider.h"
|
||||
#include "BakeGlobalObject.h"
|
||||
#include "JavaScriptCore/Completion.h"
|
||||
#include "JavaScriptCore/Identifier.h"
|
||||
#include "JavaScriptCore/JSCJSValue.h"
|
||||
#include "JavaScriptCore/JSCast.h"
|
||||
#include "JavaScriptCore/JSLock.h"
|
||||
#include "JavaScriptCore/JSMap.h"
|
||||
#include "JavaScriptCore/JSModuleLoader.h"
|
||||
#include "JavaScriptCore/JSString.h"
|
||||
#include "JavaScriptCore/JSModuleNamespaceObject.h"
|
||||
|
||||
namespace Bake {
|
||||
|
||||
extern "C" LoadServerCodeResult BakeLoadInitialServerCode(GlobalObject* global, BunString source) {
|
||||
JSC::VM& vm = global->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
|
||||
String string = "bake://server.js"_s;
|
||||
JSC::JSString* key = JSC::jsString(vm, string);
|
||||
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
|
||||
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
|
||||
source.toWTFString(),
|
||||
origin,
|
||||
WTFMove(string),
|
||||
WTF::TextPosition(),
|
||||
JSC::SourceProviderSourceType::Module
|
||||
));
|
||||
|
||||
global->moduleLoader()->provideFetch(global, key, sourceCode);
|
||||
RETURN_IF_EXCEPTION(scope, {});
|
||||
|
||||
JSC::JSInternalPromise* internalPromise = global->moduleLoader()->loadAndEvaluateModule(global, key, JSC::jsUndefined(), JSC::jsUndefined());
|
||||
RETURN_IF_EXCEPTION(scope, {});
|
||||
|
||||
return { internalPromise, key };
|
||||
}
|
||||
|
||||
extern "C" JSC::JSInternalPromise* BakeLoadModuleByKey(GlobalObject* global, JSC::JSString* key) {
|
||||
return global->moduleLoader()->loadAndEvaluateModule(global, key, JSC::jsUndefined(), JSC::jsUndefined());
|
||||
}
|
||||
|
||||
extern "C" JSC::EncodedJSValue BakeLoadServerHmrPatch(GlobalObject* global, BunString source) {
|
||||
JSC::VM&vm = global->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
|
||||
String string = "bake://server.patch.js"_s;
|
||||
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
|
||||
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
|
||||
source.toWTFString(),
|
||||
origin,
|
||||
WTFMove(string),
|
||||
WTF::TextPosition(),
|
||||
JSC::SourceProviderSourceType::Program
|
||||
));
|
||||
|
||||
JSC::JSValue result = vm.interpreter.executeProgram(sourceCode, global, global);
|
||||
RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({}));
|
||||
|
||||
RELEASE_ASSERT(result);
|
||||
return JSC::JSValue::encode(result);
|
||||
}
|
||||
|
||||
extern "C" JSC::EncodedJSValue BakeGetModuleNamespace(
|
||||
JSC::JSGlobalObject* global,
|
||||
JSC::JSValue keyValue
|
||||
) {
|
||||
JSC::JSString* key = JSC::jsCast<JSC::JSString*>(keyValue);
|
||||
JSC::VM& vm = global->vm();
|
||||
JSC::JSMap* map = JSC::jsCast<JSC::JSMap*>(
|
||||
global->moduleLoader()->getDirect(
|
||||
vm, JSC::Identifier::fromString(global->vm(), "registry"_s)
|
||||
));
|
||||
JSC::JSValue entry = map->get(global, key);
|
||||
ASSERT(entry.isObject()); // should have called BakeLoadServerCode and wait for that promise
|
||||
JSC::JSValue module = entry.getObject()->get(global, JSC::Identifier::fromString(global->vm(), "module"_s));
|
||||
ASSERT(module.isCell());
|
||||
JSC::JSModuleNamespaceObject* namespaceObject = global->moduleLoader()->getModuleNamespaceObject(global, module);
|
||||
ASSERT(namespaceObject);
|
||||
return JSC::JSValue::encode(namespaceObject);
|
||||
}
|
||||
|
||||
extern "C" JSC::EncodedJSValue BakeGetDefaultExportFromModule(
|
||||
JSC::JSGlobalObject* global,
|
||||
JSC::JSValue keyValue
|
||||
) {
|
||||
JSC::VM& vm = global->vm();
|
||||
return JSC::JSValue::encode(jsCast<JSC::JSModuleNamespaceObject*>(JSC::JSValue::decode(BakeGetModuleNamespace(global, keyValue)))->get(global, vm.propertyNames->defaultKeyword));
|
||||
}
|
||||
|
||||
// There were issues when trying to use JSValue.get from zig
|
||||
extern "C" JSC::EncodedJSValue BakeGetOnModuleNamespace(
|
||||
JSC::JSGlobalObject* global,
|
||||
JSC::JSModuleNamespaceObject* moduleNamespace,
|
||||
const unsigned char* key,
|
||||
size_t keyLength
|
||||
) {
|
||||
JSC::VM& vm = global->vm();
|
||||
const auto propertyString = String(StringImpl::createWithoutCopying({ key, keyLength }));
|
||||
const auto identifier = JSC::Identifier::fromString(vm, propertyString);
|
||||
const auto property = JSC::PropertyName(identifier);
|
||||
return JSC::JSValue::encode(moduleNamespace->get(global, property));
|
||||
}
|
||||
|
||||
extern "C" JSC::EncodedJSValue BakeRegisterProductionChunk(JSC::JSGlobalObject* global, BunString virtualPathName, BunString source) {
|
||||
JSC::VM& vm = global->vm();
|
||||
auto scope = DECLARE_THROW_SCOPE(vm);
|
||||
|
||||
String string = virtualPathName.toWTFString();
|
||||
JSC::JSString* key = JSC::jsString(vm, string);
|
||||
JSC::SourceOrigin origin = JSC::SourceOrigin(WTF::URL(string));
|
||||
JSC::SourceCode sourceCode = JSC::SourceCode(DevSourceProvider::create(
|
||||
source.toWTFString(),
|
||||
origin,
|
||||
WTFMove(string),
|
||||
WTF::TextPosition(),
|
||||
JSC::SourceProviderSourceType::Module
|
||||
));
|
||||
|
||||
global->moduleLoader()->provideFetch(global, key, sourceCode);
|
||||
RETURN_IF_EXCEPTION(scope, JSC::JSValue::encode({}));
|
||||
|
||||
return JSC::JSValue::encode(key);
|
||||
}
|
||||
|
||||
} // namespace Bake
|
||||
Some files were not shown because too many files have changed in this diff.