mirror of
https://github.com/oven-sh/bun
synced 2026-02-09 02:18:47 +00:00
Compare commits
5 Commits
nektro-pat
...
nektro-pat
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
90ed76fc6b | ||
|
|
28de650231 | ||
|
|
1a4679ee34 | ||
|
|
06db4eb897 | ||
|
|
f5d8601ed9 |
@@ -1,31 +0,0 @@
|
||||
# Uploads the latest CI workflow to Buildkite.
|
||||
# https://buildkite.com/docs/pipelines/defining-steps
|
||||
#
|
||||
# Changes to this file must be manually edited here:
|
||||
# https://buildkite.com/bun/bun/settings/steps
|
||||
steps:
|
||||
- if: "build.pull_request.repository.fork"
|
||||
block: ":eyes:"
|
||||
prompt: "Did you review the PR?"
|
||||
blocked_state: "running"
|
||||
|
||||
- label: ":pipeline:"
|
||||
agents:
|
||||
queue: "build-darwin"
|
||||
command:
|
||||
- ".buildkite/scripts/prepare-build.sh"
|
||||
|
||||
- if: "build.branch == 'main' && !build.pull_request.repository.fork"
|
||||
label: ":github:"
|
||||
agents:
|
||||
queue: "test-darwin"
|
||||
depends_on:
|
||||
- "darwin-aarch64-build-bun"
|
||||
- "darwin-x64-build-bun"
|
||||
- "linux-aarch64-build-bun"
|
||||
- "linux-x64-build-bun"
|
||||
- "linux-x64-baseline-build-bun"
|
||||
- "windows-x64-build-bun"
|
||||
- "windows-x64-baseline-build-bun"
|
||||
command:
|
||||
- ".buildkite/scripts/upload-release.sh"
|
||||
@@ -1,848 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Build and test Bun on macOS, Linux, and Windows.
|
||||
* @link https://buildkite.com/docs/pipelines/defining-steps
|
||||
*/
|
||||
|
||||
import { writeFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import {
|
||||
getBootstrapVersion,
|
||||
getBuildNumber,
|
||||
getCanaryRevision,
|
||||
getChangedFiles,
|
||||
getCommit,
|
||||
getCommitMessage,
|
||||
getEnv,
|
||||
getLastSuccessfulBuild,
|
||||
getMainBranch,
|
||||
getTargetBranch,
|
||||
isBuildkite,
|
||||
isFork,
|
||||
isMainBranch,
|
||||
isMergeQueue,
|
||||
printEnvironment,
|
||||
spawnSafe,
|
||||
toYaml,
|
||||
uploadArtifact,
|
||||
} from "../scripts/utils.mjs";
|
||||
|
||||
/**
|
||||
* @typedef PipelineOptions
|
||||
* @property {string} [buildId]
|
||||
* @property {boolean} [buildImages]
|
||||
* @property {boolean} [publishImages]
|
||||
* @property {boolean} [skipTests]
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {PipelineOptions} options
|
||||
*/
|
||||
function getPipeline(options) {
|
||||
const { buildId, buildImages, publishImages, skipTests } = options;
|
||||
|
||||
/**
|
||||
* Helpers
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
* @returns {string}
|
||||
* @link https://github.com/buildkite/emojis#emoji-reference
|
||||
*/
|
||||
const getEmoji = string => {
|
||||
if (string === "amazonlinux") {
|
||||
return ":aws:";
|
||||
}
|
||||
return `:${string}:`;
|
||||
};
|
||||
|
||||
/**
|
||||
* @typedef {"linux" | "darwin" | "windows"} Os
|
||||
* @typedef {"aarch64" | "x64"} Arch
|
||||
* @typedef {"musl"} Abi
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef Target
|
||||
* @property {Os} os
|
||||
* @property {Arch} arch
|
||||
* @property {Abi} [abi]
|
||||
* @property {boolean} [baseline]
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {Target} target
|
||||
* @returns {string}
|
||||
*/
|
||||
const getTargetKey = target => {
|
||||
const { os, arch, abi, baseline } = target;
|
||||
let key = `${os}-${arch}`;
|
||||
if (abi) {
|
||||
key += `-${abi}`;
|
||||
}
|
||||
if (baseline) {
|
||||
key += "-baseline";
|
||||
}
|
||||
return key;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Target} target
|
||||
* @returns {string}
|
||||
*/
|
||||
const getTargetLabel = target => {
|
||||
const { os, arch, abi, baseline } = target;
|
||||
let label = `${getEmoji(os)} ${arch}`;
|
||||
if (abi) {
|
||||
label += `-${abi}`;
|
||||
}
|
||||
if (baseline) {
|
||||
label += "-baseline";
|
||||
}
|
||||
return label;
|
||||
};
|
||||
|
||||
/**
|
||||
* @typedef Platform
|
||||
* @property {Os} os
|
||||
* @property {Arch} arch
|
||||
* @property {Abi} [abi]
|
||||
* @property {boolean} [baseline]
|
||||
* @property {string} [distro]
|
||||
* @property {string} release
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @returns {string}
|
||||
*/
|
||||
const getPlatformKey = platform => {
|
||||
const { os, arch, abi, baseline, distro, release } = platform;
|
||||
const target = getTargetKey({ os, arch, abi, baseline });
|
||||
if (distro) {
|
||||
return `${target}-${distro}-${release.replace(/\./g, "")}`;
|
||||
}
|
||||
return `${target}-${release.replace(/\./g, "")}`;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @returns {string}
|
||||
*/
|
||||
const getPlatformLabel = platform => {
|
||||
const { os, arch, baseline, distro, release } = platform;
|
||||
let label = `${getEmoji(distro || os)} ${release} ${arch}`;
|
||||
if (baseline) {
|
||||
label += "-baseline";
|
||||
}
|
||||
return label;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @returns {string}
|
||||
*/
|
||||
const getImageKey = platform => {
|
||||
const { os, arch, distro, release } = platform;
|
||||
if (distro) {
|
||||
return `${os}-${arch}-${distro}-${release.replace(/\./g, "")}`;
|
||||
}
|
||||
return `${os}-${arch}-${release.replace(/\./g, "")}`;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @returns {string}
|
||||
*/
|
||||
const getImageLabel = platform => {
|
||||
const { os, arch, distro, release } = platform;
|
||||
return `${getEmoji(distro || os)} ${release} ${arch}`;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {number} [limit]
|
||||
* @link https://buildkite.com/docs/pipelines/command-step#retry-attributes
|
||||
*/
|
||||
const getRetry = (limit = 0) => {
|
||||
return {
|
||||
automatic: [
|
||||
{ exit_status: 1, limit },
|
||||
{ exit_status: -1, limit: 3 },
|
||||
{ exit_status: 255, limit: 3 },
|
||||
{ signal_reason: "agent_stop", limit: 3 },
|
||||
],
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* @returns {number}
|
||||
* @link https://buildkite.com/docs/pipelines/managing-priorities
|
||||
*/
|
||||
const getPriority = () => {
|
||||
if (isFork()) {
|
||||
return -1;
|
||||
}
|
||||
if (isMainBranch()) {
|
||||
return 2;
|
||||
}
|
||||
if (isMergeQueue()) {
|
||||
return 1;
|
||||
}
|
||||
return 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Target} target
|
||||
* @returns {Record<string, string | undefined>}
|
||||
*/
|
||||
const getBuildEnv = target => {
|
||||
const { baseline, abi } = target;
|
||||
return {
|
||||
ENABLE_BASELINE: baseline ? "ON" : "OFF",
|
||||
ABI: abi === "musl" ? "musl" : undefined,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Target} target
|
||||
* @returns {string}
|
||||
*/
|
||||
const getBuildToolchain = target => {
|
||||
const { os, arch, abi, baseline } = target;
|
||||
let key = `${os}-${arch}`;
|
||||
if (abi) {
|
||||
key += `-${abi}`;
|
||||
}
|
||||
if (baseline) {
|
||||
key += "-baseline";
|
||||
}
|
||||
return key;
|
||||
};
|
||||
|
||||
/**
|
||||
* Agents
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {Record<string, string | undefined>} Agent
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @returns {boolean}
|
||||
*/
|
||||
const isUsingNewAgent = platform => {
|
||||
const { os } = platform;
|
||||
if (os === "linux") {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {"v1" | "v2"} version
|
||||
* @param {Platform} platform
|
||||
* @param {string} [instanceType]
|
||||
* @returns {Agent}
|
||||
*/
|
||||
const getEmphemeralAgent = (version, platform, instanceType) => {
|
||||
const { os, arch, abi, distro, release } = platform;
|
||||
if (version === "v1") {
|
||||
return {
|
||||
robobun: true,
|
||||
os,
|
||||
arch,
|
||||
distro,
|
||||
release,
|
||||
};
|
||||
}
|
||||
let image;
|
||||
if (distro) {
|
||||
image = `${os}-${arch}-${distro}-${release}`;
|
||||
} else {
|
||||
image = `${os}-${arch}-${release}`;
|
||||
}
|
||||
if (buildImages && !publishImages) {
|
||||
image += `-build-${getBuildNumber()}`;
|
||||
} else {
|
||||
image += `-v${getBootstrapVersion()}`;
|
||||
}
|
||||
return {
|
||||
robobun: true,
|
||||
robobun2: true,
|
||||
os,
|
||||
arch,
|
||||
abi,
|
||||
distro,
|
||||
release,
|
||||
"image-name": image,
|
||||
"instance-type": instanceType,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Target} target
|
||||
* @returns {Agent}
|
||||
*/
|
||||
const getBuildAgent = target => {
|
||||
const { os, arch, abi } = target;
|
||||
if (isUsingNewAgent(target)) {
|
||||
const instanceType = arch === "aarch64" ? "c8g.8xlarge" : "c7i.8xlarge";
|
||||
return getEmphemeralAgent("v2", target, instanceType);
|
||||
}
|
||||
return {
|
||||
queue: `build-${os}`,
|
||||
os,
|
||||
arch,
|
||||
abi,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Target} target
|
||||
* @returns {Agent}
|
||||
*/
|
||||
const getZigAgent = target => {
|
||||
const { abi, arch } = target;
|
||||
// if (abi === "musl") {
|
||||
// const instanceType = arch === "aarch64" ? "c8g.large" : "c7i.large";
|
||||
// return getEmphemeralAgent("v2", target, instanceType);
|
||||
// }
|
||||
return {
|
||||
queue: "build-zig",
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @returns {Agent}
|
||||
*/
|
||||
const getTestAgent = platform => {
|
||||
const { os, arch, release } = platform;
|
||||
if (isUsingNewAgent(platform)) {
|
||||
const instanceType = arch === "aarch64" ? "t4g.large" : "t3.large";
|
||||
return getEmphemeralAgent("v2", platform, instanceType);
|
||||
}
|
||||
if (os === "darwin") {
|
||||
return {
|
||||
os,
|
||||
arch,
|
||||
release,
|
||||
queue: "test-darwin",
|
||||
};
|
||||
}
|
||||
return getEmphemeralAgent("v1", platform);
|
||||
};
|
||||
|
||||
/**
|
||||
* Steps
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef Step
|
||||
* @property {string} key
|
||||
* @property {string} [label]
|
||||
* @property {Record<string, string | undefined>} [agents]
|
||||
* @property {Record<string, string | undefined>} [env]
|
||||
* @property {string} command
|
||||
* @property {string[]} [depends_on]
|
||||
* @property {Record<string, string | undefined>} [retry]
|
||||
* @property {boolean} [cancel_on_build_failing]
|
||||
* @property {boolean} [soft_fail]
|
||||
* @property {number} [parallelism]
|
||||
* @property {number} [concurrency]
|
||||
* @property {string} [concurrency_group]
|
||||
* @property {number} [priority]
|
||||
* @property {number} [timeout_in_minutes]
|
||||
* @link https://buildkite.com/docs/pipelines/command-step
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @param {string} [step]
|
||||
* @returns {string[]}
|
||||
*/
|
||||
const getDependsOn = (platform, step) => {
|
||||
if (imagePlatforms.has(getImageKey(platform))) {
|
||||
const key = `${getImageKey(platform)}-build-image`;
|
||||
if (key !== step) {
|
||||
return [key];
|
||||
}
|
||||
}
|
||||
return [];
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @returns {Step}
|
||||
*/
|
||||
const getBuildImageStep = platform => {
|
||||
const { os, arch, distro, release } = platform;
|
||||
const action = publishImages ? "publish-image" : "create-image";
|
||||
return {
|
||||
key: `${getImageKey(platform)}-build-image`,
|
||||
label: `${getImageLabel(platform)} - build-image`,
|
||||
agents: {
|
||||
queue: "build-image",
|
||||
},
|
||||
env: {
|
||||
DEBUG: "1",
|
||||
},
|
||||
retry: getRetry(),
|
||||
command: `node ./scripts/machine.mjs ${action} --ci --cloud=aws --os=${os} --arch=${arch} --distro=${distro} --distro-version=${release}`,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @returns {Step}
|
||||
*/
|
||||
const getBuildVendorStep = platform => {
|
||||
return {
|
||||
key: `${getTargetKey(platform)}-build-vendor`,
|
||||
label: `${getTargetLabel(platform)} - build-vendor`,
|
||||
depends_on: getDependsOn(platform),
|
||||
agents: getBuildAgent(platform),
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: getBuildEnv(platform),
|
||||
command: "bun run build:ci --target dependencies",
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @returns {Step}
|
||||
*/
|
||||
const getBuildCppStep = platform => {
|
||||
return {
|
||||
key: `${getTargetKey(platform)}-build-cpp`,
|
||||
label: `${getTargetLabel(platform)} - build-cpp`,
|
||||
depends_on: getDependsOn(platform),
|
||||
agents: getBuildAgent(platform),
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: {
|
||||
BUN_CPP_ONLY: "ON",
|
||||
...getBuildEnv(platform),
|
||||
},
|
||||
command: "bun run build:ci --target bun",
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @returns {Step}
|
||||
*/
|
||||
const getBuildZigStep = platform => {
|
||||
const toolchain = getBuildToolchain(platform);
|
||||
return {
|
||||
key: `${getTargetKey(platform)}-build-zig`,
|
||||
label: `${getTargetLabel(platform)} - build-zig`,
|
||||
depends_on: getDependsOn(platform),
|
||||
agents: getZigAgent(platform),
|
||||
retry: getRetry(1), // FIXME: Sometimes zig build hangs, so we need to retry once
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: getBuildEnv(platform),
|
||||
command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @returns {Step}
|
||||
*/
|
||||
const getBuildBunStep = platform => {
|
||||
return {
|
||||
key: `${getTargetKey(platform)}-build-bun`,
|
||||
label: `${getTargetLabel(platform)} - build-bun`,
|
||||
depends_on: [
|
||||
`${getTargetKey(platform)}-build-vendor`,
|
||||
`${getTargetKey(platform)}-build-cpp`,
|
||||
`${getTargetKey(platform)}-build-zig`,
|
||||
],
|
||||
agents: getBuildAgent(platform),
|
||||
retry: getRetry(),
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
env: {
|
||||
BUN_LINK_ONLY: "ON",
|
||||
...getBuildEnv(platform),
|
||||
},
|
||||
command: "bun run build:ci --target bun",
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @returns {Step}
|
||||
*/
|
||||
const getTestBunStep = platform => {
|
||||
const { os } = platform;
|
||||
let command;
|
||||
if (os === "windows") {
|
||||
command = `node .\\scripts\\runner.node.mjs --step ${getTargetKey(platform)}-build-bun`;
|
||||
} else {
|
||||
command = `./scripts/runner.node.mjs --step ${getTargetKey(platform)}-build-bun`;
|
||||
}
|
||||
let parallelism;
|
||||
if (os === "darwin") {
|
||||
parallelism = 2;
|
||||
} else {
|
||||
parallelism = 10;
|
||||
}
|
||||
let env;
|
||||
let depends = [];
|
||||
if (buildId) {
|
||||
env = {
|
||||
BUILDKITE_ARTIFACT_BUILD_ID: buildId,
|
||||
};
|
||||
} else {
|
||||
depends = [`${getTargetKey(platform)}-build-bun`];
|
||||
}
|
||||
let retry;
|
||||
if (os !== "windows") {
|
||||
// When the runner fails on Windows, Buildkite only detects an exit code of 1.
|
||||
// Because of this, we don't know if the run was fatal, or soft-failed.
|
||||
retry = getRetry(1);
|
||||
}
|
||||
let soft_fail;
|
||||
if (isMainBranch()) {
|
||||
soft_fail = true;
|
||||
} else {
|
||||
soft_fail = [{ exit_status: 2 }];
|
||||
}
|
||||
return {
|
||||
key: `${getPlatformKey(platform)}-test-bun`,
|
||||
label: `${getPlatformLabel(platform)} - test-bun`,
|
||||
depends_on: [...depends, ...getDependsOn(platform)],
|
||||
agents: getTestAgent(platform),
|
||||
retry,
|
||||
cancel_on_build_failing: isMergeQueue(),
|
||||
soft_fail,
|
||||
parallelism,
|
||||
command,
|
||||
env,
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Config
|
||||
*/
|
||||
|
||||
/**
|
||||
* @type {Platform[]}
|
||||
*/
|
||||
const buildPlatforms = [
|
||||
{ os: "darwin", arch: "aarch64", release: "14" },
|
||||
{ os: "darwin", arch: "x64", release: "14" },
|
||||
{ os: "linux", arch: "aarch64", distro: "debian", release: "11" },
|
||||
{ os: "linux", arch: "x64", distro: "debian", release: "11" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "debian", release: "11" },
|
||||
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" },
|
||||
{ os: "windows", arch: "x64", release: "2019" },
|
||||
{ os: "windows", arch: "x64", baseline: true, release: "2019" },
|
||||
];
|
||||
|
||||
/**
|
||||
* @type {Platform[]}
|
||||
*/
|
||||
const testPlatforms = [
|
||||
{ os: "darwin", arch: "aarch64", release: "14" },
|
||||
{ os: "darwin", arch: "aarch64", release: "13" },
|
||||
{ os: "darwin", arch: "x64", release: "14" },
|
||||
{ os: "darwin", arch: "x64", release: "13" },
|
||||
{ os: "linux", arch: "aarch64", distro: "debian", release: "12" },
|
||||
{ os: "linux", arch: "aarch64", distro: "debian", release: "11" },
|
||||
// { os: "linux", arch: "aarch64", distro: "debian", release: "10" },
|
||||
{ os: "linux", arch: "x64", distro: "debian", release: "12" },
|
||||
{ os: "linux", arch: "x64", distro: "debian", release: "11" },
|
||||
// { os: "linux", arch: "x64", distro: "debian", release: "10" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "debian", release: "11" },
|
||||
// { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "10" },
|
||||
// { os: "linux", arch: "aarch64", distro: "ubuntu", release: "24.04" },
|
||||
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04" },
|
||||
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04" },
|
||||
// { os: "linux", arch: "x64", distro: "ubuntu", release: "24.04" },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04" },
|
||||
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04" },
|
||||
// { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "22.04" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "20.04" },
|
||||
{ os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2023" },
|
||||
// { os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2" },
|
||||
{ os: "linux", arch: "x64", distro: "amazonlinux", release: "2023" },
|
||||
// { os: "linux", arch: "x64", distro: "amazonlinux", release: "2" },
|
||||
{ os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023" },
|
||||
// { os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2" },
|
||||
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" },
|
||||
// { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.17" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" },
|
||||
// { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.17" },
|
||||
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" },
|
||||
// { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.17" },
|
||||
{ os: "windows", arch: "x64", release: "2019" },
|
||||
{ os: "windows", arch: "x64", baseline: true, release: "2019" },
|
||||
];
|
||||
|
||||
const imagePlatforms = new Map(
|
||||
[...buildPlatforms, ...testPlatforms]
|
||||
.filter(platform => buildImages && isUsingNewAgent(platform))
|
||||
.map(platform => [getImageKey(platform), platform]),
|
||||
);
|
||||
|
||||
/**
|
||||
* @type {Step[]}
|
||||
*/
|
||||
const steps = [];
|
||||
|
||||
if (imagePlatforms.size) {
|
||||
steps.push({
|
||||
group: ":docker:",
|
||||
steps: [...imagePlatforms.values()].map(platform => getBuildImageStep(platform)),
|
||||
});
|
||||
}
|
||||
|
||||
for (const platform of buildPlatforms) {
|
||||
const { os, arch, abi, baseline } = platform;
|
||||
|
||||
/** @type {Step[]} */
|
||||
const platformSteps = [];
|
||||
|
||||
if (buildImages || !buildId) {
|
||||
platformSteps.push(
|
||||
getBuildVendorStep(platform),
|
||||
getBuildCppStep(platform),
|
||||
getBuildZigStep(platform),
|
||||
getBuildBunStep(platform),
|
||||
);
|
||||
}
|
||||
|
||||
if (!skipTests) {
|
||||
platformSteps.push(
|
||||
...testPlatforms
|
||||
.filter(
|
||||
testPlatform =>
|
||||
testPlatform.os === os &&
|
||||
testPlatform.arch === arch &&
|
||||
testPlatform.abi === abi &&
|
||||
testPlatform.baseline === baseline,
|
||||
)
|
||||
.map(testPlatform => getTestBunStep(testPlatform)),
|
||||
);
|
||||
}
|
||||
|
||||
if (!platformSteps.length) {
|
||||
continue;
|
||||
}
|
||||
|
||||
steps.push({
|
||||
key: getTargetKey(platform),
|
||||
group: getTargetLabel(platform),
|
||||
steps: platformSteps,
|
||||
});
|
||||
}
|
||||
|
||||
if (isMainBranch() && !isFork()) {
|
||||
steps.push({
|
||||
label: ":github:",
|
||||
agents: {
|
||||
queue: "test-darwin",
|
||||
},
|
||||
depends_on: buildPlatforms.map(platform => `${getTargetKey(platform)}-build-bun`),
|
||||
command: ".buildkite/scripts/upload-release.sh",
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
priority: getPriority(),
|
||||
steps,
|
||||
};
|
||||
}
|
||||
|
||||
async function main() {
|
||||
printEnvironment();
|
||||
|
||||
console.log("Checking last successful build...");
|
||||
const lastBuild = await getLastSuccessfulBuild();
|
||||
if (lastBuild) {
|
||||
const { id, path, commit_id: commit } = lastBuild;
|
||||
console.log(" - Build ID:", id);
|
||||
console.log(" - Build URL:", new URL(path, "https://buildkite.com/").toString());
|
||||
console.log(" - Commit:", commit);
|
||||
} else {
|
||||
console.log(" - No build found");
|
||||
}
|
||||
|
||||
let changedFiles;
|
||||
// FIXME: Fix various bugs when calculating changed files
|
||||
// false -> !isFork() && !isMainBranch()
|
||||
if (false) {
|
||||
console.log("Checking changed files...");
|
||||
const baseRef = lastBuild?.commit_id || getTargetBranch() || getMainBranch();
|
||||
console.log(" - Base Ref:", baseRef);
|
||||
const headRef = getCommit();
|
||||
console.log(" - Head Ref:", headRef);
|
||||
|
||||
changedFiles = await getChangedFiles(undefined, baseRef, headRef);
|
||||
if (changedFiles) {
|
||||
if (changedFiles.length) {
|
||||
changedFiles.forEach(filename => console.log(` - ${filename}`));
|
||||
} else {
|
||||
console.log(" - No changed files");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const isDocumentationFile = filename => /^(\.vscode|\.github|bench|docs|examples)|\.(md)$/i.test(filename);
|
||||
const isTestFile = filename => /^test/i.test(filename) || /runner\.node\.mjs$/i.test(filename);
|
||||
|
||||
console.log("Checking if CI should be forced...");
|
||||
let forceBuild;
|
||||
let ciFileChanged;
|
||||
{
|
||||
const message = getCommitMessage();
|
||||
const match = /\[(force ci|ci force|ci force build)\]/i.exec(message);
|
||||
if (match) {
|
||||
const [, reason] = match;
|
||||
console.log(" - Yes, because commit message contains:", reason);
|
||||
forceBuild = true;
|
||||
}
|
||||
for (const coref of [".buildkite/ci.mjs", "scripts/utils.mjs", "scripts/bootstrap.sh", "scripts/machine.mjs"]) {
|
||||
if (changedFiles && changedFiles.includes(coref)) {
|
||||
console.log(" - Yes, because the list of changed files contains:", coref);
|
||||
forceBuild = true;
|
||||
ciFileChanged = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log("Checking if CI should be skipped...");
|
||||
if (!forceBuild) {
|
||||
const message = getCommitMessage();
|
||||
const match = /\[(skip ci|no ci|ci skip|ci no)\]/i.exec(message);
|
||||
if (match) {
|
||||
const [, reason] = match;
|
||||
console.log(" - Yes, because commit message contains:", reason);
|
||||
return;
|
||||
}
|
||||
if (changedFiles && changedFiles.every(filename => isDocumentationFile(filename))) {
|
||||
console.log(" - Yes, because all changed files are documentation");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
console.log("Checking if CI should re-build images...");
|
||||
let buildImages;
|
||||
{
|
||||
const message = getCommitMessage();
|
||||
const match = /\[(build images?|images? build)\]/i.exec(message);
|
||||
if (match) {
|
||||
const [, reason] = match;
|
||||
console.log(" - Yes, because commit message contains:", reason);
|
||||
buildImages = true;
|
||||
}
|
||||
if (ciFileChanged) {
|
||||
console.log(" - Yes, because a core CI file changed");
|
||||
buildImages = true;
|
||||
}
|
||||
}
|
||||
|
||||
console.log("Checking if CI should publish images...");
|
||||
let publishImages;
|
||||
{
|
||||
const message = getCommitMessage();
|
||||
const match = /\[(publish images?|images? publish)\]/i.exec(message);
|
||||
if (match) {
|
||||
const [, reason] = match;
|
||||
console.log(" - Yes, because commit message contains:", reason);
|
||||
publishImages = true;
|
||||
buildImages = true;
|
||||
}
|
||||
if (ciFileChanged && isMainBranch()) {
|
||||
console.log(" - Yes, because a core CI file changed and this is main branch");
|
||||
publishImages = true;
|
||||
buildImages = true;
|
||||
}
|
||||
}
|
||||
|
||||
console.log("Checking if build should be skipped...");
|
||||
let skipBuild;
|
||||
if (!forceBuild) {
|
||||
const message = getCommitMessage();
|
||||
const match = /\[(only tests?|tests? only|skip build|no build|build skip|build no)\]/i.exec(message);
|
||||
if (match) {
|
||||
const [, reason] = match;
|
||||
console.log(" - Yes, because commit message contains:", reason);
|
||||
skipBuild = true;
|
||||
}
|
||||
if (changedFiles && changedFiles.every(filename => isTestFile(filename) || isDocumentationFile(filename))) {
|
||||
console.log(" - Yes, because all changed files are tests or documentation");
|
||||
skipBuild = true;
|
||||
}
|
||||
}
|
||||
|
||||
console.log("Checking if tests should be skipped...");
|
||||
let skipTests;
|
||||
{
|
||||
const message = getCommitMessage();
|
||||
const match = /\[(skip tests?|tests? skip|no tests?|tests? no)\]/i.exec(message);
|
||||
if (match) {
|
||||
console.log(" - Yes, because commit message contains:", match[1]);
|
||||
skipTests = true;
|
||||
}
|
||||
if (isMainBranch()) {
|
||||
console.log(" - Yes, because we're on main branch");
|
||||
skipTests = true;
|
||||
}
|
||||
}
|
||||
|
||||
console.log("Checking if build is a named release...");
|
||||
let buildRelease;
|
||||
if (/^(1|true|on|yes)$/i.test(getEnv("RELEASE", false))) {
|
||||
console.log(" - Yes, because RELEASE environment variable is set");
|
||||
buildRelease = true;
|
||||
} else {
|
||||
const message = getCommitMessage();
|
||||
const match = /\[(release|release build|build release)\]/i.exec(message);
|
||||
if (match) {
|
||||
const [, reason] = match;
|
||||
console.log(" - Yes, because commit message contains:", reason);
|
||||
buildRelease = true;
|
||||
}
|
||||
}
|
||||
|
||||
console.log("Generating pipeline...");
|
||||
const pipeline = getPipeline({
|
||||
buildId: lastBuild && skipBuild && !forceBuild ? lastBuild.id : undefined,
|
||||
buildImages,
|
||||
publishImages,
|
||||
skipTests,
|
||||
});
|
||||
|
||||
const content = toYaml(pipeline);
|
||||
const contentPath = join(process.cwd(), ".buildkite", "ci.yml");
|
||||
writeFileSync(contentPath, content);
|
||||
|
||||
console.log("Generated pipeline:");
|
||||
console.log(" - Path:", contentPath);
|
||||
console.log(" - Size:", (content.length / 1024).toFixed(), "KB");
|
||||
if (isBuildkite) {
|
||||
await uploadArtifact(contentPath);
|
||||
}
|
||||
|
||||
if (isBuildkite) {
|
||||
console.log("Setting canary revision...");
|
||||
const canaryRevision = buildRelease ? 0 : await getCanaryRevision();
|
||||
await spawnSafe(["buildkite-agent", "meta-data", "set", "canary", `${canaryRevision}`], { stdio: "inherit" });
|
||||
|
||||
console.log("Uploading pipeline...");
|
||||
await spawnSafe(["buildkite-agent", "pipeline", "upload", contentPath], { stdio: "inherit" });
|
||||
}
|
||||
}
|
||||
|
||||
await main();
|
||||
@@ -1,11 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
function run_command() {
|
||||
set -x
|
||||
"$@"
|
||||
{ set +x; } 2>/dev/null
|
||||
}
|
||||
|
||||
run_command node ".buildkite/ci.mjs"
|
||||
@@ -1,248 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -eo pipefail
|
||||
|
||||
function assert_main() {
|
||||
if [ "$RELEASE" == "1" ]; then
|
||||
echo "info: Skipping canary release because this is a release build"
|
||||
exit 0
|
||||
fi
|
||||
if [ -z "$BUILDKITE_REPO" ]; then
|
||||
echo "error: Cannot find repository for this build"
|
||||
exit 1
|
||||
fi
|
||||
if [ -z "$BUILDKITE_COMMIT" ]; then
|
||||
echo "error: Cannot find commit for this build"
|
||||
exit 1
|
||||
fi
|
||||
if [ -n "$BUILDKITE_PULL_REQUEST_REPO" ] && [ "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]; then
|
||||
echo "error: Cannot upload release from a fork"
|
||||
exit 1
|
||||
fi
|
||||
if [ "$BUILDKITE_PULL_REQUEST" != "false" ]; then
|
||||
echo "error: Cannot upload release from a pull request"
|
||||
exit 1
|
||||
fi
|
||||
if [ "$BUILDKITE_BRANCH" != "main" ]; then
|
||||
echo "error: Cannot upload release from a branch other than main"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_buildkite_agent() {
|
||||
if ! command -v "buildkite-agent" &> /dev/null; then
|
||||
echo "error: Cannot find buildkite-agent, please install it:"
|
||||
echo "https://buildkite.com/docs/agent/v3/install"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_github() {
|
||||
assert_command "gh" "gh" "https://github.com/cli/cli#installation"
|
||||
assert_buildkite_secret "GITHUB_TOKEN"
|
||||
# gh expects the token in $GH_TOKEN
|
||||
export GH_TOKEN="$GITHUB_TOKEN"
|
||||
}
|
||||
|
||||
function assert_aws() {
|
||||
assert_command "aws" "awscli" "https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html"
|
||||
for secret in "AWS_ACCESS_KEY_ID" "AWS_SECRET_ACCESS_KEY" "AWS_ENDPOINT"; do
|
||||
assert_buildkite_secret "$secret"
|
||||
done
|
||||
assert_buildkite_secret "AWS_BUCKET" --skip-redaction
|
||||
}
|
||||
|
||||
function assert_sentry() {
|
||||
assert_command "sentry-cli" "getsentry/tools/sentry-cli" "https://docs.sentry.io/cli/installation/"
|
||||
for secret in "SENTRY_AUTH_TOKEN" "SENTRY_ORG" "SENTRY_PROJECT"; do
|
||||
assert_buildkite_secret "$secret"
|
||||
done
|
||||
}
|
||||
|
||||
function run_command() {
|
||||
set -x
|
||||
"$@"
|
||||
{ set +x; } 2>/dev/null
|
||||
}
|
||||
|
||||
function assert_command() {
|
||||
local command="$1"
|
||||
local package="$2"
|
||||
local help_url="$3"
|
||||
if ! command -v "$command" &> /dev/null; then
|
||||
echo "warning: $command is not installed, installing..."
|
||||
if command -v brew &> /dev/null; then
|
||||
HOMEBREW_NO_AUTO_UPDATE=1 run_command brew install "$package"
|
||||
else
|
||||
echo "error: Cannot install $command, please install it"
|
||||
if [ -n "$help_url" ]; then
|
||||
echo ""
|
||||
echo "hint: See $help_url for help"
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
function assert_buildkite_secret() {
|
||||
local key="$1"
|
||||
local value=$(buildkite-agent secret get "$key" ${@:2})
|
||||
if [ -z "$value" ]; then
|
||||
echo "error: Cannot find $key secret"
|
||||
echo ""
|
||||
echo "hint: Create a secret named $key with a value:"
|
||||
echo "https://buildkite.com/docs/pipelines/buildkite-secrets"
|
||||
exit 1
|
||||
fi
|
||||
export "$key"="$value"
|
||||
}
|
||||
|
||||
function release_tag() {
|
||||
local version="$1"
|
||||
if [ "$version" == "canary" ]; then
|
||||
echo "canary"
|
||||
else
|
||||
echo "bun-v$version"
|
||||
fi
|
||||
}
|
||||
|
||||
function create_sentry_release() {
|
||||
local version="$1"
|
||||
local release="$version"
|
||||
if [ "$version" == "canary" ]; then
|
||||
release="$BUILDKITE_COMMIT-canary"
|
||||
fi
|
||||
run_command sentry-cli releases new "$release" --finalize
|
||||
run_command sentry-cli releases set-commits "$release" --auto --ignore-missing
|
||||
if [ "$version" == "canary" ]; then
|
||||
run_command sentry-cli deploys new --env="canary" --release="$release"
|
||||
fi
|
||||
}
|
||||
|
||||
function download_buildkite_artifact() {
|
||||
local name="$1"
|
||||
local dir="$2"
|
||||
if [ -z "$dir" ]; then
|
||||
dir="."
|
||||
fi
|
||||
run_command buildkite-agent artifact download "$name" "$dir"
|
||||
if [ ! -f "$dir/$name" ]; then
|
||||
echo "error: Cannot find Buildkite artifact: $name"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
function upload_github_asset() {
|
||||
local version="$1"
|
||||
local tag="$(release_tag "$version")"
|
||||
local file="$2"
|
||||
run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"
|
||||
|
||||
# Sometimes the upload fails, maybe this is a race condition in the gh CLI?
|
||||
while [ "$(gh release view "$tag" --repo "$BUILDKITE_REPO" | grep -c "$file")" -eq 0 ]; do
|
||||
echo "warn: Uploading $file to $tag failed, retrying..."
|
||||
sleep "$((RANDOM % 5 + 1))"
|
||||
run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"
|
||||
done
|
||||
}
|
||||
|
||||
function update_github_release() {
|
||||
local version="$1"
|
||||
local tag="$(release_tag "$version")"
|
||||
if [ "$tag" == "canary" ]; then
|
||||
sleep 5 # There is possibly a race condition where this overwrites artifacts?
|
||||
run_command gh release edit "$tag" --repo "$BUILDKITE_REPO" \
|
||||
--notes "This release of Bun corresponds to the commit: $BUILDKITE_COMMIT"
|
||||
fi
|
||||
}
|
||||
|
||||
function upload_s3_file() {
|
||||
local folder="$1"
|
||||
local file="$2"
|
||||
run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
|
||||
}
|
||||
|
||||
function send_bench_webhook() {
|
||||
if [ -z "$BENCHMARK_URL" ]; then
|
||||
echo "error: \$BENCHMARK_URL is not set"
|
||||
# exit 1 # TODO: this isn't live yet
|
||||
return
|
||||
fi
|
||||
|
||||
local tag="$1"
|
||||
local commit="$BUILDKITE_COMMIT"
|
||||
local artifact_path="${commit}"
|
||||
|
||||
if [ "$tag" == "canary" ]; then
|
||||
artifact_path="${commit}-canary"
|
||||
fi
|
||||
|
||||
local artifact_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/$artifact_path/bun-linux-x64.zip"
|
||||
local webhook_url="$BENCHMARK_URL?tag=$tag&commit=$commit&artifact_url=$artifact_url"
|
||||
|
||||
curl -X POST "$webhook_url"
|
||||
}
|
||||
|
||||
function create_release() {
|
||||
assert_main
|
||||
assert_buildkite_agent
|
||||
assert_github
|
||||
assert_aws
|
||||
assert_sentry
|
||||
|
||||
local tag="$1" # 'canary' or 'x.y.z'
|
||||
local artifacts=(
|
||||
bun-darwin-aarch64.zip
|
||||
bun-darwin-aarch64-profile.zip
|
||||
bun-darwin-x64.zip
|
||||
bun-darwin-x64-profile.zip
|
||||
bun-linux-aarch64.zip
|
||||
bun-linux-aarch64-profile.zip
|
||||
bun-linux-x64.zip
|
||||
bun-linux-x64-profile.zip
|
||||
bun-linux-x64-baseline.zip
|
||||
bun-linux-x64-baseline-profile.zip
|
||||
bun-linux-aarch64-musl.zip
|
||||
bun-linux-aarch64-musl-profile.zip
|
||||
bun-linux-x64-musl.zip
|
||||
bun-linux-x64-musl-profile.zip
|
||||
bun-linux-x64-musl-baseline.zip
|
||||
bun-linux-x64-musl-baseline-profile.zip
|
||||
bun-windows-x64.zip
|
||||
bun-windows-x64-profile.zip
|
||||
bun-windows-x64-baseline.zip
|
||||
bun-windows-x64-baseline-profile.zip
|
||||
)
|
||||
|
||||
function upload_artifact() {
|
||||
local artifact="$1"
|
||||
download_buildkite_artifact "$artifact"
|
||||
if [ "$tag" == "canary" ]; then
|
||||
upload_s3_file "releases/$BUILDKITE_COMMIT-canary" "$artifact" &
|
||||
else
|
||||
upload_s3_file "releases/$BUILDKITE_COMMIT" "$artifact" &
|
||||
fi
|
||||
upload_s3_file "releases/$tag" "$artifact" &
|
||||
upload_github_asset "$tag" "$artifact" &
|
||||
wait
|
||||
}
|
||||
|
||||
for artifact in "${artifacts[@]}"; do
|
||||
upload_artifact "$artifact"
|
||||
done
|
||||
|
||||
update_github_release "$tag"
|
||||
create_sentry_release "$tag"
|
||||
send_bench_webhook "$tag"
|
||||
}
|
||||
|
||||
function assert_canary() {
|
||||
local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
|
||||
if [ -z "$canary" ] || [ "$canary" == "0" ]; then
|
||||
echo "warn: Skipping release because this is not a canary build"
|
||||
exit 0
|
||||
fi
|
||||
}
|
||||
|
||||
assert_canary
|
||||
create_release "canary"
|
||||
@@ -1,9 +0,0 @@
|
||||
WarningsAsErrors: "*"
|
||||
FormatStyle: webkit
|
||||
Checks: >
|
||||
-*,
|
||||
clang-analyzer-*,
|
||||
-clang-analyzer-optin.core.EnumCastOutOfRange
|
||||
-clang-analyzer-webkit.UncountedLambdaCapturesChecker
|
||||
-clang-analyzer-optin.core.EnumCastOutOfRange
|
||||
-clang-analyzer-webkit.RefCntblBaseVirtualDtor
|
||||
2
.clangd
2
.clangd
@@ -1,5 +1,3 @@
|
||||
Index:
|
||||
Background: Skip # Disable slow background indexing of these files.
|
||||
|
||||
CompileFlags:
|
||||
CompilationDatabase: build/debug
|
||||
|
||||
1539
.docker/chrome.json
Normal file
1539
.docker/chrome.json
Normal file
File diff suppressed because it is too large
Load Diff
14
.docker/chromium.pref
Normal file
14
.docker/chromium.pref
Normal file
@@ -0,0 +1,14 @@
|
||||
# Note: 2 blank lines are required between entries
|
||||
Package: *
|
||||
Pin: release a=eoan
|
||||
Pin-Priority: 500
|
||||
|
||||
Package: *
|
||||
Pin: origin "ftp.debian.org"
|
||||
Pin-Priority: 300
|
||||
|
||||
# Pattern includes 'chromium', 'chromium-browser' and similarly
|
||||
# named dependencies:
|
||||
Package: chromium*
|
||||
Pin: origin "ftp.debian.org"
|
||||
Pin-Priority: 700
|
||||
8
.docker/copy-bun-binary.sh
Normal file
8
.docker/copy-bun-binary.sh
Normal file
@@ -0,0 +1,8 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
name=$(openssl rand -hex 12)
|
||||
id=$(docker create --name=bun-binary-$name $CONTAINER_TAG)
|
||||
docker container cp bun-binary-$name:$BUN_RELEASE_DIR bun-binary
|
||||
echo -e "bun-binary-$name"
|
||||
3
.docker/debian.list
Normal file
3
.docker/debian.list
Normal file
@@ -0,0 +1,3 @@
|
||||
deb http://deb.debian.org/debian buster main
|
||||
deb http://deb.debian.org/debian buster-updates main
|
||||
deb http://deb.debian.org/debian-security buster/updates main
|
||||
34
.docker/dockerfile-common.sh
Normal file
34
.docker/dockerfile-common.sh
Normal file
@@ -0,0 +1,34 @@
|
||||
export DOCKER_BUILDKIT=1
|
||||
|
||||
export BUILDKIT_ARCH=$(uname -m)
|
||||
export ARCH=${BUILDKIT_ARCH}
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "amd64" ]; then
|
||||
export BUILDKIT_ARCH="amd64"
|
||||
export ARCH=x64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "x86_64" ]; then
|
||||
export BUILDKIT_ARCH="amd64"
|
||||
export ARCH=x64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "arm64" ]; then
|
||||
export BUILDKIT_ARCH="arm64"
|
||||
export ARCH=aarch64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "aarch64" ]; then
|
||||
export BUILDKIT_ARCH="arm64"
|
||||
export ARCH=aarch64
|
||||
fi
|
||||
|
||||
if [ "$BUILDKIT_ARCH" == "armv7l" ]; then
|
||||
echo "Unsupported platform: $BUILDKIT_ARCH"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
export BUILD_ID=$(cat build-id)
|
||||
export CONTAINER_NAME=bun-linux-$ARCH
|
||||
export DEBUG_CONTAINER_NAME=debug-bun-linux-$ARCH
|
||||
export TEMP=/tmp/bun-0.0.$BUILD_ID
|
||||
11
.docker/pull.sh
Normal file
11
.docker/pull.sh
Normal file
@@ -0,0 +1,11 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
docker pull bunbunbunbun/bun-test-base:latest --platform=linux/amd64
|
||||
docker pull bunbunbunbun/bun-base:latest --platform=linux/amd64
|
||||
docker pull bunbunbunbun/bun-base-with-zig-and-webkit:latest --platform=linux/amd64
|
||||
|
||||
docker tag bunbunbunbun/bun-test-base:latest bun-base:latest
|
||||
docker tag bunbunbunbun/bun-base:latest bun-base:latest
|
||||
docker tag bunbunbunbun/bun-base-with-zig-and-webkit:latest bun-base-with-zig-and-webkit:latest
|
||||
47
.docker/run-dockerfile.sh
Normal file
47
.docker/run-dockerfile.sh
Normal file
@@ -0,0 +1,47 @@
|
||||
#!/bin/bash
|
||||
|
||||
source "dockerfile-common.sh"
|
||||
|
||||
export $CONTAINER_NAME=$CONTAINER_NAME-local
|
||||
|
||||
rm -rf $TEMP
|
||||
mkdir -p $TEMP
|
||||
|
||||
docker build . --target release --progress=plain -t $CONTAINER_NAME:latest --build-arg BUILDKIT_INLINE_CACHE=1 --platform=linux/$BUILDKIT_ARCH --cache-from $CONTAINER_NAME:latest
|
||||
|
||||
if (($?)); then
|
||||
echo "Failed to build container"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
id=$(docker create $CONTAINER_NAME:latest)
|
||||
docker cp $id:/home/ubuntu/bun-release $TEMP/$CONTAINER_NAME
|
||||
if (($?)); then
|
||||
echo "Failed to cp container"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cd $TEMP
|
||||
mkdir -p $TEMP/$CONTAINER_NAME $TEMP/$DEBUG_CONTAINER_NAME
|
||||
mv $CONTAINER_NAME/bun-profile $DEBUG_CONTAINER_NAME/bun
|
||||
zip -r $CONTAINER_NAME.zip $CONTAINER_NAME
|
||||
zip -r $DEBUG_CONTAINER_NAME.zip $DEBUG_CONTAINER_NAME
|
||||
docker rm -v $id
|
||||
abs=$(realpath $TEMP/$CONTAINER_NAME.zip)
|
||||
debug_abs=$(realpath $TEMP/$DEBUG_CONTAINER_NAME.zip)
|
||||
|
||||
case $(uname -s) in
|
||||
"Linux") target="linux" ;;
|
||||
*) target="other" ;;
|
||||
esac
|
||||
|
||||
if [ "$target" = "linux" ]; then
|
||||
if command -v bun --version >/dev/null; then
|
||||
cp $TEMP/$CONTAINER_NAME/bun $(which bun)
|
||||
cp $TEMP/$DEBUG_CONTAINER_NAME/bun $(which bun-profile)
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Saved to:"
|
||||
echo $debug_abs
|
||||
echo $abs
|
||||
9
.docker/run-test.sh
Executable file
9
.docker/run-test.sh
Executable file
@@ -0,0 +1,9 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
bun install
|
||||
bun install --cwd ./test/snippets
|
||||
bun install --cwd ./test/scripts
|
||||
|
||||
make $BUN_TEST_NAME
|
||||
5
.docker/runner.sh
Normal file
5
.docker/runner.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --env BUN_TEST_NAME=$BUN_TEST_NAME --ulimit memlock=-1:-1 --init --rm bun-test:latest
|
||||
5
.docker/unit-tests.sh
Normal file
5
.docker/unit-tests.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
docker container run --security-opt seccomp=.docker/chrome.json --env GITHUB_WORKSPACE=$GITHUB_WORKSPACE --ulimit memlock=-1:-1 --init --rm bun-unit-tests:latest
|
||||
@@ -11,8 +11,5 @@ packages/**/bun-profile
|
||||
src/bun.js/WebKit
|
||||
src/bun.js/WebKit/LayoutTests
|
||||
zig-build
|
||||
.zig-cache
|
||||
zig-out
|
||||
build
|
||||
vendor
|
||||
node_modules
|
||||
zig-cache
|
||||
zig-out
|
||||
10
.gitattributes
vendored
10
.gitattributes
vendored
@@ -7,7 +7,6 @@
|
||||
*.cpp text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.cc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.yml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.toml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.zig text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.rs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.h text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
@@ -44,10 +43,5 @@ test/**/* linguist-documentation
|
||||
bench/**/* linguist-documentation
|
||||
examples/**/* linguist-documentation
|
||||
|
||||
vendor/*.c linguist-vendored
|
||||
vendor/brotli/** linguist-vendored
|
||||
|
||||
test/js/node/test/fixtures linguist-vendored
|
||||
test/js/node/test/common linguist-vendored
|
||||
|
||||
test/js/bun/css/files linguist-vendored
|
||||
src/deps/*.c linguist-vendored
|
||||
src/deps/brotli/** linguist-vendored
|
||||
|
||||
4
.github/ISSUE_TEMPLATE/2-bug-report.yml
vendored
4
.github/ISSUE_TEMPLATE/2-bug-report.yml
vendored
@@ -1,8 +1,6 @@
|
||||
name: 🐛 Bug Report
|
||||
description: Report an issue that should be fixed
|
||||
labels:
|
||||
- bug
|
||||
- needs triage
|
||||
labels: [bug]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
name: 🇹 TypeScript Type Bug Report
|
||||
description: Report an issue with TypeScript types
|
||||
labels: [bug, types]
|
||||
labels: [bug, typescript]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
|
||||
12
.github/ISSUE_TEMPLATE/6-crash-report.yml
vendored
12
.github/ISSUE_TEMPLATE/6-crash-report.yml
vendored
@@ -1,17 +1,23 @@
|
||||
name: Prefilled crash report
|
||||
description: Report a crash in Bun
|
||||
labels:
|
||||
- bug
|
||||
- crash
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
**Thank you so much** for submitting a crash report. You're helping us make Bun more reliable for everyone!
|
||||
Thank you so much for submitting a crash report. You're helping us make Bun more reliable for everyone!
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: How can we reproduce the crash?
|
||||
description: Please provide instructions on how to reproduce the crash.
|
||||
- type: textarea
|
||||
id: code
|
||||
attributes:
|
||||
label: How can we reproduce the crash?
|
||||
description: Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun) or [CodeSandbox](https://codesandbox.io/templates/bun)
|
||||
label: JavaScript/TypeScript code that reproduces the crash?
|
||||
description: If this crash happened in the Bun runtime, can you paste code we can run to reproduce the crash?
|
||||
render: shell
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
name: bun install crash report
|
||||
description: Report a crash in bun install
|
||||
labels:
|
||||
- npm
|
||||
- crash
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
**Thank you so much** for submitting a crash report. You're helping us make Bun more reliable for everyone!
|
||||
- type: textarea
|
||||
id: package_json
|
||||
attributes:
|
||||
label: "`package.json` file"
|
||||
description: "Can you upload your `package.json` file? This helps us reproduce the crash."
|
||||
render: json
|
||||
- type: textarea
|
||||
id: repro
|
||||
attributes:
|
||||
label: How can we reproduce the crash?
|
||||
description: Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun) or [CodeSandbox](https://codesandbox.io/templates/bun)
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant log output. This will be
|
||||
automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
- type: textarea
|
||||
id: remapped_trace
|
||||
attributes:
|
||||
label: Stack Trace (bun.report)
|
||||
validations:
|
||||
required: true
|
||||
43
.github/actions/bump/action.yml
vendored
43
.github/actions/bump/action.yml
vendored
@@ -1,43 +0,0 @@
|
||||
name: Bump version
|
||||
description: Bump the version of Bun
|
||||
|
||||
inputs:
|
||||
version:
|
||||
description: The most recent version of Bun.
|
||||
required: true
|
||||
type: string
|
||||
token:
|
||||
description: The GitHub token to use for creating a pull request.
|
||||
required: true
|
||||
type: string
|
||||
default: ${{ github.token }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Run Bump
|
||||
shell: bash
|
||||
id: bump
|
||||
run: |
|
||||
set -euo pipefail
|
||||
MESSAGE=$(bun ./scripts/bump.ts patch --last-version=${{ inputs.version }})
|
||||
LATEST=$(cat LATEST)
|
||||
echo "version=$LATEST" >> $GITHUB_OUTPUT
|
||||
echo "message=$MESSAGE" >> $GITHUB_OUTPUT
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
add-paths: |
|
||||
CMakeLists.txt
|
||||
LATEST
|
||||
token: ${{ inputs.token }}
|
||||
commit-message: Bump version to ${{ steps.bump.outputs.version }}
|
||||
title: Bump to ${{ steps.bump.outputs.version }}
|
||||
delete-branch: true
|
||||
branch: github-actions/bump-version-${{ steps.bump.outputs.version }}--${{ github.run_id }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
${{ steps.bump.outputs.message }}
|
||||
|
||||
Auto-bumped by [this workflow](https://github.com/oven-sh/bun/actions/workflows/release.yml)
|
||||
3
.github/actions/setup-bun/action.yml
vendored
3
.github/actions/setup-bun/action.yml
vendored
@@ -42,10 +42,9 @@ runs:
|
||||
canary) release="canary";;
|
||||
*) release="bun-v${{ inputs.bun-version }}";;
|
||||
esac
|
||||
curl -LO "${{ inputs.download-url }}/${release}/${target}.zip" --retry 5
|
||||
curl -LO "${{ inputs.download-url }}/${release}/${target}.zip"
|
||||
unzip ${target}.zip
|
||||
mkdir -p ${{ runner.temp }}/.bun/bin
|
||||
mv ${target}/bun* ${{ runner.temp }}/.bun/bin/
|
||||
chmod +x ${{ runner.temp }}/.bun/bin/*
|
||||
ln -fs ${{ runner.temp }}/.bun/bin/bun ${{ runner.temp }}/.bun/bin/bunx
|
||||
echo "${{ runner.temp }}/.bun/bin" >> ${GITHUB_PATH}
|
||||
|
||||
310
.github/workflows/build-darwin.yml
vendored
Normal file
310
.github/workflows/build-darwin.yml
vendored
Normal file
@@ -0,0 +1,310 @@
|
||||
name: Build Darwin
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs-on:
|
||||
type: string
|
||||
default: macos-12-large
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
arch:
|
||||
type: string
|
||||
required: true
|
||||
cpu:
|
||||
type: string
|
||||
required: true
|
||||
assertions:
|
||||
type: boolean
|
||||
canary:
|
||||
type: boolean
|
||||
no-cache:
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
LLVM_VERSION: 16
|
||||
BUN_VERSION: 1.1.2
|
||||
|
||||
jobs:
|
||||
build-submodules:
|
||||
name: Build Submodules
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
.gitmodules
|
||||
src/deps
|
||||
scripts
|
||||
- name: Hash Submodules
|
||||
id: hash
|
||||
run: |
|
||||
print_versions() {
|
||||
git submodule | grep -v WebKit
|
||||
echo "LLVM_VERSION=${{ env.LLVM_VERSION }}"
|
||||
cat $(echo scripts/build*.sh scripts/all-dependencies.sh | tr " " "\n" | sort)
|
||||
}
|
||||
echo "hash=$(print_versions | shasum)" >> $GITHUB_OUTPUT
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
id: cache
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }}
|
||||
# TODO: Figure out how to cache homebrew dependencies
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Install Dependencies
|
||||
env:
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install \
|
||||
llvm@${{ env.LLVM_VERSION }} \
|
||||
ccache \
|
||||
rust \
|
||||
pkg-config \
|
||||
coreutils \
|
||||
libtool \
|
||||
cmake \
|
||||
libiconv \
|
||||
automake \
|
||||
openssl@1.1 \
|
||||
ninja \
|
||||
golang \
|
||||
gnu-sed --force --overwrite
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Clone Submodules
|
||||
run: |
|
||||
./scripts/update-submodules.sh
|
||||
- name: Build Submodules
|
||||
if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps
|
||||
run: |
|
||||
mkdir -p $BUN_DEPS_OUT_DIR
|
||||
./scripts/all-dependencies.sh
|
||||
- name: Save Cache
|
||||
if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
key: ${{ steps.cache.outputs.cache-primary-key }}
|
||||
- name: Upload bun-${{ inputs.tag }}-deps
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-deps
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
if-no-files-found: error
|
||||
build-cpp:
|
||||
name: Build C++
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
# TODO: Figure out how to cache homebrew dependencies
|
||||
- name: Install Dependencies
|
||||
env:
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install \
|
||||
llvm@${{ env.LLVM_VERSION }} \
|
||||
ccache \
|
||||
rust \
|
||||
pkg-config \
|
||||
coreutils \
|
||||
libtool \
|
||||
cmake \
|
||||
libiconv \
|
||||
automake \
|
||||
openssl@1.1 \
|
||||
ninja \
|
||||
golang \
|
||||
gnu-sed --force --overwrite
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/ccache
|
||||
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-cpp-
|
||||
- name: Compile
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
SOURCE_DIR: ${{ github.workspace }}
|
||||
OBJ_DIR: ${{ runner.temp }}/bun-cpp-obj
|
||||
BUN_DEPS_OUT_DIR: ${{ runner.temp }}/bun-deps
|
||||
CCACHE_DIR: ${{ runner.temp }}/ccache
|
||||
run: |
|
||||
mkdir -p $OBJ_DIR
|
||||
cd $OBJ_DIR
|
||||
cmake -S $SOURCE_DIR -B $OBJ_DIR \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DUSE_LTO=ON \
|
||||
-DBUN_CPP_ONLY=1 \
|
||||
-DNO_CONFIGURE_DEPENDS=1
|
||||
chmod +x compile-cpp-only.sh
|
||||
./compile-cpp-only.sh -v
|
||||
- name: Upload bun-${{ inputs.tag }}-cpp
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a
|
||||
if-no-files-found: error
|
||||
build-zig:
|
||||
name: Build Zig
|
||||
uses: ./.github/workflows/build-zig.yml
|
||||
with:
|
||||
os: darwin
|
||||
only-zig: true
|
||||
tag: ${{ inputs.tag }}
|
||||
arch: ${{ inputs.arch }}
|
||||
cpu: ${{ inputs.cpu }}
|
||||
assertions: ${{ inputs.assertions }}
|
||||
canary: ${{ inputs.canary }}
|
||||
no-cache: ${{ inputs.no-cache }}
|
||||
link:
|
||||
name: Link
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
needs:
|
||||
- build-submodules
|
||||
- build-cpp
|
||||
- build-zig
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
# TODO: Figure out how to cache homebrew dependencies
|
||||
- name: Install Dependencies
|
||||
env:
|
||||
HOMEBREW_NO_INSTALLED_DEPENDENTS_CHECK: 1
|
||||
HOMEBREW_NO_AUTO_UPDATE: 1
|
||||
HOMEBREW_NO_INSTALL_CLEANUP: 1
|
||||
run: |
|
||||
brew install \
|
||||
llvm@${{ env.LLVM_VERSION }} \
|
||||
ccache \
|
||||
rust \
|
||||
pkg-config \
|
||||
coreutils \
|
||||
libtool \
|
||||
cmake \
|
||||
libiconv \
|
||||
automake \
|
||||
openssl@1.1 \
|
||||
ninja \
|
||||
golang \
|
||||
gnu-sed --force --overwrite
|
||||
echo "$(brew --prefix ccache)/bin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix coreutils)/libexec/gnubin" >> $GITHUB_PATH
|
||||
echo "$(brew --prefix llvm@$LLVM_VERSION)/bin" >> $GITHUB_PATH
|
||||
brew link --overwrite llvm@$LLVM_VERSION
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Download bun-${{ inputs.tag }}-deps
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-deps
|
||||
path: ${{ runner.temp }}/bun-deps
|
||||
- name: Download bun-${{ inputs.tag }}-cpp
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-cpp
|
||||
path: ${{ runner.temp }}/bun-cpp-obj
|
||||
- name: Download bun-${{ inputs.tag }}-zig
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-zig
|
||||
path: ${{ runner.temp }}/release
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ${{ runner.temp }}/ccache
|
||||
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-cpp-
|
||||
- name: Link
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
CCACHE_DIR: ${{ runner.temp }}/ccache
|
||||
run: |
|
||||
SRC_DIR=$PWD
|
||||
mkdir ${{ runner.temp }}/link-build
|
||||
cd ${{ runner.temp }}/link-build
|
||||
cmake $SRC_DIR \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DUSE_LTO=ON \
|
||||
-DBUN_LINK_ONLY=1 \
|
||||
-DBUN_ZIG_OBJ="${{ runner.temp }}/release/bun-zig.o" \
|
||||
-DBUN_CPP_ARCHIVE="${{ runner.temp }}/bun-cpp-obj/bun-cpp-objects.a" \
|
||||
-DBUN_DEPS_OUT_DIR="${{ runner.temp }}/bun-deps" \
|
||||
-DNO_CONFIGURE_DEPENDS=1
|
||||
ninja -v
|
||||
- name: Prepare
|
||||
run: |
|
||||
cd ${{ runner.temp }}/link-build
|
||||
chmod +x bun-profile bun
|
||||
mkdir -p bun-${{ inputs.tag }}-profile/ bun-${{ inputs.tag }}/
|
||||
mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile
|
||||
mv bun bun-${{ inputs.tag }}/bun
|
||||
zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile
|
||||
zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }}
|
||||
- name: Upload bun-${{ inputs.tag }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}.zip
|
||||
if-no-files-found: error
|
||||
- name: Upload bun-${{ inputs.tag }}-profile
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-profile
|
||||
path: ${{ runner.temp }}/link-build/bun-${{ inputs.tag }}-profile.zip
|
||||
if-no-files-found: error
|
||||
on-failure:
|
||||
if: ${{ github.repository_owner == 'oven-sh' && failure() }}
|
||||
name: On Failure
|
||||
needs: link
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#FF0000"
|
||||
title: ""
|
||||
description: |
|
||||
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
|
||||
|
||||
@${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.
|
||||
|
||||
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
|
||||
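Both the compile and link jobs in build-darwin.yml key their ccache restore on `hashFiles(...)` over the same set of build-related globs. For a local estimate of whether a change will invalidate that cache, here is a rough Bun/TypeScript sketch of an equivalent digest; the glob list mirrors the workflow, but the hashing details (SHA-256 over file bytes in sorted path order) are an assumption, not GitHub's exact `hashFiles` algorithm.

```ts
// Approximate the workflow's hashFiles(...) ccache key locally.
// Assumption: SHA-256 over file contents, files visited in sorted path order;
// GitHub's real hashFiles() may differ in detail.
const globs = [
  "Dockerfile", "Makefile", "CMakeLists.txt", "build.zig",
  "scripts/**", "src/**",
  "packages/bun-usockets/src/**", "packages/bun-uws/src/**",
];

async function approximateHashFiles(patterns: string[]): Promise<string> {
  const paths: string[] = [];
  for (const pattern of patterns) {
    for await (const path of new Bun.Glob(pattern).scan({ dot: false })) {
      paths.push(path);
    }
  }
  const hasher = new Bun.CryptoHasher("sha256");
  for (const path of paths.sort()) {
    hasher.update(new Uint8Array(await Bun.file(path).arrayBuffer()));
  }
  return hasher.digest("hex");
}

// Example: the key the darwin-x64 jobs would restore from.
console.log(`bun-darwin-x64-cpp-${await approximateHashFiles(globs)}`);
```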
64  .github/workflows/build-linux.yml  vendored  Normal file
@@ -0,0 +1,64 @@
|
||||
name: Build Linux
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs-on:
|
||||
type: string
|
||||
required: true
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
arch:
|
||||
type: string
|
||||
required: true
|
||||
cpu:
|
||||
type: string
|
||||
required: true
|
||||
assertions:
|
||||
type: boolean
|
||||
zig-optimize:
|
||||
type: string
|
||||
canary:
|
||||
type: boolean
|
||||
no-cache:
|
||||
type: boolean
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build Linux
|
||||
uses: ./.github/workflows/build-zig.yml
|
||||
with:
|
||||
os: linux
|
||||
only-zig: false
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
tag: ${{ inputs.tag }}
|
||||
arch: ${{ inputs.arch }}
|
||||
cpu: ${{ inputs.cpu }}
|
||||
assertions: ${{ inputs.assertions }}
|
||||
zig-optimize: ${{ inputs.zig-optimize }}
|
||||
canary: ${{ inputs.canary }}
|
||||
no-cache: ${{ inputs.no-cache }}
|
||||
on-failure:
|
||||
if: ${{ github.repository_owner == 'oven-sh' && failure() }}
|
||||
name: On Failure
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#FF0000"
|
||||
title: ""
|
||||
description: |
|
||||
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
|
||||
|
||||
@${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.
|
||||
|
||||
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
|
||||
337  .github/workflows/build-windows.yml  vendored  Normal file
@@ -0,0 +1,337 @@
|
||||
name: Build Windows
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs-on:
|
||||
type: string
|
||||
default: windows
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
arch:
|
||||
type: string
|
||||
required: true
|
||||
cpu:
|
||||
type: string
|
||||
required: true
|
||||
assertions:
|
||||
type: boolean
|
||||
canary:
|
||||
type: boolean
|
||||
no-cache:
|
||||
type: boolean
|
||||
|
||||
env:
|
||||
# Must specify exact version of LLVM for Windows
|
||||
LLVM_VERSION: 16.0.6
|
||||
BUN_VERSION: 1.1.2
|
||||
|
||||
jobs:
|
||||
build-submodules:
|
||||
name: Build Submodules
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
.gitmodules
|
||||
src/deps
|
||||
scripts
|
||||
- name: Hash Submodules
|
||||
id: hash
|
||||
run: |
|
||||
$data = "$(& {
|
||||
git submodule | Where-Object { $_ -notmatch 'WebKit' }
|
||||
echo "LLVM_VERSION=${{ env.LLVM_VERSION }}"
|
||||
Get-Content -Path (Get-ChildItem -Path 'scripts/build*.ps1', 'scripts/all-dependencies.ps1', 'scripts/env.ps1' | Sort-Object -Property Name).FullName | Out-String
|
||||
echo 1
|
||||
})"
|
||||
$hash = ( -join ((New-Object -TypeName System.Security.Cryptography.SHA1CryptoServiceProvider).ComputeHash([System.Text.Encoding]::UTF8.GetBytes($data)) | ForEach-Object { $_.ToString("x2") } )).Substring(0, 10)
|
||||
echo "hash=${hash}" >> $env:GITHUB_OUTPUT
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
id: cache
|
||||
uses: actions/cache/restore@v4
|
||||
with:
|
||||
path: bun-deps
|
||||
key: bun-${{ inputs.tag }}-deps-${{ steps.hash.outputs.hash }}
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Install LLVM
|
||||
uses: KyleMayes/install-llvm-action@8b37482c5a2997a3ab5dbf6561f8109e2eaa7d3b
|
||||
with:
|
||||
version: ${{ env.LLVM_VERSION }}
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Install Ninja
|
||||
run: |
|
||||
choco install -y ninja
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Clone Submodules
|
||||
run: |
|
||||
.\scripts\update-submodules.ps1
|
||||
- if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
name: Build Dependencies
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
CCACHE_DIR: ccache
|
||||
run: |
|
||||
.\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
|
||||
Invoke-WebRequest -Uri "https://www.nasm.us/pub/nasm/releasebuilds/2.16.01/win64/nasm-2.16.01-win64.zip" -OutFile nasm.zip
|
||||
Expand-Archive nasm.zip (mkdir -Force "nasm")
|
||||
$Nasm = (Get-ChildItem "nasm")
|
||||
$env:Path += ";${Nasm}"
|
||||
$env:BUN_DEPS_OUT_DIR = (mkdir -Force "./bun-deps")
|
||||
.\scripts\all-dependencies.ps1
|
||||
- name: Save Cache
|
||||
if: ${{ inputs.no-cache || !steps.cache.outputs.cache-hit }}
|
||||
uses: actions/cache/save@v4
|
||||
with:
|
||||
path: bun-deps
|
||||
key: ${{ steps.cache.outputs.cache-primary-key }}
|
||||
- name: Upload bun-${{ inputs.tag }}-deps
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-deps
|
||||
path: bun-deps
|
||||
if-no-files-found: error
|
||||
codegen:
|
||||
name: Codegen
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Codegen
|
||||
run: |
|
||||
./scripts/cross-compile-codegen.sh win32 x64
|
||||
- if: ${{ inputs.canary }}
|
||||
name: Calculate Revision
|
||||
run: |
|
||||
echo "canary_revision=$(GITHUB_TOKEN="${{ github.token }}"
|
||||
bash ./scripts/calculate-canary-revision.sh --raw)" > build-codegen-win32-x64/.canary_revision
|
||||
- name: Upload bun-${{ inputs.tag }}-codegen
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-codegen
|
||||
path: build-codegen-win32-x64
|
||||
if-no-files-found: error
|
||||
build-cpp:
|
||||
name: Build C++
|
||||
needs: codegen
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install LLVM
|
||||
uses: KyleMayes/install-llvm-action@8b37482c5a2997a3ab5dbf6561f8109e2eaa7d3b
|
||||
with:
|
||||
version: ${{ env.LLVM_VERSION }}
|
||||
- name: Install Ninja
|
||||
run: |
|
||||
choco install -y ninja
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ccache
|
||||
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-cpp-
|
||||
- name: Download bun-${{ inputs.tag }}-codegen
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-codegen
|
||||
path: build
|
||||
- name: Compile
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
CCACHE_DIR: ccache
|
||||
run: |
|
||||
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
$CANARY_REVISION = 0
|
||||
.\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
|
||||
.\scripts\update-submodules.ps1
|
||||
.\scripts\build-libuv.ps1 -CloneOnly $True
|
||||
cd build
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
|
||||
-DNO_CODEGEN=1 `
|
||||
-DNO_CONFIGURE_DEPENDS=1 `
|
||||
"-DCANARY=${CANARY_REVISION}" `
|
||||
-DBUN_CPP_ONLY=1 ${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }}
|
||||
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
|
||||
.\compile-cpp-only.ps1 -v
|
||||
if ($LASTEXITCODE -ne 0) { throw "C++ compilation failed" }
|
||||
- name: Upload bun-${{ inputs.tag }}-cpp
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-cpp
|
||||
path: build/bun-cpp-objects.a
|
||||
if-no-files-found: error
|
||||
build-zig:
|
||||
name: Build Zig
|
||||
uses: ./.github/workflows/build-zig.yml
|
||||
with:
|
||||
os: windows
|
||||
zig-optimize: ReleaseSafe
|
||||
only-zig: true
|
||||
tag: ${{ inputs.tag }}
|
||||
arch: ${{ inputs.arch }}
|
||||
cpu: ${{ inputs.cpu }}
|
||||
assertions: ${{ inputs.assertions }}
|
||||
canary: ${{ inputs.canary }}
|
||||
no-cache: ${{ inputs.no-cache }}
|
||||
link:
|
||||
name: Link
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
needs:
|
||||
- build-submodules
|
||||
- build-cpp
|
||||
- build-zig
|
||||
- codegen
|
||||
steps:
|
||||
- name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- name: Install LLVM
|
||||
uses: KyleMayes/install-llvm-action@8b37482c5a2997a3ab5dbf6561f8109e2eaa7d3b
|
||||
with:
|
||||
version: ${{ env.LLVM_VERSION }}
|
||||
- name: Install Ninja
|
||||
run: |
|
||||
choco install -y ninja
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Download bun-${{ inputs.tag }}-deps
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-deps
|
||||
path: bun-deps
|
||||
- name: Download bun-${{ inputs.tag }}-cpp
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-cpp
|
||||
path: bun-cpp
|
||||
- name: Download bun-${{ inputs.tag }}-zig
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-zig
|
||||
path: bun-zig
|
||||
- name: Download bun-${{ inputs.tag }}-codegen
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-codegen
|
||||
path: build
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ccache
|
||||
key: bun-${{ inputs.tag }}-cpp-${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-cpp-
|
||||
- name: Link
|
||||
env:
|
||||
CPU_TARGET: ${{ inputs.cpu }}
|
||||
CCACHE_DIR: ccache
|
||||
run: |
|
||||
.\scripts\update-submodules.ps1
|
||||
.\scripts\env.ps1 ${{ contains(inputs.tag, '-baseline') && '-Baseline' || '' }}
|
||||
Set-Location build
|
||||
# $CANARY_REVISION = if (Test-Path build/.canary_revision) { Get-Content build/.canary_revision } else { "0" }
|
||||
$CANARY_REVISION = 0
|
||||
cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Release `
|
||||
-DNO_CODEGEN=1 `
|
||||
-DNO_CONFIGURE_DEPENDS=1 `
|
||||
"-DCANARY=${CANARY_REVISION}" `
|
||||
-DBUN_LINK_ONLY=1 `
|
||||
"-DBUN_DEPS_OUT_DIR=$(Resolve-Path ../bun-deps)" `
|
||||
"-DBUN_CPP_ARCHIVE=$(Resolve-Path ../bun-cpp/bun-cpp-objects.a)" `
|
||||
"-DBUN_ZIG_OBJ=$(Resolve-Path ../bun-zig/bun-zig.o)" `
|
||||
${{ contains(inputs.tag, '-baseline') && '-DUSE_BASELINE_BUILD=1' || '' }}
|
||||
if ($LASTEXITCODE -ne 0) { throw "CMake configuration failed" }
|
||||
ninja -v
|
||||
if ($LASTEXITCODE -ne 0) { throw "Link failed!" }
|
||||
- name: Prepare
|
||||
run: |
|
||||
$Dist = mkdir -Force "bun-${{ inputs.tag }}"
|
||||
cp -r build\bun.exe "$Dist\bun.exe"
|
||||
Compress-Archive -Force "$Dist" "${Dist}.zip"
|
||||
$Dist = "$Dist-profile"
|
||||
MkDir -Force "$Dist"
|
||||
cp -r build\bun.exe "$Dist\bun.exe"
|
||||
cp -r build\bun.pdb "$Dist\bun.pdb"
|
||||
Compress-Archive -Force "$Dist" "$Dist.zip"
|
||||
$env:BUN_GARBAGE_COLLECTOR_LEVEL = "1"
|
||||
$env:BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING = "1"
|
||||
.\build\bun.exe --print "JSON.stringify(require('bun:internal-for-testing').crash_handler.getFeatureData())" > .\features.json
|
||||
- name: Upload bun-${{ inputs.tag }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: bun-${{ inputs.tag }}.zip
|
||||
if-no-files-found: error
|
||||
- name: Upload bun-${{ inputs.tag }}-profile
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-profile
|
||||
path: bun-${{ inputs.tag }}-profile.zip
|
||||
if-no-files-found: error
|
||||
- name: Upload bun-feature-data
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-feature-data
|
||||
path: features.json
|
||||
if-no-files-found: error
|
||||
overwrite: true
|
||||
on-failure:
|
||||
if: ${{ github.repository_owner == 'oven-sh' && failure() }}
|
||||
name: On Failure
|
||||
needs: link
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#FF0000"
|
||||
title: ""
|
||||
description: |
|
||||
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
|
||||
|
||||
@${{ github.actor }}, the build for bun-${{ inputs.tag }} failed.
|
||||
|
||||
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
|
||||
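The "Hash Submodules" step in build-windows.yml derives its dependency cache key from the submodule state (excluding WebKit), the pinned LLVM version, and the dependency build scripts, then keeps the first 10 hex characters of a SHA-1. The following is a cross-platform Bun/TypeScript approximation of that PowerShell, useful for reasoning about when the `bun-*-deps` cache rolls over; it is not a byte-for-byte port of the step above.

```ts
// Sketch of the Windows deps cache key: submodules (minus WebKit) + LLVM
// version + dependency build scripts, SHA-1'd and truncated to 10 hex chars.
import { $ } from "bun";

const llvmVersion = "16.0.6"; // assumed: mirrors env.LLVM_VERSION in the workflow

const submodules = (await $`git submodule`.text())
  .split("\n")
  .filter(line => line && !line.includes("WebKit"));

const scriptGlobs = ["scripts/build*.ps1", "scripts/all-dependencies.ps1", "scripts/env.ps1"];
const scriptPaths: string[] = [];
for (const pattern of scriptGlobs) {
  for await (const path of new Bun.Glob(pattern).scan()) scriptPaths.push(path);
}

const hasher = new Bun.CryptoHasher("sha1");
hasher.update(submodules.join("\n"));
hasher.update(`LLVM_VERSION=${llvmVersion}`);
for (const path of scriptPaths.sort()) {
  hasher.update(await Bun.file(path).text());
}

const key = hasher.digest("hex").slice(0, 10);
console.log(`bun-windows-x64-deps-${key}`);
```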
122  .github/workflows/build-zig.yml  vendored  Normal file
@@ -0,0 +1,122 @@
|
||||
name: Build Zig
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs-on:
|
||||
type: string
|
||||
default: ${{ github.repository_owner != 'oven-sh' && 'ubuntu-latest' || inputs.only-zig && 'namespace-profile-bun-ci-linux-x64' || inputs.arch == 'x64' && 'namespace-profile-bun-ci-linux-x64' || 'namespace-profile-bun-ci-linux-aarch64' }}
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
os:
|
||||
type: string
|
||||
required: true
|
||||
arch:
|
||||
type: string
|
||||
required: true
|
||||
cpu:
|
||||
type: string
|
||||
required: true
|
||||
assertions:
|
||||
type: boolean
|
||||
default: false
|
||||
zig-optimize:
|
||||
type: string # 'ReleaseSafe' or 'ReleaseFast'
|
||||
default: ReleaseFast
|
||||
canary:
|
||||
type: boolean
|
||||
default: ${{ github.ref == 'refs/heads/main' }}
|
||||
only-zig:
|
||||
type: boolean
|
||||
default: true
|
||||
no-cache:
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
jobs:
|
||||
build-zig:
|
||||
name: ${{ inputs.only-zig && 'Build Zig' || 'Build & Link' }}
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Calculate Cache Key
|
||||
id: cache
|
||||
run: |
|
||||
echo "key=${{ hashFiles('Dockerfile', 'Makefile', 'CMakeLists.txt', 'build.zig', 'scripts/**', 'src/**', 'packages/bun-usockets/src/**', 'packages/bun-uws/src/**') }}" >> $GITHUB_OUTPUT
|
||||
- if: ${{ !inputs.no-cache }}
|
||||
name: Restore Cache
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
key: bun-${{ inputs.tag }}-docker-${{ steps.cache.outputs.key }}
|
||||
restore-keys: |
|
||||
bun-${{ inputs.tag }}-docker-
|
||||
path: |
|
||||
${{ runner.temp }}/dockercache
|
||||
- name: Setup Docker
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
install: true
|
||||
platforms: |
|
||||
linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
- name: Build
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
push: false
|
||||
target: ${{ inputs.only-zig && 'build_release_obj' || 'artifact' }}
|
||||
cache-from: |
|
||||
type=local,src=${{ runner.temp }}/dockercache
|
||||
cache-to: |
|
||||
type=local,dest=${{ runner.temp }}/dockercache,mode=max
|
||||
outputs: |
|
||||
type=local,dest=${{ runner.temp }}/release
|
||||
platforms: |
|
||||
linux/${{ runner.arch == 'X64' && 'amd64' || 'arm64' }}
|
||||
build-args: |
|
||||
GIT_SHA=${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
TRIPLET=${{ inputs.os == 'darwin' && format('{0}-macos-none', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || inputs.os == 'windows' && format('{0}-windows-msvc', inputs.arch == 'x64' && 'x86_64' || 'aarch64') || format('{0}-linux-gnu', inputs.arch == 'x64' && 'x86_64' || 'aarch64') }}
|
||||
ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }}
|
||||
BUILDARCH=${{ inputs.arch == 'x64' && 'amd64' || 'arm64' }}
|
||||
BUILD_MACHINE_ARCH=${{ inputs.arch == 'x64' && 'x86_64' || 'aarch64' }}
|
||||
CPU_TARGET=${{ inputs.arch == 'x64' && inputs.cpu || 'native' }}
|
||||
ASSERTIONS=${{ inputs.assertions && 'ON' || 'OFF' }}
|
||||
ZIG_OPTIMIZE=${{ inputs.zig-optimize }}
|
||||
CANARY=${{ inputs.canary && '1' || '0' }}
|
||||
- if: ${{ inputs.only-zig }}
|
||||
name: Upload bun-${{ inputs.tag }}-zig
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-zig
|
||||
path: ${{ runner.temp }}/release/bun-zig.o
|
||||
if-no-files-found: error
|
||||
- if: ${{ !inputs.only-zig }}
|
||||
name: Prepare
|
||||
run: |
|
||||
cd ${{ runner.temp }}/release
|
||||
chmod +x bun-profile bun
|
||||
mkdir bun-${{ inputs.tag }}-profile
|
||||
mkdir bun-${{ inputs.tag }}
|
||||
strip bun
|
||||
mv bun-profile bun-${{ inputs.tag }}-profile/bun-profile
|
||||
mv bun bun-${{ inputs.tag }}/bun
|
||||
zip -r bun-${{ inputs.tag }}-profile.zip bun-${{ inputs.tag }}-profile
|
||||
zip -r bun-${{ inputs.tag }}.zip bun-${{ inputs.tag }}
|
||||
- if: ${{ !inputs.only-zig }}
|
||||
name: Upload bun-${{ inputs.tag }}
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}.zip
|
||||
if-no-files-found: error
|
||||
- if: ${{ !inputs.only-zig }}
|
||||
name: Upload bun-${{ inputs.tag }}-profile
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-profile
|
||||
path: ${{ runner.temp }}/release/bun-${{ inputs.tag }}-profile.zip
|
||||
if-no-files-found: error
|
||||
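The `TRIPLET` and `ARCH` build-args passed to the Docker build above are expressed as one long nested ternary in workflow-expression syntax. The same mapping, written out as a plain TypeScript function purely as a readability aid (not code that exists in the repo):

```ts
// Equivalent of the TRIPLET build-arg expression in build-zig.yml.
type Os = "linux" | "darwin" | "windows";
type Arch = "x64" | "aarch64";

function zigTriplet(os: Os, arch: Arch): string {
  const cpu = arch === "x64" ? "x86_64" : "aarch64";
  switch (os) {
    case "darwin":
      return `${cpu}-macos-none`;
    case "windows":
      return `${cpu}-windows-msvc`;
    default:
      return `${cpu}-linux-gnu`;
  }
}

console.log(zigTriplet("darwin", "aarch64")); // aarch64-macos-none
console.log(zigTriplet("linux", "x64"));      // x86_64-linux-gnu
```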
242  .github/workflows/ci.yml  vendored  Normal file
@@ -0,0 +1,242 @@
|
||||
name: CI
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: write
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event_name == 'workflow_dispatch' && inputs.run-id || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
run-id:
|
||||
type: string
|
||||
description: The workflow ID to download artifacts (skips the build step)
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- .vscode/**/*
|
||||
- docs/**/*
|
||||
- examples/**/*
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths-ignore:
|
||||
- .vscode/**/*
|
||||
- docs/**/*
|
||||
- examples/**/*
|
||||
|
||||
jobs:
|
||||
format:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Format
|
||||
uses: ./.github/workflows/run-format.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
zig-version: 0.12.0-dev.1828+225fe6ddb
|
||||
permissions:
|
||||
contents: write
|
||||
lint:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Lint
|
||||
uses: ./.github/workflows/run-lint.yml
|
||||
secrets: inherit
|
||||
linux-x64:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build linux-x64
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: true
|
||||
linux-x64-baseline:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build linux-x64-baseline
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: true
|
||||
linux-aarch64:
|
||||
if: ${{ !inputs.run-id && github.repository_owner == 'oven-sh' }}
|
||||
name: Build linux-aarch64
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: namespace-profile-bun-ci-linux-aarch64
|
||||
tag: linux-aarch64
|
||||
arch: aarch64
|
||||
cpu: native
|
||||
canary: true
|
||||
darwin-x64:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build darwin-x64
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
tag: darwin-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: true
|
||||
darwin-x64-baseline:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build darwin-x64-baseline
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
tag: darwin-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: true
|
||||
darwin-aarch64:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build darwin-aarch64
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
|
||||
tag: darwin-aarch64
|
||||
arch: aarch64
|
||||
cpu: native
|
||||
canary: true
|
||||
windows-x64:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build windows-x64
|
||||
uses: ./.github/workflows/build-windows.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: windows
|
||||
tag: windows-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: true
|
||||
windows-x64-baseline:
|
||||
if: ${{ !inputs.run-id }}
|
||||
name: Build windows-x64-baseline
|
||||
uses: ./.github/workflows/build-windows.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: windows
|
||||
tag: windows-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: true
|
||||
linux-x64-test:
|
||||
if: ${{ inputs.run-id && always() || github.event_name == 'pull_request' }}
|
||||
name: Test linux-x64
|
||||
needs: linux-x64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64
|
||||
linux-x64-baseline-test:
|
||||
if: ${{ inputs.run-id && always() || github.event_name == 'pull_request' }}
|
||||
name: Test linux-x64-baseline
|
||||
needs: linux-x64-baseline
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64-baseline
|
||||
linux-aarch64-test:
|
||||
if: ${{ inputs.run-id && always() || github.event_name == 'pull_request' && github.repository_owner == 'oven-sh'}}
|
||||
name: Test linux-aarch64
|
||||
needs: linux-aarch64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: namespace-profile-bun-ci-linux-aarch64
|
||||
tag: linux-aarch64
|
||||
darwin-x64-test:
|
||||
if: ${{ inputs.run-id && always() || github.event_name == 'pull_request' }}
|
||||
name: Test darwin-x64
|
||||
needs: darwin-x64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
tag: darwin-x64
|
||||
darwin-x64-baseline-test:
|
||||
if: ${{ inputs.run-id && always() || github.event_name == 'pull_request' }}
|
||||
name: Test darwin-x64-baseline
|
||||
needs: darwin-x64-baseline
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
tag: darwin-x64-baseline
|
||||
darwin-aarch64-test:
|
||||
if: ${{ inputs.run-id && always() || github.event_name == 'pull_request' }}
|
||||
name: Test darwin-aarch64
|
||||
needs: darwin-aarch64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
|
||||
tag: darwin-aarch64
|
||||
windows-x64-test:
|
||||
if: ${{ inputs.run-id && always() || github.event_name == 'pull_request' }}
|
||||
name: Test windows-x64
|
||||
needs: windows-x64
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: windows
|
||||
tag: windows-x64
|
||||
windows-x64-baseline-test:
|
||||
if: ${{ inputs.run-id && always() || github.event_name == 'pull_request' }}
|
||||
name: Test windows-x64-baseline
|
||||
needs: windows-x64-baseline
|
||||
uses: ./.github/workflows/run-test.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
run-id: ${{ inputs.run-id }}
|
||||
pr-number: ${{ github.event.number }}
|
||||
runs-on: windows
|
||||
tag: windows-x64-baseline
|
||||
cleanup:
|
||||
if: ${{ always() }}
|
||||
name: Cleanup
|
||||
needs:
|
||||
- linux-x64
|
||||
- linux-x64-baseline
|
||||
- linux-aarch64
|
||||
- darwin-x64
|
||||
- darwin-x64-baseline
|
||||
- darwin-aarch64
|
||||
- windows-x64
|
||||
- windows-x64-baseline
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Cleanup Artifacts
|
||||
uses: geekyeggo/delete-artifact@v5
|
||||
with:
|
||||
name: |
|
||||
bun-*-cpp
|
||||
bun-*-zig
|
||||
bun-*-deps
|
||||
bun-*-codegen
|
||||
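Every test job in ci.yml gates on `inputs.run-id && always() || github.event_name == 'pull_request'`, which is easy to misread. Roughly, in plain code (a readability sketch under stated assumptions, not an exact re-implementation of GitHub's expression semantics):

```ts
// Sketch of the test-job gate in ci.yml.
interface Context {
  runId?: string;                 // workflow_dispatch "run-id" input, if any
  eventName: string;              // "pull_request", "push", "workflow_dispatch", ...
  upstreamBuildSucceeded: boolean;
}

function shouldRunTests(ctx: Context): boolean {
  if (ctx.runId) {
    // A run-id was supplied: reuse that run's artifacts and run the tests
    // even though the build jobs were skipped (what always() buys here).
    return true;
  }
  // Otherwise only pull requests run the test jobs, and only after the
  // matching build job succeeded (GitHub's implicit success() check).
  return ctx.eventName === "pull_request" && ctx.upstreamBuildSucceeded;
}

console.log(shouldRunTests({ eventName: "pull_request", upstreamBuildSucceeded: true }));                          // true
console.log(shouldRunTests({ runId: "123", eventName: "workflow_dispatch", upstreamBuildSucceeded: false }));      // true
```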
41  .github/workflows/clang-format.yml  vendored
@@ -1,41 +0,0 @@
|
||||
name: clang-format
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
merge_group:
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.1.27"
|
||||
LLVM_VERSION: "18.1.8"
|
||||
LLVM_VERSION_MAJOR: "18"
|
||||
|
||||
jobs:
|
||||
clang-format:
|
||||
name: clang-format
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Install LLVM
|
||||
run: |
|
||||
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
|
||||
- name: Clang Format
|
||||
env:
|
||||
LLVM_VERSION: ${{ env.LLVM_VERSION }}
|
||||
run: |
|
||||
bun run clang-format
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "`bun run clang-format`"
|
||||
41  .github/workflows/clang-tidy.yml  vendored
@@ -1,41 +0,0 @@
|
||||
name: clang-tidy
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
merge_group:
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.1.27"
|
||||
LLVM_VERSION: "18.1.8"
|
||||
LLVM_VERSION_MAJOR: "18"
|
||||
|
||||
jobs:
|
||||
clang-tidy:
|
||||
name: clang-tidy
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Install LLVM
|
||||
run: |
|
||||
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
|
||||
- name: Clang Tidy
|
||||
env:
|
||||
LLVM_VERSION: ${{ env.LLVM_VERSION }}
|
||||
run: |
|
||||
bun run clang-tidy:diff
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "`bun run clang-tidy`"
|
||||
85  .github/workflows/comment-lint.yml  vendored
@@ -1,85 +0,0 @@
|
||||
name: C++ Linter comment
|
||||
|
||||
permissions:
|
||||
actions: read
|
||||
pull-requests: write
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows:
|
||||
- lint-cpp
|
||||
types:
|
||||
- completed
|
||||
|
||||
jobs:
|
||||
comment-lint:
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
name: Comment
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download Comment
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: format.log
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: PR Number
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: pr-number.txt
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: Did Fail
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: did_fail.txt
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: Setup Environment
|
||||
id: env
|
||||
shell: bash
|
||||
run: |
|
||||
# Copy to outputs
|
||||
echo "pr-number=$(cat pr-number.txt)" >> $GITHUB_OUTPUT
|
||||
{
|
||||
echo 'text_output<<EOF'
|
||||
cat format.log
|
||||
echo EOF
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
echo "did_fail=$(cat did_fail.txt)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Find Comment
|
||||
id: comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
comment-author: github-actions[bot]
|
||||
body-includes: <!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
|
||||
- name: Update Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
if: steps.env.outputs.did_fail != '0'
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
body: |
|
||||
@${{ github.actor }}, `clang-tidy` had something to share with you about your code:
|
||||
|
||||
```cpp
|
||||
${{ steps.env.outputs.text_output }}
|
||||
```
|
||||
|
||||
Commit: ${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
|
||||
<!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
|
||||
edit-mode: replace
|
||||
- name: Update Previous Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
if: steps.env.outputs.did_fail == '0' && steps.comment.outputs.comment-id != ''
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
body: |
|
||||
clang-tidy nits are fixed! Thank you.
|
||||
|
||||
<!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
|
||||
edit-mode: replace
|
||||
121  .github/workflows/comment.yml  vendored  Normal file
@@ -0,0 +1,121 @@
|
||||
name: Comment
|
||||
|
||||
permissions:
|
||||
actions: read
|
||||
pull-requests: write
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows:
|
||||
- CI
|
||||
types:
|
||||
- completed
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
name: Comment
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download Tests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: bun
|
||||
pattern: bun-*-tests
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: Setup Environment
|
||||
id: env
|
||||
shell: bash
|
||||
run: |
|
||||
echo "pr-number=$(<bun/bun-linux-x64-tests/pr-number.txt)" >> $GITHUB_OUTPUT
|
||||
- name: Generate Comment
|
||||
run: |
|
||||
cat bun/bun-*-tests/comment.md > comment.md
|
||||
if [ -s comment.md ]; then
|
||||
echo -e "❌ @${{ github.actor }}, your commit has failing tests :(\n\n$(cat comment.md)" > comment.md
|
||||
else
|
||||
echo -e "✅ @${{ github.actor }}, all tests passed!" > comment.md
|
||||
fi
|
||||
echo -e "\n**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.event.workflow_run.id }})**" >> comment.md
|
||||
echo -e "<!-- generated-comment workflow=${{ github.workflow }} -->" >> comment.md
|
||||
- name: Find Comment
|
||||
id: comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
comment-author: github-actions[bot]
|
||||
body-includes: <!-- generated-comment workflow=${{ github.workflow }} -->
|
||||
- name: Write Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
body-path: comment.md
|
||||
edit-mode: replace
|
||||
comment-lint:
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
name: Comment
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download Comment
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: format.log
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: PR Number
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: pr-number.txt
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: Did Fail
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: did_fail.txt
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: Setup Environment
|
||||
id: env
|
||||
shell: bash
|
||||
run: |
|
||||
# Copy to outputs
|
||||
echo "pr-number=$(<pr-number.txt)" >> $GITHUB_OUTPUT
|
||||
echo "text_output=$(<format.log)" >> $GITHUB_OUTPUT
|
||||
echo "did_fail=$(<did_fail.txt)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Find Comment
|
||||
id: comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
comment-author: github-actions[bot]
|
||||
body-includes: <!-- generated-comment workflow=${{ github.workflow }} -->
|
||||
- name: Update Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
if: steps.env.outputs.did_fail != '0'
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
body: |
|
||||
@${{ github.actor }}, C++ linting failed!
|
||||
|
||||
Logs:
|
||||
```cpp
|
||||
${{ steps.env.outputs.text_output }}
|
||||
```
|
||||
|
||||
<!-- generated-comment workflow=${{ github.workflow }} -->
|
||||
edit-mode: replace
|
||||
- name: Update Previous Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
if: steps.env.outputs.did_fail == '0' && steps.comment.outputs.comment-id != ''
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
body: |
|
||||
@${{ github.actor }}, C++ lint errors fixed! Thank you.
|
||||
|
||||
<!-- generated-comment workflow=${{ github.workflow }} -->
|
||||
edit-mode: replace
|
||||
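The "Generate Comment" step in comment.yml merges the per-platform `comment.md` files from the downloaded test artifacts and wraps them in a pass/fail summary. The bash above is what actually runs; the sketch below re-expresses the same logic in Bun/TypeScript, with the actor, repository, and run id taken as plain parameters for illustration.

```ts
// Illustrative re-expression of the "Generate Comment" step (not the script
// the workflow runs): concatenate bun/bun-*-tests/comment.md and summarize.
async function generateComment(actor: string, repo: string, runId: string): Promise<string> {
  let failures = "";
  for await (const path of new Bun.Glob("bun/bun-*-tests/comment.md").scan()) {
    failures += await Bun.file(path).text();
  }
  let body = failures.trim()
    ? `❌ @${actor}, your commit has failing tests :(\n\n${failures}`
    : `✅ @${actor}, all tests passed!`;
  body += `\n\n**[View logs](https://github.com/${repo}/actions/runs/${runId})**`;
  body += `\n<!-- generated-comment workflow=CI -->`;
  return body;
}

await Bun.write("comment.md", await generateComment("someone", "oven-sh/bun", "123456"));
```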
183  .github/workflows/create-release-build.yml  vendored  Normal file
@@ -0,0 +1,183 @@
|
||||
name: Create Release Build
|
||||
run-name: Compile Bun v${{ inputs.version }} by ${{ github.actor }}
|
||||
|
||||
concurrency:
|
||||
group: release
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
actions: write
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
version:
|
||||
type: string
|
||||
required: true
|
||||
description: "Release version. Example: 1.1.4. Exclude the 'v' prefix."
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
description: "GitHub tag to use"
|
||||
clobber:
|
||||
type: boolean
|
||||
required: false
|
||||
default: false
|
||||
description: "Overwrite existing release artifacts?"
|
||||
release:
|
||||
types:
|
||||
- created
|
||||
|
||||
jobs:
|
||||
notify-start:
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
name: Notify Start
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK_PUBLIC }}
|
||||
nodetail: true
|
||||
color: "#1F6FEB"
|
||||
title: "Bun v${{ inputs.version }} is compiling"
|
||||
description: |
|
||||
### @${{ github.actor }} started compiling Bun v${{inputs.version}}
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#1F6FEB"
|
||||
title: "Bun v${{ inputs.version }} is compiling"
|
||||
description: |
|
||||
### @${{ github.actor }} started compiling Bun v${{inputs.version}}
|
||||
|
||||
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
|
||||
linux-x64:
|
||||
name: Build linux-x64
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: false
|
||||
linux-x64-baseline:
|
||||
name: Build linux-x64-baseline
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-linux-x64' || 'ubuntu-latest' }}
|
||||
tag: linux-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: false
|
||||
linux-aarch64:
|
||||
name: Build linux-aarch64
|
||||
uses: ./.github/workflows/build-linux.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: namespace-profile-bun-ci-linux-aarch64
|
||||
tag: linux-aarch64
|
||||
arch: aarch64
|
||||
cpu: native
|
||||
canary: false
|
||||
darwin-x64:
|
||||
name: Build darwin-x64
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
tag: darwin-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: false
|
||||
darwin-x64-baseline:
|
||||
name: Build darwin-x64-baseline
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'macos-12-large' || 'macos-12' }}
|
||||
tag: darwin-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: false
|
||||
darwin-aarch64:
|
||||
name: Build darwin-aarch64
|
||||
uses: ./.github/workflows/build-darwin.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: ${{ github.repository_owner == 'oven-sh' && 'namespace-profile-bun-ci-darwin-aarch64' || 'macos-12' }}
|
||||
tag: darwin-aarch64
|
||||
arch: aarch64
|
||||
cpu: native
|
||||
canary: false
|
||||
windows-x64:
|
||||
name: Build windows-x64
|
||||
uses: ./.github/workflows/build-windows.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: windows
|
||||
tag: windows-x64
|
||||
arch: x64
|
||||
cpu: haswell
|
||||
canary: false
|
||||
windows-x64-baseline:
|
||||
name: Build windows-x64-baseline
|
||||
uses: ./.github/workflows/build-windows.yml
|
||||
secrets: inherit
|
||||
with:
|
||||
runs-on: windows
|
||||
tag: windows-x64-baseline
|
||||
arch: x64
|
||||
cpu: nehalem
|
||||
canary: false
|
||||
|
||||
upload-artifacts:
|
||||
needs:
|
||||
- linux-x64
|
||||
- linux-x64-baseline
|
||||
- linux-aarch64
|
||||
- darwin-x64
|
||||
- darwin-x64-baseline
|
||||
- darwin-aarch64
|
||||
- windows-x64
|
||||
- windows-x64-baseline
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download Artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: bun-releases
|
||||
pattern: bun-*
|
||||
merge-multiple: true
|
||||
github-token: ${{ github.token }}
|
||||
- name: Check for Artifacts
|
||||
run: |
|
||||
if [ ! -d "bun-releases" ] || [ -z "$(ls -A bun-releases)" ]; then
|
||||
echo "Error: No artifacts were downloaded or 'bun-releases' directory does not exist."
|
||||
exit 1 # Fail the job if the condition is met
|
||||
else
|
||||
echo "Artifacts downloaded successfully."
|
||||
fi
|
||||
- name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#FF0000"
|
||||
title: "Bun v${{ inputs.version }} release artifacts uploaded"
|
||||
- name: "Upload Artifacts"
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
# Unzip one level deep each artifact
|
||||
cd bun-releases
|
||||
for f in *.zip; do
|
||||
unzip -o $f
|
||||
done
|
||||
cd ..
|
||||
gh release upload --repo=${{ github.repository }} ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.tag || github.event.release.id }} ${{ inputs.clobber && '--clobber' || '' }} bun-releases/*.zip
|
||||
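The `upload-artifacts` job above unzips each downloaded `bun-*.zip` one level deep and then pushes the zips to the GitHub release with `gh release upload`, clobbering existing assets only when the `clobber` input is set. A rough Bun-shell sketch of that step's intent follows; the tag and clobber flag are read from assumed environment variables for the sake of the example, and this is not a drop-in replacement for the workflow step.

```ts
// Sketch of the "Upload Artifacts" step: unzip one level deep, upload zips.
import { $ } from "bun";

const repo = "oven-sh/bun";
const tag = process.env.RELEASE_TAG ?? "bun-v1.1.4"; // assumed env var for this sketch
const clobber = process.env.CLOBBER === "true";      // assumed env var for this sketch

const zips: string[] = [];
for await (const zip of new Bun.Glob("bun-releases/*.zip").scan()) {
  await $`unzip -o ${zip} -d bun-releases`; // one level deep, like the workflow
  zips.push(zip);
}

if (zips.length === 0) {
  throw new Error("No artifacts were downloaded into bun-releases/");
}

for (const zip of zips) {
  if (clobber) await $`gh release upload --repo ${repo} ${tag} --clobber ${zip}`;
  else await $`gh release upload --repo ${repo} ${tag} ${zip}`;
}
```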
171  .github/workflows/labeled.yml  vendored
@@ -1,171 +0,0 @@
|
||||
name: Issue Labeled
|
||||
env:
|
||||
BUN_VERSION: 1.1.13
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [labeled]
|
||||
|
||||
jobs:
|
||||
# on-bug:
|
||||
# runs-on: ubuntu-latest
|
||||
# if: github.event.label.name == 'bug' || github.event.label.name == 'crash'
|
||||
# permissions:
|
||||
# issues: write
|
||||
# steps:
|
||||
# - name: Checkout
|
||||
# uses: actions/checkout@v4
|
||||
# with:
|
||||
# sparse-checkout: |
|
||||
# scripts
|
||||
# .github
|
||||
# CMakeLists.txt
|
||||
# - name: Setup Bun
|
||||
# uses: ./.github/actions/setup-bun
|
||||
# with:
|
||||
# bun-version: "1.1.24"
|
||||
# - name: "categorize bug"
|
||||
# id: add-labels
|
||||
# env:
|
||||
# GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
# GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
# ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
# shell: bash
|
||||
# run: |
|
||||
# echo '{"dependencies": { "@anthropic-ai/sdk": "latest" }}' > scripts/package.json && bun install --cwd=./scripts
|
||||
# LABELS=$(bun scripts/label-issue.ts)
|
||||
# echo "labels=$LABELS" >> $GITHUB_OUTPUT
|
||||
# - name: Add labels
|
||||
# uses: actions-cool/issues-helper@v3
|
||||
# if: steps.add-labels.outputs.labels != ''
|
||||
# with:
|
||||
# actions: "add-labels"
|
||||
# token: ${{ secrets.GITHUB_TOKEN }}
|
||||
# issue-number: ${{ github.event.issue.number }}
|
||||
# labels: ${{ steps.add-labels.outputs.labels }}
|
||||
on-labeled:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.label.name == 'crash' || github.event.label.name == 'needs repro'
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
scripts
|
||||
.github
|
||||
CMakeLists.txt
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.1.13"
|
||||
- name: "add platform and command label"
|
||||
id: add-labels
|
||||
if: github.event.label.name == 'crash'
|
||||
env:
|
||||
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
shell: bash
|
||||
run: |
|
||||
LABELS=$(bun scripts/read-issue.ts)
|
||||
echo "labels=$LABELS" >> $GITHUB_OUTPUT
|
||||
bun scripts/is-outdated.ts
|
||||
|
||||
if [[ -f "is-outdated.txt" ]]; then
|
||||
echo "is-outdated=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "outdated.txt" ]]; then
|
||||
echo "oudated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
|
||||
|
||||
rm -rf is-outdated.txt outdated.txt latest.txt
|
||||
- name: Generate comment text with Sentry Link
|
||||
if: github.event.label.name == 'crash'
|
||||
# ignore if fail
|
||||
continue-on-error: true
|
||||
id: generate-comment-text
|
||||
env:
|
||||
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_EVENTS_SECRET }}
|
||||
shell: bash
|
||||
run: |
|
||||
bun scripts/associate-issue-with-sentry.ts
|
||||
|
||||
if [[ -f "sentry-link.txt" ]]; then
|
||||
echo "sentry-link=$(cat sentry-link.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "sentry-id.txt" ]]; then
|
||||
echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Add labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: github.event.label.name == 'crash'
|
||||
with:
|
||||
actions: "add-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
labels: ${{ steps.add-labels.outputs.labels }}
|
||||
- name: Comment outdated
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
|
||||
|
||||
Are you able to reproduce this crash on the latest version of Bun?
|
||||
|
||||
```sh
|
||||
bun upgrade
|
||||
```
|
||||
- name: Comment with Sentry Link and outdated version
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
|
||||
|
||||
Are you able to reproduce this crash on the latest version of Bun?
|
||||
|
||||
```sh
|
||||
bun upgrade
|
||||
```
|
||||
|
||||
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
|
||||
|
||||
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
|
||||
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
|
||||
- name: Comment with Sentry Link
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated != 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
Thank you for reporting this crash.
|
||||
|
||||
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
|
||||
|
||||
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
|
||||
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
|
||||
- name: Comment needs repro
|
||||
if: github.event.label.name == 'needs repro'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
Hello @${{ github.event.issue.user.login }}. Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun), [CodeSandbox](https://codesandbox.io/templates/bun), or provide a bulleted list of commands to run that reproduce this issue. Issues marked with `needs repro` will be closed if they have no activity within 3 days.
|
||||
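The crash-triage path in labeled.yml shells out to scripts/read-issue.ts and scripts/is-outdated.ts, whose contents are not part of this diff. Purely as a hypothetical sketch of the outdated-version check those scripts feed into, the snippet below compares the version reported in the issue body with the repo's LATEST file and emits the marker files the workflow looks for; the regex on the issue body and the file names other than LATEST/outdated.txt/is-outdated.txt are assumptions, and a real check would use a proper semver comparison rather than simple inequality.

```ts
// Hypothetical outdated-version check (not the real scripts/is-outdated.ts).
const latest = (await Bun.file("LATEST").text()).trim();
const issueBody = process.env.GITHUB_ISSUE_BODY ?? "";

// Assumed issue-template wording, e.g. "Bun v1.1.8" somewhere in the body.
const reported = issueBody.match(/Bun v(\d+\.\d+\.\d+)/)?.[1];

if (reported && reported !== latest) {
  // Simplification: inequality stands in for a real semver "older than" check.
  await Bun.write("is-outdated.txt", "true");
  await Bun.write("outdated.txt", reported);
}

console.log(`latest=${latest}, reported=${reported ?? "unknown"}`);
```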
89
.github/workflows/on-submodule-update.yml
vendored
89
.github/workflows/on-submodule-update.yml
vendored
@@ -1,89 +0,0 @@
|
||||
name: Comment on updated submodule
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- "src/generated_versions_list.zig"
|
||||
- ".github/workflows/on-submodule-update.yml"
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
name: Comment
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
steps:
|
||||
- name: Checkout current
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash generated versions list
|
||||
id: hash
|
||||
run: |
|
||||
echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.base_ref }}
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash base
|
||||
id: base
|
||||
run: |
|
||||
echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Compare
|
||||
id: compare
|
||||
run: |
|
||||
if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
|
||||
echo "changed=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "changed=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
- name: Find Comment
|
||||
id: comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: github-actions[bot]
|
||||
body-includes: <!-- generated-comment submodule-updated -->
|
||||
- name: Write Warning Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
edit-mode: replace
|
||||
body: |
|
||||
⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.
|
||||
|
||||
If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.
|
||||
|
||||
<!-- generated-comment submodule-updated -->
|
||||
- name: Add labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
actions: "add-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Remove labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false'
|
||||
with:
|
||||
actions: "remove-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Delete outdated comment
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
|
||||
with:
|
||||
actions: "delete-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
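The on-submodule-update.yml comment job detects submodule bumps by hashing src/generated_versions_list.zig on the PR head and on the base branch and comparing the two digests. The same comparison, sketched with Bun below; the workflow does it with two sparse checkouts and `sha256sum`, whereas this sketch assumes `git show` against a fetched base ref, so treat it as an illustration of the check rather than the step itself.

```ts
// Sketch of the generated_versions_list.zig comparison.
import { $ } from "bun";

function sha256(text: string): string {
  return new Bun.CryptoHasher("sha256").update(text).digest("hex");
}

const file = "src/generated_versions_list.zig";
const baseRef = process.env.GITHUB_BASE_REF ?? "main"; // assumed to be fetched locally

const headHash = sha256(await Bun.file(file).text());
const spec = `origin/${baseRef}:${file}`;
const baseHash = sha256(await $`git show ${spec}`.text());

const changed = headHash !== baseHash;
console.log(`changed=${changed}`); // true => warn on the PR and add the changed-submodules label
```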
37  .github/workflows/prettier-format.yml  vendored
@@ -1,37 +0,0 @@
|
||||
name: prettier-format
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
merge_group:
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.1.27"
|
||||
|
||||
jobs:
|
||||
prettier-format:
|
||||
name: prettier-format
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Setup Dependencies
|
||||
run: |
|
||||
bun install
|
||||
- name: Prettier Format
|
||||
run: |
|
||||
bun run prettier:diff
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "`bun run prettier:extra`"
|
||||
54  .github/workflows/release.yml  vendored
@@ -1,6 +1,3 @@
|
||||
# TODO: Move this to bash scripts instead of GitHub Actions
|
||||
# so it can be run from Buildkite, see: .buildkite/scripts/release.sh
|
||||
|
||||
name: Release
|
||||
concurrency: release
|
||||
|
||||
@@ -66,7 +63,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.1.20"
|
||||
bun-version: "1.0.21"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Sign Release
|
||||
@@ -88,13 +85,10 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
# To work around an issue
|
||||
ref: main
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.1.20"
|
||||
bun-version: "1.0.21"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Release
|
||||
@@ -123,7 +117,7 @@ jobs:
|
||||
if: ${{ env.BUN_VERSION != 'canary' }}
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.1.20"
|
||||
bun-version: "1.0.21"
|
||||
- name: Setup Bun
|
||||
if: ${{ env.BUN_VERSION == 'canary' }}
|
||||
uses: ./.github/actions/setup-bun
|
||||
@@ -265,7 +259,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.1.20"
|
||||
bun-version: "1.0.21"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Release
|
||||
@@ -276,43 +270,3 @@ jobs:
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
|
||||
AWS_BUCKET: bun
|
||||
|
||||
notify-sentry:
|
||||
name: Notify Sentry
|
||||
runs-on: ubuntu-latest
|
||||
needs: s3
|
||||
steps:
|
||||
- name: Notify Sentry
|
||||
uses: getsentry/action-release@v1.7.0
|
||||
env:
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||
with:
|
||||
ignore_missing: true
|
||||
ignore_empty: true
|
||||
version: ${{ env.BUN_VERSION }}
|
||||
environment: production
|
||||
|
||||
bump:
|
||||
name: "Bump version"
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.event_name != 'schedule' }}
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
with:
|
||||
bun-version: "1.1.12"
|
||||
- name: Bump version
|
||||
uses: ./.github/actions/bump
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
with:
|
||||
version: ${{ env.BUN_VERSION }}
|
||||
token: ${{ github.token }}
|
||||
|
||||
47 .github/workflows/run-format.yml (new file)
@@ -0,0 +1,47 @@
name: Format

permissions:
contents: write

on:
workflow_call:
inputs:
zig-version:
type: string
required: true

jobs:
format:
name: Format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
sparse-checkout: |
.github
src
packages
test
bench
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.0.21"
- name: Setup Zig
uses: goto-bus-stop/setup-zig@c7b6cdd3adba8f8b96984640ff172c37c93f73ee
with:
version: ${{ inputs.zig-version }}
- name: Install Dependencies
run: |
bun install
- name: Format
run: |
bun fmt
- name: Format Zig
run: |
bun fmt:zig
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: Apply formatting changes
2 .github/workflows/run-lint.yml
@@ -2,8 +2,6 @@ name: Lint

permissions:
contents: read
env:
LLVM_VERSION: 16

on:
workflow_call:
208 .github/workflows/run-test.yml (new file)
@@ -0,0 +1,208 @@
|
||||
name: Test
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
runs-on:
|
||||
type: string
|
||||
required: true
|
||||
tag:
|
||||
type: string
|
||||
required: true
|
||||
pr-number:
|
||||
type: string
|
||||
required: true
|
||||
run-id:
|
||||
type: string
|
||||
default: ${{ github.run_id }}
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Tests
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
package.json
|
||||
bun.lockb
|
||||
test
|
||||
packages/bun-internal-test
|
||||
- name: Setup Environment
|
||||
shell: bash
|
||||
run: |
|
||||
echo "${{ inputs.pr-number }}" > pr-number.txt
|
||||
- name: Download Bun
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: bun
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ inputs.run-id || github.run_id }}
|
||||
- if: ${{ runner.os != 'Windows' }}
|
||||
name: Setup Bun
|
||||
shell: bash
|
||||
run: |
|
||||
unzip bun/bun-*.zip
|
||||
cd bun-*
|
||||
pwd >> $GITHUB_PATH
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Cygwin
|
||||
uses: secondlife/setup-cygwin@v3
|
||||
with:
|
||||
packages: bash
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Bun (Windows)
|
||||
run: |
|
||||
unzip bun/bun-*.zip
|
||||
cd bun-*
|
||||
pwd >> $env:GITHUB_PATH
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: Install Dependencies
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
bun install
|
||||
- name: Install Dependencies (test)
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
bun install --cwd test
|
||||
- name: Install Dependencies (runner)
|
||||
timeout-minutes: 5
|
||||
run: |
|
||||
bun install --cwd packages/bun-internal-test
|
||||
- name: Run Tests
|
||||
id: test
|
||||
timeout-minutes: 90
|
||||
shell: bash
|
||||
env:
|
||||
TMPDIR: ${{ runner.temp }}
|
||||
BUN_TAG: ${{ inputs.tag }}
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
SMTP_SENDGRID_SENDER: ${{ secrets.SMTP_SENDGRID_SENDER }}
|
||||
TLS_MONGODB_DATABASE_URL: ${{ secrets.TLS_MONGODB_DATABASE_URL }}
|
||||
TLS_POSTGRES_DATABASE_URL: ${{ secrets.TLS_POSTGRES_DATABASE_URL }}
|
||||
TEST_INFO_STRIPE: ${{ secrets.TEST_INFO_STRIPE }}
|
||||
SHELLOPTS: igncr
|
||||
run: |
|
||||
node packages/bun-internal-test/src/runner.node.mjs $(which bun)
|
||||
- if: ${{ always() }}
|
||||
name: Upload Results
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-tests
|
||||
path: |
|
||||
test-report.*
|
||||
comment.md
|
||||
pr-number.txt
|
||||
if-no-files-found: error
|
||||
overwrite: true
|
||||
- if: ${{ always() && steps.test.outputs.failing_tests != '' && github.event.pull_request && github.repository_owner == 'oven-sh' }}
|
||||
name: Send Message
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#FF0000"
|
||||
title: ""
|
||||
description: |
|
||||
### ❌ [${{ github.event.pull_request.title }}](${{ github.event.pull_request.html_url }})
|
||||
|
||||
@${{ github.actor }}, there are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}.
|
||||
|
||||
${{ steps.test.outputs.failing_tests }}
|
||||
|
||||
**[View logs](https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }})**
|
||||
- name: Fail
|
||||
if: ${{ failure() || always() && steps.test.outputs.failing_tests != '' }}
|
||||
run: |
|
||||
echo "There are ${{ steps.test.outputs.failing_tests_count || 'some' }} failing tests on bun-${{ inputs.tag }}."
|
||||
exit 1
|
||||
test-node:
|
||||
name: Node.js Tests
|
||||
runs-on: ${{ inputs.runs-on }}
|
||||
steps:
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Git
|
||||
run: |
|
||||
git config --global core.autocrlf false
|
||||
git config --global core.eol lf
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
test/node.js
|
||||
- name: Setup Environment
|
||||
shell: bash
|
||||
run: |
|
||||
echo "${{ inputs.pr-number }}" > pr-number.txt
|
||||
- name: Download Bun
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}
|
||||
path: bun
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ inputs.run-id || github.run_id }}
|
||||
- if: ${{ runner.os != 'Windows' }}
|
||||
name: Setup Bun
|
||||
shell: bash
|
||||
run: |
|
||||
unzip bun/bun-*.zip
|
||||
cd bun-*
|
||||
pwd >> $GITHUB_PATH
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Cygwin
|
||||
uses: secondlife/setup-cygwin@v3
|
||||
with:
|
||||
packages: bash
|
||||
- if: ${{ runner.os == 'Windows' }}
|
||||
name: Setup Bun (Windows)
|
||||
run: |
|
||||
unzip bun/bun-*.zip
|
||||
cd bun-*
|
||||
pwd >> $env:GITHUB_PATH
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
- name: Checkout Tests
|
||||
shell: bash
|
||||
working-directory: test/node.js
|
||||
run: |
|
||||
node runner.mjs --pull
|
||||
- name: Install Dependencies
|
||||
timeout-minutes: 5
|
||||
shell: bash
|
||||
working-directory: test/node.js
|
||||
run: |
|
||||
bun install
|
||||
- name: Run Tests
|
||||
timeout-minutes: 10 # Increase when more tests are added
|
||||
shell: bash
|
||||
working-directory: test/node.js
|
||||
env:
|
||||
TMPDIR: ${{ runner.temp }}
|
||||
BUN_GARBAGE_COLLECTOR_LEVEL: "0"
|
||||
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING: "true"
|
||||
run: |
|
||||
node runner.mjs
|
||||
- name: Upload Results
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: bun-${{ inputs.tag }}-node-tests
|
||||
path: |
|
||||
test/node.js/summary/*.json
|
||||
if-no-files-found: error
|
||||
overwrite: true
|
||||
30 .github/workflows/stale.yaml
@@ -1,30 +0,0 @@
name: Close inactive issues
on:
# schedule:
# - cron: "15 * * * *"
workflow_dispatch:

jobs:
close-issues:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- uses: actions/stale@v5
with:
days-before-issue-close: 5
any-of-issue-labels: "needs repro,waiting-for-author"
exempt-issue-labels: "neverstale"
exempt-pr-labels: "neverstale"
remove-stale-when-updated: true
stale-issue-label: "stale"
stale-pr-label: "stale"
stale-issue-message: "This issue is stale and may be closed due to inactivity. If you're still running into this, please leave a comment."
close-issue-message: "This issue was closed because it has been inactive for 5 days since being marked as stale."
days-before-pr-stale: 30
days-before-pr-close: 14
stale-pr-message: "This pull request is stale and may be closed due to inactivity."
close-pr-message: "This pull request has been closed due to inactivity."
repo-token: ${{ github.token }}
operations-per-run: 1000
29 .github/workflows/test-bump.yml
@@ -1,29 +0,0 @@
name: Test Bump version

on:
workflow_dispatch:
inputs:
version:
type: string
description: What is the release tag? (e.g. "1.0.2", "canary")
required: true

jobs:
bump:
name: "Bump version"
runs-on: ubuntu-latest
permissions:
pull-requests: write
contents: write
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: "1.1.12"
- name: Bump version
uses: ./.github/actions/bump
with:
version: ${{ inputs.version }}
token: ${{ github.token }}
82 .github/workflows/upload.yml (new file)
@@ -0,0 +1,82 @@
|
||||
name: Upload Artifacts
|
||||
run-name: Canary release ${{github.sha}} upload
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows:
|
||||
- CI
|
||||
types:
|
||||
- completed
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
upload:
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
name: Upload Artifacts
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download Artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: bun
|
||||
pattern: bun-*
|
||||
merge-multiple: true
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: Check for Artifacts
|
||||
run: |
|
||||
if [ ! -d "bun" ] || [ -z "$(ls -A bun)" ]; then
|
||||
echo "Error: No artifacts were downloaded or 'bun' directory does not exist."
|
||||
exit 1 # Fail the job if the condition is met
|
||||
else
|
||||
echo "Artifacts downloaded successfully."
|
||||
fi
|
||||
- name: Upload to GitHub Releases
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
tag: canary
|
||||
name: Canary (${{ github.sha }})
|
||||
prerelease: true
|
||||
body: This canary release of Bun corresponds to the commit [${{ github.sha }}]
|
||||
allowUpdates: true
|
||||
replacesArtifacts: true
|
||||
generateReleaseNotes: true
|
||||
artifactErrorsFailBuild: true
|
||||
artifacts: bun/**/bun-*.zip
|
||||
token: ${{ github.token }}
|
||||
- name: Upload to S3 (using SHA)
|
||||
uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e
|
||||
with:
|
||||
endpoint: ${{ secrets.AWS_ENDPOINT }}
|
||||
aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
aws_bucket: ${{ secrets.AWS_BUCKET }}
|
||||
source_dir: bun
|
||||
destination_dir: releases/${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
- name: Upload to S3 (using tag)
|
||||
uses: shallwefootball/s3-upload-action@4350529f410221787ccf424e50133cbc1b52704e
|
||||
with:
|
||||
endpoint: ${{ secrets.AWS_ENDPOINT }}
|
||||
aws_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
aws_bucket: ${{ secrets.AWS_BUCKET }}
|
||||
source_dir: bun
|
||||
destination_dir: releases/canary
|
||||
- name: Announce on Discord
|
||||
uses: sarisia/actions-status-discord@v1
|
||||
with:
|
||||
webhook: ${{ secrets.BUN_DISCORD_GITHUB_CHANNEL_WEBHOOK }}
|
||||
nodetail: true
|
||||
color: "#1F6FEB"
|
||||
title: "New Bun Canary available"
|
||||
url: https://github.com/oven-sh/bun/commit/${{ github.sha }}
|
||||
description: |
|
||||
A new canary build of Bun has been automatically uploaded. To upgrade, run:
|
||||
```sh
|
||||
bun upgrade --canary
|
||||
# bun upgrade --stable <- to downgrade
|
||||
```
|
||||
34 .github/workflows/zig-format.yml
@@ -1,34 +0,0 @@
name: zig-format

permissions:
contents: write

on:
workflow_call:
workflow_dispatch:
pull_request:
merge_group:

env:
BUN_VERSION: "1.1.27"

jobs:
zig-format:
name: zig-format
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup Bun
uses: ./.github/actions/setup-bun
with:
bun-version: ${{ env.BUN_VERSION }}
- name: Zig Format
run: |
bun run zig-format:diff
- name: Commit
uses: stefanzweifel/git-auto-commit-action@v5
with:
commit_message: "`bun run zig-format`"
41 .gitignore
@@ -15,7 +15,6 @@
.vs
.vscode/clang*
.vscode/cpp*
.zig-cache
*.a
*.bc
*.big
@@ -49,12 +48,12 @@
/build-*/
/bun-webkit
/kcov-out
/src/deps/libuv
/test-report.json
/test-report.md
/test.js
/test.ts
/testdir
/test.zig
build
build.ninja
bun-binary
@@ -116,6 +115,11 @@ sign.json
src/bun.js/bindings-obj
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/debug-bindings-obj
src/deps/c-ares/build
src/deps/libiconv
src/deps/openssl
src/deps/PLCrashReporter/
src/deps/s2n-tls
src/deps/zig-clap/.gitattributes
src/deps/zig-clap/.github
src/deps/zig-clap/example
@@ -136,35 +140,4 @@ txt.js
x64
yarn.lock
zig-cache
zig-out
test/node.js/upstream
.zig-cache
scripts/env.local
*.generated.ts
src/bake/generated.ts

# Dependencies
/vendor

# Dependencies (before CMake)
# These can be removed in the far future
/src/bun.js/WebKit
/src/deps/WebKit
/src/deps/boringssl
/src/deps/brotli
/src/deps/c*ares
/src/deps/lol*html
/src/deps/libarchive
/src/deps/libdeflate
/src/deps/libuv
/src/deps/ls*hpack
/src/deps/mimalloc
/src/deps/picohttpparser
/src/deps/tinycc
/src/deps/zstd
/src/deps/zlib
/src/deps/zig

# Generated files

.buildkite/ci.yml
zig-out
100 .gitmodules (new file)
@@ -0,0 +1,100 @@
|
||||
[submodule "src/javascript/jsc/WebKit"]
|
||||
path = src/bun.js/WebKit
|
||||
url = https://github.com/oven-sh/WebKit.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
update = none
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/picohttpparser"]
|
||||
path = src/deps/picohttpparser
|
||||
url = https://github.com/h2o/picohttpparser.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/mimalloc"]
|
||||
path = src/deps/mimalloc
|
||||
url = https://github.com/Jarred-Sumner/mimalloc.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/zlib"]
|
||||
path = src/deps/zlib
|
||||
url = https://github.com/cloudflare/zlib.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/libarchive"]
|
||||
path = src/deps/libarchive
|
||||
url = https://github.com/libarchive/libarchive.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/boringssl"]
|
||||
path = src/deps/boringssl
|
||||
url = https://github.com/oven-sh/boringssl.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/lol-html"]
|
||||
path = src/deps/lol-html
|
||||
url = https://github.com/cloudflare/lol-html
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/tinycc"]
|
||||
path = src/deps/tinycc
|
||||
url = https://github.com/Jarred-Sumner/tinycc.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/c-ares"]
|
||||
path = src/deps/c-ares
|
||||
url = https://github.com/c-ares/c-ares.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/zstd"]
|
||||
path = src/deps/zstd
|
||||
url = https://github.com/facebook/zstd.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/base64"]
|
||||
path = src/deps/base64
|
||||
url = https://github.com/aklomp/base64.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "src/deps/ls-hpack"]
|
||||
path = src/deps/ls-hpack
|
||||
url = https://github.com/litespeedtech/ls-hpack.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "zig"]
|
||||
path = src/deps/zig
|
||||
url = https://github.com/oven-sh/zig
|
||||
branch = bun
|
||||
depth = 1
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
[submodule "test/node.js/upstream"]
|
||||
path = test/node.js/upstream
|
||||
url = https://github.com/nodejs/node.git
|
||||
ignore = dirty
|
||||
depth = 1
|
||||
update = none
|
||||
shallow = true
|
||||
fetchRecurseSubmodules = false
|
||||
@@ -1,4 +0,0 @@
command script import vendor/zig/tools/lldb_pretty_printers.py
command script import vendor/WebKit/Tools/lldb/lldb_webkit.py

# type summary add --summary-string "${var} | inner=${var[0-30]}, source=${var[33-64]}, tag=${var[31-32]}" "unsigned long"
@@ -1,10 +1,7 @@
src/bun.js/WebKit
vendor
src/deps
test/snapshots
test/js/deno
test/node.js
src/react-refresh.js
*.min.js
test/js/node/test/fixtures
test/js/node/test/common
test/snippets
42 .vscode/c_cpp_properties.json
@@ -3,18 +3,18 @@
|
||||
{
|
||||
"name": "Debug",
|
||||
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
|
||||
"compileCommands": "${workspaceFolder}/build/debug/compile_commands.json",
|
||||
"compileCommands": "${workspaceFolder}/build/compile_commands.json",
|
||||
"includePath": [
|
||||
"${workspaceFolder}/build/bun-webkit/include",
|
||||
"${workspaceFolder}/build/debug/codegen",
|
||||
"${workspaceFolder}/build/codegen",
|
||||
"${workspaceFolder}/src/bun.js/bindings/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcore/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
|
||||
"${workspaceFolder}/src/bun.js/modules/",
|
||||
"${workspaceFolder}/src/js/builtins/",
|
||||
"${workspaceFolder}/vendor/boringssl/include/",
|
||||
"${workspaceFolder}/vendor",
|
||||
"${workspaceFolder}/src/deps/boringssl/include/",
|
||||
"${workspaceFolder}/src/deps",
|
||||
"${workspaceFolder}/src/napi/*",
|
||||
"${workspaceFolder}/packages/bun-usockets/src",
|
||||
"${workspaceFolder}/packages/",
|
||||
@@ -26,8 +26,8 @@
|
||||
"${workspaceFolder}/src/napi/*",
|
||||
"${workspaceFolder}/src/js/builtins/*",
|
||||
"${workspaceFolder}/src/bun.js/modules/*",
|
||||
"${workspaceFolder}/vendor/*",
|
||||
"${workspaceFolder}/vendor/boringssl/include/*",
|
||||
"${workspaceFolder}/src/deps/*",
|
||||
"${workspaceFolder}/src/deps/boringssl/include/*",
|
||||
"${workspaceFolder}/packages/bun-usockets/*",
|
||||
"${workspaceFolder}/packages/bun-uws/*",
|
||||
"${workspaceFolder}/src/napi/*",
|
||||
@@ -55,12 +55,12 @@
|
||||
"name": "BunWithJSCDebug",
|
||||
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
|
||||
"includePath": [
|
||||
"${workspaceFolder}/build/debug/codegen",
|
||||
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/",
|
||||
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/ICU/Headers/",
|
||||
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/",
|
||||
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/WTF/Headers",
|
||||
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/bmalloc/Headers/",
|
||||
"${workspaceFolder}/build/codegen",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/WTF/Headers",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/bmalloc/Headers/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcore/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
|
||||
@@ -68,19 +68,19 @@
|
||||
"${workspaceFolder}/src/bun.js/modules/",
|
||||
"${workspaceFolder}/src/js/builtins/",
|
||||
"${workspaceFolder}/src/js/out",
|
||||
"${workspaceFolder}/vendor/boringssl/include/",
|
||||
"${workspaceFolder}/vendor",
|
||||
"${workspaceFolder}/src/deps/boringssl/include/",
|
||||
"${workspaceFolder}/src/deps",
|
||||
"${workspaceFolder}/src/napi/*",
|
||||
"${workspaceFolder}/packages/bun-usockets/src",
|
||||
"${workspaceFolder}/packages/",
|
||||
],
|
||||
"browse": {
|
||||
"path": [
|
||||
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/",
|
||||
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/ICU/Headers/",
|
||||
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/**",
|
||||
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/WTF/Headers/**",
|
||||
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/bmalloc/Headers/**",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/ICU/Headers/",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/**",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/WTF/Headers/**",
|
||||
"${workspaceFolder}/src/bun.js/WebKit/WebKitBuild/Debug/bmalloc/Headers/**",
|
||||
"${workspaceFolder}/src/bun.js/bindings/*",
|
||||
"${workspaceFolder}/src/bun.js/bindings/*",
|
||||
"${workspaceFolder}/src/napi/*",
|
||||
@@ -90,8 +90,8 @@
|
||||
"${workspaceFolder}/src/js/builtins/*",
|
||||
"${workspaceFolder}/src/js/out/*",
|
||||
"${workspaceFolder}/src/bun.js/modules/*",
|
||||
"${workspaceFolder}/vendor",
|
||||
"${workspaceFolder}/vendor/boringssl/include/",
|
||||
"${workspaceFolder}/src/deps",
|
||||
"${workspaceFolder}/src/deps/boringssl/include/",
|
||||
"${workspaceFolder}/packages/bun-usockets/",
|
||||
"${workspaceFolder}/packages/bun-uws/",
|
||||
"${workspaceFolder}/src/napi",
|
||||
|
||||
434 .vscode/launch.json (diff suppressed because it is too large)
26 .vscode/settings.json
@@ -12,11 +12,9 @@
|
||||
"search.exclude": {
|
||||
"node_modules": true,
|
||||
".git": true,
|
||||
"vendor/*/**": true,
|
||||
"src/bun.js/WebKit": true,
|
||||
"src/deps/*/**": true,
|
||||
"test/node.js/upstream": true,
|
||||
// This will fill up your whole search history.
|
||||
"test/js/node/test/fixtures": true,
|
||||
"test/js/node/test/common": true,
|
||||
},
|
||||
"search.followSymlinks": false,
|
||||
"search.useIgnoreFiles": true,
|
||||
@@ -28,14 +26,10 @@
|
||||
|
||||
// Zig
|
||||
"zig.initialSetupDone": true,
|
||||
"zig.buildOnSave": false,
|
||||
"zig.buildOption": "build",
|
||||
"zig.zls.zigLibPath": "${workspaceFolder}/vendor/zig/lib",
|
||||
"zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen"],
|
||||
"zig.zls.buildOnSaveStep": "check",
|
||||
// "zig.zls.enableBuildOnSave": true,
|
||||
// "zig.buildOnSave": true,
|
||||
"zig.buildFilePath": "${workspaceFolder}/build.zig",
|
||||
"zig.path": "${workspaceFolder}/vendor/zig/zig.exe",
|
||||
"zig.path": "${workspaceFolder}/.cache/zig/zig.exe",
|
||||
"zig.formattingProvider": "zls",
|
||||
"zig.zls.enableInlayHints": false,
|
||||
"[zig]": {
|
||||
@@ -44,26 +38,19 @@
|
||||
"editor.defaultFormatter": "ziglang.vscode-zig",
|
||||
},
|
||||
|
||||
// lldb
|
||||
"lldb.launch.initCommands": ["command source ${workspaceFolder}/.lldbinit"],
|
||||
"lldb.verboseLogging": false,
|
||||
|
||||
// C++
|
||||
"lldb.verboseLogging": false,
|
||||
"cmake.configureOnOpen": false,
|
||||
"C_Cpp.errorSquiggles": "enabled",
|
||||
"[cpp]": {
|
||||
"editor.tabSize": 4,
|
||||
"editor.defaultFormatter": "xaver.clang-format",
|
||||
},
|
||||
"[c]": {
|
||||
"editor.tabSize": 4,
|
||||
"editor.defaultFormatter": "xaver.clang-format",
|
||||
},
|
||||
"[h]": {
|
||||
"editor.tabSize": 4,
|
||||
"editor.defaultFormatter": "xaver.clang-format",
|
||||
},
|
||||
"clangd.arguments": ["-header-insertion=never"],
|
||||
|
||||
// JavaScript
|
||||
"prettier.enable": true,
|
||||
@@ -75,7 +62,6 @@
|
||||
"[javascriptreact]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
},
|
||||
"prettier.prettierPath": "./node_modules/prettier",
|
||||
|
||||
// TypeScript
|
||||
"typescript.tsdk": "${workspaceFolder}/node_modules/typescript/lib",
|
||||
@@ -141,7 +127,6 @@
|
||||
},
|
||||
"files.associations": {
|
||||
"*.idl": "cpp",
|
||||
"array": "cpp",
|
||||
},
|
||||
"C_Cpp.files.exclude": {
|
||||
"**/.vscode": true,
|
||||
@@ -161,5 +146,4 @@
|
||||
"WebKit/WebKitBuild": true,
|
||||
"WebKit/WebInspectorUI": true,
|
||||
},
|
||||
"git.detectSubmodules": false,
|
||||
}
|
||||
|
||||
1469 CMakeLists.txt (diff suppressed because it is too large)
161 CONTRIBUTING.md
@@ -2,16 +2,11 @@ Configuring a development environment for Bun can take 10-30 minutes depending o

If you are using Windows, please refer to [this guide](/docs/project/building-windows)

{% details summary="For Ubuntu users" %}
TL;DR: Ubuntu 22.04 is suggested.
Bun currently requires `glibc >= 2.32` in development, which means that on Ubuntu 20.04 (glibc == 2.31) you will likely hit `error: undefined symbol: __libc_single_threaded` and need extra configuration. Also, according to this [issue](https://github.com/llvm/llvm-project/issues/97314), LLVM 16 is no longer maintained on Ubuntu 24.04 (noble); you may want to install LLVM 16 via `brew` on Ubuntu 24.04 instead.
{% /details %}

## Install Dependencies

Using your system's package manager, install Bun's dependencies:

{% codetabs group="os" %}
{% codetabs %}

```bash#macOS (Homebrew)
$ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby
@@ -30,7 +25,7 @@ $ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config
```

```bash#openSUSE Tumbleweed
$ sudo zypper install go cmake ninja automake git icu rustup && rustup toolchain install stable
$ sudo zypper install go cmake ninja automake git rustup && rustup toolchain install stable
```

{% /codetabs %}
@@ -60,10 +55,10 @@ $ brew install bun

Bun requires LLVM 16 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:

{% codetabs group="os" %}
{% codetabs %}

```bash#macOS (Homebrew)
$ brew install llvm@18
$ brew install llvm@16
```

```bash#Ubuntu/Debian
@@ -77,8 +72,8 @@ $ sudo pacman -S llvm clang lld

```bash#Fedora
$ sudo dnf install 'dnf-command(copr)'
$ sudo dnf copr enable -y @fedora-llvm-team/llvm17
$ sudo dnf install llvm16 clang16 lld16-devel
$ sudo dnf copr enable -y @fedora-llvm-team/llvm-snapshots
$ sudo dnf install llvm clang lld
```

```bash#openSUSE Tumbleweed
@@ -97,7 +92,7 @@ $ which clang-16

If not, run this to manually add it:

{% codetabs group="os" %}
{% codetabs %}

```bash#macOS (Homebrew)
# use fish_add_path if you're using fish
@@ -112,79 +107,96 @@ $ export PATH="$PATH:/usr/lib/llvm16/bin"

{% /codetabs %}

> ⚠️ Ubuntu distributions (<= 20.04) may require installation of the C++ standard library independently. See the [troubleshooting section](#span-file-not-found-on-ubuntu) for more information.
> ⚠️ Ubuntu distributions may require installation of the C++ standard library independently. See the [troubleshooting section](#span-file-not-found-on-ubuntu) for more information.

## Building Bun

After cloning the repository, run the following command to build. This may take a while as it will clone submodules and build dependencies.
After cloning the repository, run the following command to run the first build. This may take a while as it will clone submodules and build dependencies.

```bash
$ bun setup
```

The binary will be located at `./build/bun-debug`. It is recommended to add this to your `$PATH`. To verify the build worked, let's print the version number on the development build of Bun.

```bash
$ build/bun-debug --version
x.y.z_debug
```

To rebuild, you can invoke `bun run build`:

```bash
$ bun run build
```

The binary will be located at `./build/debug/bun-debug`. It is recommended to add this to your `$PATH`. To verify the build worked, let's print the version number on the development build of Bun.
These two scripts, `setup` and `build`, are aliases that do roughly the following:

```bash
$ build/debug/bun-debug --version
x.y.z_debug
$ ./scripts/setup.sh
$ cmake -S . -B build -G Ninja -DCMAKE_BUILD_TYPE=Debug
$ ninja -C build # 'bun run build' runs just this
```

Advanced users can pass CMake flags to customize the build.

## VSCode

VSCode is the recommended IDE for working on Bun, as it has already been configured. After opening the project, you can run `Extensions: Show Recommended Extensions` to install the recommended extensions for Zig and C++. ZLS is automatically configured.

If you use a different editor, make sure that you tell ZLS to use the automatically installed Zig compiler, which is located at `./vendor/zig/zig.exe`. The filename is `zig.exe` so that it works as expected on Windows, but it still works on macOS/Linux (it just has a surprising file extension).

We recommend adding `./build/debug` to your `$PATH` so that you can run `bun-debug` in your terminal:

```sh
$ bun-debug
```
If you use a different editor, make sure that you tell ZLS to use the automatically installed Zig compiler, which is located at `./.cache/zig/zig` (`zig.exe` on Windows).

## Code generation scripts

Several code generation scripts are used during Bun's build process. These are run automatically when changes are made to certain files.
{% callout %}

In particular, these are:
**Note**: This section is outdated. The code generators are run automatically by ninja, instead of by `make`.

- `./src/codegen/generate-jssink.ts` -- Generates `build/debug/codegen/JSSink.cpp`, `build/debug/codegen/JSSink.h` which implement various classes for interfacing with `ReadableStream`. This is internally how `FileSink`, `ArrayBufferSink`, `"type": "direct"` streams and other code related to streams works.
- `./src/codegen/generate-classes.ts` -- Generates `build/debug/codegen/ZigGeneratedClasses*`, which generates Zig & C++ bindings for JavaScriptCore classes implemented in Zig. In `**/*.classes.ts` files, we define the interfaces for various classes, methods, prototypes, getters/setters etc. which the code generator reads to generate boilerplate code implementing the JavaScript objects in C++ and wiring them up to Zig.
- `./src/codegen/bundle-modules.ts` -- Bundles built-in modules like `node:fs`, `bun:ffi` into files we can include in the final binary. In development, these can be reloaded without rebuilding Zig (you still need to run `bun run build`, but it re-reads the transpiled files from disk afterwards). In release builds, these are embedded into the binary.
- `./src/codegen/bundle-functions.ts` -- Bundles globally-accessible functions implemented in JavaScript/TypeScript like `ReadableStream`, `WritableStream`, and a handful more. These are used similarly to the builtin modules, but the output more closely aligns with what WebKit/Safari does for Safari's built-in functions so that we can copy-paste the implementations from WebKit as a starting point.
{% /callout %}
Bun leverages a lot of code generation scripts.

The [./src/bun.js/bindings/headers.h](https://github.com/oven-sh/bun/blob/main/src/bun.js/bindings/headers.h) file has bindings to & from Zig <> C++ code. This file is generated by running the following:

```bash
$ make headers
```

This ensures that the types for Zig and the types for C++ match up correctly, by using comptime reflection over functions exported/imported.

TypeScript files that end with `*.classes.ts` are another code generation script. They generate C++ boilerplate for classes implemented in Zig. The generated code lives in:

- [src/bun.js/bindings/ZigGeneratedClasses.cpp](https://github.com/oven-sh/bun/tree/main/src/bun.js/bindings/ZigGeneratedClasses.cpp)
- [src/bun.js/bindings/ZigGeneratedClasses.h](https://github.com/oven-sh/bun/tree/main/src/bun.js/bindings/ZigGeneratedClasses.h)
- [src/bun.js/bindings/generated_classes.zig](https://github.com/oven-sh/bun/tree/main/src/bun.js/bindings/generated_classes.zig)
To generate the code, run:

```bash
$ make codegen
```

Lastly, we also have a [code generation script](src/bun.js/scripts/generate-jssink.js) for our native stream implementations.
To run that, run:

```bash
$ make generate-sink
```

You probably won't need to run that one much.

## Modifying ESM modules

Certain modules like `node:fs`, `node:stream`, `bun:sqlite`, and `ws` are implemented in JavaScript. These live in `src/js/{node,bun,thirdparty}` files and are pre-bundled using Bun.
Certain modules like `node:fs`, `node:stream`, `bun:sqlite`, and `ws` are implemented in JavaScript. These live in `src/js/{node,bun,thirdparty}` files and are pre-bundled using Bun. In debug builds, Bun automatically loads these from the filesystem, wherever it was compiled, so there is no need to re-run `make dev`.

## Release build

To compile a release build of Bun, run:
To build a release build of Bun, run:

```bash
$ bun run build:release
```

The binary will be located at `./build/release/bun` and `./build/release/bun-profile`.

### Download release build from pull requests

To save you time spent building a release build locally, we provide a way to run release builds from pull requests. This is useful for manually testing changes in a release build before they are merged.

To run a release build from a pull request, you can use the `bun-pr` npm package:

```sh
bunx bun-pr <pr-number>
bunx bun-pr <branch-name>
bunx bun-pr "https://github.com/oven-sh/bun/pull/1234566"
```

This will download the release build from the pull request and add it to `$PATH` as `bun-${pr-number}`. You can then run the build with `bun-${pr-number}`.

```sh
bun-1234566 --version
```

This works by downloading the release build from the GitHub Actions artifacts on the linked pull request. You may need the `gh` CLI installed to authenticate with GitHub.
The binary will be located at `./build-release/bun` and `./build-release/bun-profile`.

## Valgrind

@@ -206,18 +218,24 @@ $ valgrind --fair-sched=try --track-origins=yes bun-debug <args>

## Building WebKit locally + Debug mode of JSC

{% callout %}

**TODO**: This is out of date. TLDR is pass `-DUSE_DEBUG_JSC=1` or `-DWEBKIT_DIR=...` to CMake. It will probably need more fiddling. Ask @paperdave if you need this.

{% /callout %}

WebKit is not cloned by default (to save time and disk space). To clone and build WebKit locally, run:

```bash
# Clone WebKit into ./vendor/WebKit
$ git clone https://github.com/oven-sh/WebKit vendor/WebKit

# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `make jsc` for a release build
$ make jsc-debug

# Build bun with the local JSC build
$ bun run build:local
# once you run this, `make submodule` can be used to automatically
# update WebKit and the other submodules
$ git submodule update --init --depth 1 --checkout src/bun.js/WebKit
# to make a jsc release build
$ make jsc
# JSC debug build does not work perfectly with Bun yet, this is actively being
# worked on and will eventually become the default.
$ make jsc-build-linux-compile-debug cpp
$ make jsc-build-mac-compile-debug cpp
```

Note that the WebKit folder, including build artifacts, is 8GB+ in size.
@@ -285,27 +303,12 @@ If you see this error when compiling, run:
$ xcode-select --install
```

### Cannot find `libatomic.a`
## Cannot find `libatomic.a`

Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:

```bash
$ bun run build -DUSE_STATIC_LIBATOMIC=OFF
$ bun setup -DUSE_STATIC_LIBATOMIC=OFF
```

The built version of Bun may not work on other systems if compiled this way.

### ccache conflicts with building TinyCC on macOS

If you run into issues with `ccache` when building TinyCC, try reinstalling ccache:

```bash
brew uninstall ccache
brew install ccache
```

## Using bun-debug

- Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging)
- Enable logging for a specific zig scope: `BUN_DEBUG_EventLoop=1 bun-debug ...` (to allow `std.log.scoped(.EventLoop)`)
- Bun transpiles every file it runs. To see the actual executed source in a debug build, find it in `/tmp/bun-debug-src/...path/to/file`; for example, the transpiled version of `/home/bun/index.ts` would be in `/tmp/bun-debug-src/home/bun/index.ts`.
609 Dockerfile (new file)
@@ -0,0 +1,609 @@
|
||||
# This Dockerfile is used by CI workflows to build Bun. It is not intended as a development
|
||||
# environment, or to be used as a base image for other projects.
|
||||
#
|
||||
# You likely want this image instead: https://hub.docker.com/r/oven/bun
|
||||
#
|
||||
# TODO: move this file to reduce confusion
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG GITHUB_WORKSPACE=/build
|
||||
ARG WEBKIT_DIR=${GITHUB_WORKSPACE}/bun-webkit
|
||||
ARG BUN_RELEASE_DIR=${GITHUB_WORKSPACE}/bun-release
|
||||
ARG BUN_DEPS_OUT_DIR=${GITHUB_WORKSPACE}/bun-deps
|
||||
ARG BUN_DIR=${GITHUB_WORKSPACE}/bun
|
||||
ARG CPU_TARGET=native
|
||||
ARG ARCH=x86_64
|
||||
ARG BUILD_MACHINE_ARCH=x86_64
|
||||
ARG BUILDARCH=amd64
|
||||
ARG TRIPLET=${ARCH}-linux-gnu
|
||||
ARG GIT_SHA=""
|
||||
ARG BUN_VERSION="bun-v1.1.4"
|
||||
ARG BUN_DOWNLOAD_URL_BASE="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/${BUN_VERSION}"
|
||||
ARG CANARY=0
|
||||
ARG ASSERTIONS=OFF
|
||||
ARG ZIG_OPTIMIZE=ReleaseFast
|
||||
ARG CMAKE_BUILD_TYPE=Release
|
||||
|
||||
ARG NODE_VERSION="20"
|
||||
ARG LLVM_VERSION="16"
|
||||
ARG ZIG_VERSION="0.12.0-dev.1828+225fe6ddb"
|
||||
|
||||
ARG SCCACHE_BUCKET
|
||||
ARG SCCACHE_REGION
|
||||
ARG SCCACHE_S3_USE_SSL
|
||||
ARG SCCACHE_ENDPOINT
|
||||
ARG AWS_ACCESS_KEY_ID
|
||||
ARG AWS_SECRET_ACCESS_KEY
|
||||
|
||||
FROM bitnami/minideb:bullseye as bun-base
|
||||
|
||||
ARG BUN_DOWNLOAD_URL_BASE
|
||||
ARG DEBIAN_FRONTEND
|
||||
ARG BUN_VERSION
|
||||
ARG NODE_VERSION
|
||||
ARG LLVM_VERSION
|
||||
ARG BUILD_MACHINE_ARCH
|
||||
ARG BUN_DIR
|
||||
ARG BUN_DEPS_OUT_DIR
|
||||
ARG CPU_TARGET
|
||||
|
||||
ENV CI 1
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
ENV BUILDARCH=${BUILDARCH}
|
||||
ENV BUN_DEPS_OUT_DIR=${BUN_DEPS_OUT_DIR}
|
||||
|
||||
ENV CXX=clang++-${LLVM_VERSION}
|
||||
ENV CC=clang-${LLVM_VERSION}
|
||||
ENV AR=/usr/bin/llvm-ar-${LLVM_VERSION}
|
||||
ENV LD=lld-${LLVM_VERSION}
|
||||
|
||||
ENV SCCACHE_BUCKET=${SCCACHE_BUCKET}
|
||||
ENV SCCACHE_REGION=${SCCACHE_REGION}
|
||||
ENV SCCACHE_S3_USE_SSL=${SCCACHE_S3_USE_SSL}
|
||||
ENV SCCACHE_ENDPOINT=${SCCACHE_ENDPOINT}
|
||||
ENV AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID}
|
||||
ENV AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY}
|
||||
|
||||
RUN install_packages \
|
||||
ca-certificates \
|
||||
curl \
|
||||
gnupg \
|
||||
&& echo "deb https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-${LLVM_VERSION} main" > /etc/apt/sources.list.d/llvm.list \
|
||||
&& echo "deb-src https://apt.llvm.org/bullseye/ llvm-toolchain-bullseye-${LLVM_VERSION} main" >> /etc/apt/sources.list.d/llvm.list \
|
||||
&& curl -fsSL "https://apt.llvm.org/llvm-snapshot.gpg.key" | apt-key add - \
|
||||
&& echo "deb https://deb.nodesource.com/node_${NODE_VERSION}.x nodistro main" > /etc/apt/sources.list.d/nodesource.list \
|
||||
&& curl -fsSL "https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key" | apt-key add - \
|
||||
&& echo "deb https://apt.kitware.com/ubuntu/ focal main" > /etc/apt/sources.list.d/kitware.list \
|
||||
&& curl -fsSL "https://apt.kitware.com/keys/kitware-archive-latest.asc" | apt-key add - \
|
||||
&& install_packages \
|
||||
wget \
|
||||
bash \
|
||||
software-properties-common \
|
||||
build-essential \
|
||||
autoconf \
|
||||
automake \
|
||||
libtool \
|
||||
pkg-config \
|
||||
clang-${LLVM_VERSION} \
|
||||
lld-${LLVM_VERSION} \
|
||||
lldb-${LLVM_VERSION} \
|
||||
clangd-${LLVM_VERSION} \
|
||||
libc++-${LLVM_VERSION}-dev \
|
||||
libc++abi-${LLVM_VERSION}-dev \
|
||||
make \
|
||||
cmake \
|
||||
ninja-build \
|
||||
file \
|
||||
libc-dev \
|
||||
libxml2 \
|
||||
libxml2-dev \
|
||||
xz-utils \
|
||||
git \
|
||||
tar \
|
||||
rsync \
|
||||
gzip \
|
||||
unzip \
|
||||
perl \
|
||||
python3 \
|
||||
ruby \
|
||||
ruby-dev \
|
||||
golang \
|
||||
nodejs && \
|
||||
for f in /usr/lib/llvm-${LLVM_VERSION}/bin/*; do ln -sf "$f" /usr/bin; done \
|
||||
&& ln -sf /usr/bin/clang-${LLVM_VERSION} /usr/bin/clang \
|
||||
&& ln -sf /usr/bin/clang++-${LLVM_VERSION} /usr/bin/clang++ \
|
||||
&& ln -sf /usr/bin/lld-${LLVM_VERSION} /usr/bin/lld \
|
||||
&& ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \
|
||||
&& ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \
|
||||
&& ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \
|
||||
&& arch="$(dpkg --print-architecture)" \
|
||||
&& case "${arch##*-}" in \
|
||||
amd64) variant="x64";; \
|
||||
arm64) variant="aarch64";; \
|
||||
*) echo "unsupported architecture: $arch"; exit 1 ;; \
|
||||
esac \
|
||||
&& wget "${BUN_DOWNLOAD_URL_BASE}/bun-linux-${variant}.zip" \
|
||||
&& unzip bun-linux-${variant}.zip \
|
||||
&& mv bun-linux-${variant}/bun /usr/bin/bun \
|
||||
&& ln -s /usr/bin/bun /usr/bin/bunx \
|
||||
&& rm -rf bun-linux-${variant} bun-linux-${variant}.zip \
|
||||
&& mkdir -p ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
|
||||
# && if [ -n "${SCCACHE_BUCKET}" ]; then \
|
||||
# echo "Setting up sccache" \
|
||||
# && wget https://github.com/mozilla/sccache/releases/download/v0.5.4/sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl.tar.gz \
|
||||
# && tar xf sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl.tar.gz \
|
||||
# && mv sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl/sccache /usr/bin/sccache \
|
||||
# && rm -rf sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl.tar.gz sccache-v0.5.4-${BUILD_MACHINE_ARCH}-unknown-linux-musl \
|
||||
|
||||
FROM bun-base as bun-base-with-zig
|
||||
|
||||
ARG ZIG_VERSION
|
||||
ARG BUILD_MACHINE_ARCH
|
||||
ARG ZIG_FOLDERNAME=zig-linux-${BUILD_MACHINE_ARCH}-${ZIG_VERSION}
|
||||
ARG ZIG_FILENAME=${ZIG_FOLDERNAME}.tar.xz
|
||||
ARG ZIG_URL="https://ziglang.org/builds/${ZIG_FILENAME}"
|
||||
ARG ZIG_LOCAL_CACHE_DIR=/zig-cache
|
||||
ENV ZIG_LOCAL_CACHE_DIR=${ZIG_LOCAL_CACHE_DIR}
|
||||
|
||||
WORKDIR $GITHUB_WORKSPACE
|
||||
|
||||
ADD $ZIG_URL .
|
||||
RUN tar xf ${ZIG_FILENAME} \
|
||||
&& mv ${ZIG_FOLDERNAME}/lib /usr/lib/zig \
|
||||
&& mv ${ZIG_FOLDERNAME}/zig /usr/bin/zig \
|
||||
&& rm -rf ${ZIG_FILENAME} ${ZIG_FOLDERNAME}
|
||||
|
||||
FROM bun-base as c-ares
|
||||
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/c-ares ${BUN_DIR}/src/deps/c-ares
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make c-ares \
|
||||
&& rm -rf ${BUN_DIR}/src/deps/c-ares ${BUN_DIR}/Makefile
|
||||
|
||||
FROM bun-base as lolhtml
|
||||
|
||||
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
|
||||
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/lol-html ${BUN_DIR}/src/deps/lol-html
|
||||
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
export PATH=$PATH:$HOME/.cargo/bin \
|
||||
&& cd ${BUN_DIR} \
|
||||
&& make lolhtml \
|
||||
&& rm -rf src/deps/lol-html Makefile
|
||||
|
||||
FROM bun-base as mimalloc
|
||||
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ARG ASSERTIONS
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
|
||||
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd ${BUN_DIR} \
|
||||
&& make mimalloc \
|
||||
&& rm -rf src/deps/mimalloc Makefile
|
||||
|
||||
FROM bun-base as mimalloc-debug
|
||||
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ARG ASSERTIONS
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/mimalloc ${BUN_DIR}/src/deps/mimalloc
|
||||
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd ${BUN_DIR} \
|
||||
&& make mimalloc-debug \
|
||||
&& rm -rf src/deps/mimalloc Makefile
|
||||
|
||||
FROM bun-base as zlib
|
||||
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/zlib ${BUN_DIR}/src/deps/zlib
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make zlib \
|
||||
&& rm -rf src/deps/zlib Makefile
|
||||
|
||||
FROM bun-base as libarchive
|
||||
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
RUN install_packages autoconf automake libtool pkg-config
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/libarchive ${BUN_DIR}/src/deps/libarchive
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make libarchive \
|
||||
&& rm -rf src/deps/libarchive Makefile
|
||||
|
||||
FROM bun-base as tinycc
|
||||
|
||||
ARG BUN_DEPS_OUT_DIR
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
RUN install_packages libtcc-dev && cp /usr/lib/$(uname -m)-linux-gnu/libtcc.a ${BUN_DEPS_OUT_DIR}
|
||||
|
||||
FROM bun-base as boringssl
|
||||
|
||||
RUN install_packages golang
|
||||
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/boringssl ${BUN_DIR}/src/deps/boringssl
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd ${BUN_DIR} \
|
||||
&& make boringssl \
|
||||
&& rm -rf src/deps/boringssl Makefile
|
||||
|
||||
FROM bun-base as base64
|
||||
|
||||
ARG BUN_DIR
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/base64 ${BUN_DIR}/src/deps/base64
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN cd $BUN_DIR && \
|
||||
make base64 && rm -rf src/deps/base64 Makefile
|
||||
|
||||
FROM bun-base as zstd
|
||||
|
||||
ARG BUN_DIR
|
||||
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/zstd ${BUN_DIR}/src/deps/zstd
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make zstd
|
||||
|
||||
FROM bun-base as ls-hpack
|
||||
|
||||
ARG BUN_DIR
|
||||
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY Makefile ${BUN_DIR}/Makefile
|
||||
COPY src/deps/ls-hpack ${BUN_DIR}/src/deps/ls-hpack
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& make lshpack
|
||||
|
||||
FROM bun-base-with-zig as bun-identifier-cache
|
||||
|
||||
ARG DEBIAN_FRONTEND
|
||||
ARG GITHUB_WORKSPACE
|
||||
ARG CPU_TARGET
|
||||
ARG BUN_DIR
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
COPY src/js_lexer/identifier_data.zig ${BUN_DIR}/src/js_lexer/identifier_data.zig
|
||||
COPY src/js_lexer/identifier_cache.zig ${BUN_DIR}/src/js_lexer/identifier_cache.zig
|
||||
|
||||
RUN --mount=type=cache,target=${ZIG_LOCAL_CACHE_DIR} \
|
||||
cd $BUN_DIR \
|
||||
&& zig run src/js_lexer/identifier_data.zig
|
||||
|
||||
FROM bun-base as bun-node-fallbacks
|
||||
|
||||
ARG BUN_DIR
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
COPY src/node-fallbacks ${BUN_DIR}/src/node-fallbacks
|
||||
|
||||
RUN cd $BUN_DIR/src/node-fallbacks \
|
||||
&& bun install --frozen-lockfile \
|
||||
&& bun run build \
|
||||
&& rm -rf src/node-fallbacks/node_modules
|
||||
|
||||
FROM bun-base as bun-webkit
|
||||
|
||||
ARG BUILDARCH
|
||||
ARG ASSERTIONS
|
||||
|
||||
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
|
||||
|
||||
RUN mkdir ${BUN_DIR}/bun-webkit \
|
||||
&& WEBKIT_TAG=$(grep 'set(WEBKIT_TAG' "${BUN_DIR}/CMakeLists.txt" | awk '{print $2}' | cut -f 1 -d ')') \
|
||||
&& WEBKIT_SUFFIX=$(if [ "${ASSERTIONS}" = "ON" ]; then echo "debug"; else echo "lto"; fi) \
|
||||
&& WEBKIT_URL="https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_TAG}/bun-webkit-linux-${BUILDARCH}-${WEBKIT_SUFFIX}.tar.gz" \
|
||||
&& echo "Downloading ${WEBKIT_URL}" \
|
||||
&& curl -fsSL "${WEBKIT_URL}" | tar -xz -C ${BUN_DIR}/bun-webkit --strip-components=1
|
||||
|
||||
FROM bun-base as bun-cpp-objects
|
||||
|
||||
ARG CANARY
|
||||
ARG ASSERTIONS
|
||||
|
||||
COPY --from=bun-webkit ${BUN_DIR}/bun-webkit ${BUN_DIR}/bun-webkit
|
||||
|
||||
COPY packages ${BUN_DIR}/packages
|
||||
COPY src ${BUN_DIR}/src
|
||||
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
|
||||
COPY src/deps/boringssl/include ${BUN_DIR}/src/deps/boringssl/include
|
||||
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} mkdir ${BUN_DIR}/build \
|
||||
&& cd ${BUN_DIR}/build \
|
||||
&& mkdir -p tmp_modules tmp_functions js codegen \
|
||||
&& cmake .. -GNinja -DCMAKE_BUILD_TYPE=Release -DUSE_LTO=ON -DUSE_DEBUG_JSC=${ASSERTIONS} -DBUN_CPP_ONLY=1 -DWEBKIT_DIR=/build/bun/bun-webkit -DCANARY=${CANARY} -DZIG_COMPILER=system \
|
||||
&& bash compile-cpp-only.sh -v
|
||||
|
||||
FROM bun-base-with-zig as bun-codegen-for-zig
|
||||
|
||||
COPY package.json bun.lockb Makefile .gitmodules ${BUN_DIR}/
|
||||
COPY src/runtime ${BUN_DIR}/src/runtime
|
||||
COPY src/runtime.js src/runtime.bun.js ${BUN_DIR}/src/
|
||||
COPY packages/bun-error ${BUN_DIR}/packages/bun-error
|
||||
COPY src/fallback.ts ${BUN_DIR}/src/fallback.ts
|
||||
COPY src/api ${BUN_DIR}/src/api
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
# TODO: move away from Makefile entirely
|
||||
RUN --mount=type=cache,target=${ZIG_LOCAL_CACHE_DIR} \
|
||||
bun install --frozen-lockfile \
|
||||
&& make runtime_js fallback_decoder bun_error \
|
||||
&& rm -rf src/runtime src/fallback.ts node_modules bun.lockb package.json Makefile
|
||||
|
||||
FROM bun-base-with-zig as bun-compile-zig-obj
|
||||
|
||||
ARG ZIG_PATH
|
||||
ARG TRIPLET
|
||||
ARG GIT_SHA
|
||||
ARG CPU_TARGET
|
||||
ARG CANARY=0
|
||||
ARG ASSERTIONS=OFF
|
||||
ARG ZIG_OPTIMIZE=ReleaseFast
|
||||
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
|
||||
COPY *.zig package.json CMakeLists.txt ${BUN_DIR}/
|
||||
COPY completions ${BUN_DIR}/completions
|
||||
COPY packages ${BUN_DIR}/packages
|
||||
COPY src ${BUN_DIR}/src
|
||||
|
||||
COPY --from=bun-identifier-cache ${BUN_DIR}/src/js_lexer/*.blob ${BUN_DIR}/src/js_lexer/
|
||||
COPY --from=bun-node-fallbacks ${BUN_DIR}/src/node-fallbacks/out ${BUN_DIR}/src/node-fallbacks/out
|
||||
COPY --from=bun-codegen-for-zig ${BUN_DIR}/src/*.out.js ${BUN_DIR}/src/*.out.refresh.js ${BUN_DIR}/src/
|
||||
COPY --from=bun-codegen-for-zig ${BUN_DIR}/packages/bun-error/dist ${BUN_DIR}/packages/bun-error/dist
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
--mount=type=cache,target=${ZIG_LOCAL_CACHE_DIR} \
|
||||
mkdir -p build \
|
||||
&& bun run $BUN_DIR/src/codegen/bundle-modules.ts --debug=OFF $BUN_DIR/build \
|
||||
&& cd build \
|
||||
&& cmake .. \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DUSE_LTO=ON \
|
||||
-DZIG_OPTIMIZE="${ZIG_OPTIMIZE}" \
|
||||
-DCPU_TARGET="${CPU_TARGET}" \
|
||||
-DZIG_TARGET="${TRIPLET}" \
|
||||
-DWEBKIT_DIR="omit" \
|
||||
-DNO_CONFIGURE_DEPENDS=1 \
|
||||
-DNO_CODEGEN=1 \
|
||||
-DBUN_ZIG_OBJ="/tmp/bun-zig.o" \
|
||||
-DCANARY="${CANARY}" \
|
||||
-DZIG_COMPILER=system \
|
||||
-DZIG_LIB_DIR=$BUN_DIR/src/deps/zig/lib \
|
||||
&& ONLY_ZIG=1 ninja "/tmp/bun-zig.o" -v
|
||||
|
||||
FROM scratch as build_release_obj
|
||||
|
||||
ARG CPU_TARGET
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
|
||||
COPY --from=bun-compile-zig-obj /tmp/bun-zig.o /
|
||||
|
||||
FROM bun-base as bun-link
|
||||
|
||||
ARG CPU_TARGET
|
||||
ARG CANARY
|
||||
ARG ASSERTIONS
|
||||
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
ARG ZIG_LOCAL_CACHE_DIR=/zig-cache
|
||||
ENV ZIG_LOCAL_CACHE_DIR=${ZIG_LOCAL_CACHE_DIR}
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN mkdir -p build bun-webkit
|
||||
|
||||
# lol
|
||||
COPY src/bun.js/bindings/sqlite/sqlite3.c ${BUN_DIR}/src/bun.js/bindings/sqlite/sqlite3.c
|
||||
|
||||
COPY src/symbols.dyn src/linker.lds ${BUN_DIR}/src/
|
||||
|
||||
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
|
||||
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=mimalloc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=zstd ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=ls-hpack ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=bun-compile-zig-obj /tmp/bun-zig.o ${BUN_DIR}/build/bun-zig.o
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/build/bun-cpp-objects.a ${BUN_DIR}/build/bun-cpp-objects.a
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/bun-webkit/lib ${BUN_DIR}/bun-webkit/lib
|
||||
|
||||
WORKDIR $BUN_DIR/build
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
--mount=type=cache,target=${ZIG_LOCAL_CACHE_DIR} \
|
||||
cmake .. \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DBUN_LINK_ONLY=1 \
|
||||
-DBUN_ZIG_OBJ="${BUN_DIR}/build/bun-zig.o" \
|
||||
-DUSE_LTO=ON \
|
||||
-DUSE_DEBUG_JSC=${ASSERTIONS} \
|
||||
-DBUN_CPP_ARCHIVE="${BUN_DIR}/build/bun-cpp-objects.a" \
|
||||
-DWEBKIT_DIR="${BUN_DIR}/bun-webkit" \
|
||||
-DBUN_DEPS_OUT_DIR="${BUN_DEPS_OUT_DIR}" \
|
||||
-DCPU_TARGET="${CPU_TARGET}" \
|
||||
-DNO_CONFIGURE_DEPENDS=1 \
|
||||
-DCANARY="${CANARY}" \
|
||||
-DZIG_COMPILER=system \
|
||||
&& ninja -v \
|
||||
&& ./bun --revision \
|
||||
&& mkdir -p /build/out \
|
||||
&& mv bun bun-profile /build/out \
|
||||
&& rm -rf ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
|
||||
|
||||
FROM scratch as artifact
|
||||
|
||||
COPY --from=bun-link /build/out /
|
||||
|
||||
FROM bun-base as bun-link-assertions
|
||||
|
||||
ARG CPU_TARGET
|
||||
ARG CANARY
|
||||
ARG ASSERTIONS
|
||||
|
||||
ENV CPU_TARGET=${CPU_TARGET}
|
||||
ARG CCACHE_DIR=/ccache
|
||||
ENV CCACHE_DIR=${CCACHE_DIR}
|
||||
ARG ZIG_LOCAL_CACHE_DIR=/zig-cache
|
||||
ENV ZIG_LOCAL_CACHE_DIR=${ZIG_LOCAL_CACHE_DIR}
|
||||
|
||||
WORKDIR $BUN_DIR
|
||||
|
||||
RUN mkdir -p build bun-webkit
|
||||
|
||||
# lol
|
||||
COPY src/bun.js/bindings/sqlite/sqlite3.c ${BUN_DIR}/src/bun.js/bindings/sqlite/sqlite3.c
|
||||
|
||||
COPY src/symbols.dyn src/linker.lds ${BUN_DIR}/src/
|
||||
|
||||
COPY CMakeLists.txt ${BUN_DIR}/CMakeLists.txt
|
||||
COPY --from=zlib ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=base64 ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=libarchive ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=boringssl ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=lolhtml ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=mimalloc-debug ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=zstd ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=tinycc ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=c-ares ${BUN_DEPS_OUT_DIR}/* ${BUN_DEPS_OUT_DIR}/
|
||||
COPY --from=bun-compile-zig-obj /tmp/bun-zig.o ${BUN_DIR}/build/bun-zig.o
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/build/bun-cpp-objects.a ${BUN_DIR}/build/bun-cpp-objects.a
|
||||
COPY --from=bun-cpp-objects ${BUN_DIR}/bun-webkit/lib ${BUN_DIR}/bun-webkit/lib
|
||||
|
||||
WORKDIR $BUN_DIR/build
|
||||
|
||||
RUN --mount=type=cache,target=${CCACHE_DIR} \
|
||||
--mount=type=cache,target=${ZIG_LOCAL_CACHE_DIR} \
|
||||
cmake .. \
|
||||
-G Ninja \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DBUN_LINK_ONLY=1 \
|
||||
-DBUN_ZIG_OBJ="${BUN_DIR}/build/bun-zig.o" \
|
||||
-DUSE_DEBUG_JSC=ON \
|
||||
-DBUN_CPP_ARCHIVE="${BUN_DIR}/build/bun-cpp-objects.a" \
|
||||
-DWEBKIT_DIR="${BUN_DIR}/bun-webkit" \
|
||||
-DBUN_DEPS_OUT_DIR="${BUN_DEPS_OUT_DIR}" \
|
||||
-DCPU_TARGET="${CPU_TARGET}" \
|
||||
-DNO_CONFIGURE_DEPENDS=1 \
|
||||
-DCANARY="${CANARY}" \
|
||||
-DZIG_COMPILER=system \
|
||||
-DUSE_LTO=ON \
|
||||
&& ninja -v \
|
||||
&& ./bun --revision \
|
||||
&& mkdir -p /build/out \
|
||||
&& mv bun bun-profile /build/out \
|
||||
&& rm -rf ${BUN_DIR} ${BUN_DEPS_OUT_DIR}
|
||||
|
||||
FROM scratch as artifact-assertions
|
||||
|
||||
COPY --from=bun-link-assertions /build/out /
|
||||
LICENSE.md
@@ -1,73 +0,0 @@
Bun itself is MIT-licensed.

## JavaScriptCore

Bun statically links JavaScriptCore (and WebKit), which is LGPL-2-licensed. WebCore files from WebKit are also licensed under LGPL-2. Per LGPL-2:

> (1) If you statically link against an LGPL’d library, you must also provide your application in an object (not necessarily source) format, so that a user has the opportunity to modify the library and relink the application.

You can find the patched version of WebKit used by Bun here: <https://github.com/oven-sh/webkit>. If you would like to relink Bun with changes:

- `git submodule update --init --recursive`
- `make jsc`
- `zig build`

This compiles JavaScriptCore, compiles Bun’s `.cpp` bindings for JavaScriptCore (which are the object files using JavaScriptCore), and outputs a new `bun` binary with your changes.
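Taken together, a relink from a fresh checkout looks roughly like the following. This is a minimal sketch, assuming the WebKit submodule has been cloned and the toolchain the Makefile expects (cmake, ninja, zig, cargo) is installed; the output path of the rebuilt binary is an assumption, so check your build output.

```sh
# Sketch only — exact targets and output paths may differ from your checkout.
git clone https://github.com/oven-sh/bun.git && cd bun
git submodule update --init --recursive   # pulls the patched WebKit (a large download)
make jsc                                  # builds JavaScriptCore and Bun's .cpp bindings
zig build                                 # relinks a new `bun` binary with your changes
./zig-out/bin/bun --version               # assumed output location; verify against the build log
```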

## Linked libraries

Bun statically links these libraries:

| Library | License |
|---------|---------|
| [`boringssl`](https://boringssl.googlesource.com/boringssl/) | [several licenses](https://boringssl.googlesource.com/boringssl/+/refs/heads/master/LICENSE) |
| [`brotli`](https://github.com/google/brotli) | MIT |
| [`libarchive`](https://github.com/libarchive/libarchive) | [several licenses](https://github.com/libarchive/libarchive/blob/master/COPYING) |
| [`lol-html`](https://github.com/cloudflare/lol-html/tree/master/c-api) | BSD 3-Clause |
| [`mimalloc`](https://github.com/microsoft/mimalloc) | MIT |
| [`picohttp`](https://github.com/h2o/picohttpparser) | dual-licensed under the Perl License or the MIT License |
| [`zstd`](https://github.com/facebook/zstd) | dual-licensed under the BSD License or GPLv2 license |
| [`simdutf`](https://github.com/simdutf/simdutf) | Apache 2.0 |
| [`tinycc`](https://github.com/tinycc/tinycc) | LGPL v2.1 |
| [`uSockets`](https://github.com/uNetworking/uSockets) | Apache 2.0 |
| [`zlib-cloudflare`](https://github.com/cloudflare/zlib) | zlib |
| [`c-ares`](https://github.com/c-ares/c-ares) | MIT licensed |
| [`libicu`](https://github.com/unicode-org/icu) 72 | [license here](https://github.com/unicode-org/icu/blob/main/icu4c/LICENSE) |
| [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE) | BSD 2-Clause |
| [`libuv`](https://github.com/libuv/libuv) (on Windows) | MIT |
| [`libdeflate`](https://github.com/ebiggers/libdeflate) | MIT |
| A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets) | Apache 2.0 licensed |
| Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1) | Apache 2.0 licensed |

## Polyfills

For compatibility reasons, the following packages are embedded into Bun's binary and injected if imported.

| Package | License |
|---------|---------|
| [`assert`](https://npmjs.com/package/assert) | MIT |
| [`browserify-zlib`](https://npmjs.com/package/browserify-zlib) | MIT |
| [`buffer`](https://npmjs.com/package/buffer) | MIT |
| [`constants-browserify`](https://npmjs.com/package/constants-browserify) | MIT |
| [`crypto-browserify`](https://npmjs.com/package/crypto-browserify) | MIT |
| [`domain-browser`](https://npmjs.com/package/domain-browser) | MIT |
| [`events`](https://npmjs.com/package/events) | MIT |
| [`https-browserify`](https://npmjs.com/package/https-browserify) | MIT |
| [`os-browserify`](https://npmjs.com/package/os-browserify) | MIT |
| [`path-browserify`](https://npmjs.com/package/path-browserify) | MIT |
| [`process`](https://npmjs.com/package/process) | MIT |
| [`punycode`](https://npmjs.com/package/punycode) | MIT |
| [`querystring-es3`](https://npmjs.com/package/querystring-es3) | MIT |
| [`stream-browserify`](https://npmjs.com/package/stream-browserify) | MIT |
| [`stream-http`](https://npmjs.com/package/stream-http) | MIT |
| [`string_decoder`](https://npmjs.com/package/string_decoder) | MIT |
| [`timers-browserify`](https://npmjs.com/package/timers-browserify) | MIT |
| [`tty-browserify`](https://npmjs.com/package/tty-browserify) | MIT |
| [`url`](https://npmjs.com/package/url) | MIT |
| [`util`](https://npmjs.com/package/util) | MIT |
| [`vm-browserify`](https://npmjs.com/package/vm-browserify) | MIT |

## Additional credits

- Bun's JS transpiler, CSS lexer, and Node.js module resolver source code is a Zig port of [@evanw](https://github.com/evanw)’s [esbuild](https://github.com/evanw/esbuild) project.
- Credit to [@kipply](https://github.com/kipply) for the name "Bun"!

Makefile
@@ -1,10 +1,3 @@
|
||||
# ------------------------------------------------------------
|
||||
# WARNING
|
||||
# ------------------------------------------------------------
|
||||
# This file is very old and will be removed soon!
|
||||
# You can build Bun using `cmake` or `bun run build`
|
||||
# ------------------------------------------------------------
|
||||
|
||||
SHELL := $(shell which bash) # Use bash syntax to be consistent
|
||||
|
||||
OS_NAME := $(shell uname -s | tr '[:upper:]' '[:lower:]')
|
||||
@@ -33,11 +26,8 @@ ifeq ($(ARCH_NAME_RAW),arm64)
|
||||
ARCH_NAME = aarch64
|
||||
DOCKER_BUILDARCH = arm64
|
||||
BREW_PREFIX_PATH = /opt/homebrew
|
||||
DEFAULT_MIN_MACOS_VERSION = 13.0
|
||||
DEFAULT_MIN_MACOS_VERSION = 11.0
|
||||
MARCH_NATIVE = -mtune=$(CPU_TARGET)
|
||||
ifeq ($(OS_NAME),linux)
|
||||
MARCH_NATIVE = -march=armv8-a+crc -mtune=ampere1
|
||||
endif
|
||||
else
|
||||
ARCH_NAME = x64
|
||||
DOCKER_BUILDARCH = amd64
|
||||
@@ -77,7 +67,7 @@ BUN_RELEASE_BIN = $(PACKAGE_DIR)/bun
|
||||
PRETTIER ?= $(shell which prettier 2>/dev/null || echo "./node_modules/.bin/prettier")
|
||||
ESBUILD = "$(shell which esbuild 2>/dev/null || echo "./node_modules/.bin/esbuild")"
|
||||
DSYMUTIL ?= $(shell which dsymutil 2>/dev/null || which dsymutil-15 2>/dev/null)
|
||||
WEBKIT_DIR ?= $(realpath vendor/WebKit)
|
||||
WEBKIT_DIR ?= $(realpath src/bun.js/WebKit)
|
||||
WEBKIT_RELEASE_DIR ?= $(WEBKIT_DIR)/WebKitBuild/Release
|
||||
WEBKIT_DEBUG_DIR ?= $(WEBKIT_DIR)/WebKitBuild/Debug
|
||||
WEBKIT_RELEASE_DIR_LTO ?= $(WEBKIT_DIR)/WebKitBuild/ReleaseLTO
|
||||
@@ -138,8 +128,8 @@ endif
|
||||
SED = $(shell which gsed 2>/dev/null || which sed 2>/dev/null)
|
||||
|
||||
BUN_DIR ?= $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
|
||||
BUN_DEPS_DIR ?= $(shell pwd)/vendor
|
||||
BUN_DEPS_OUT_DIR ?= $(shell pwd)/build/release
|
||||
BUN_DEPS_DIR ?= $(shell pwd)/src/deps
|
||||
BUN_DEPS_OUT_DIR ?= $(BUN_DEPS_DIR)
|
||||
CPU_COUNT = 2
|
||||
ifeq ($(OS_NAME),darwin)
|
||||
CPU_COUNT = $(shell sysctl -n hw.logicalcpu)
|
||||
@@ -164,12 +154,7 @@ CMAKE_FLAGS_WITHOUT_RELEASE = -DCMAKE_C_COMPILER=$(CC) \
|
||||
-DCMAKE_OSX_DEPLOYMENT_TARGET=$(MIN_MACOS_VERSION) \
|
||||
$(CMAKE_CXX_COMPILER_LAUNCHER_FLAG) \
|
||||
-DCMAKE_AR=$(AR) \
|
||||
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null) \
|
||||
-DCMAKE_CXX_STANDARD=20 \
|
||||
-DCMAKE_C_STANDARD=17 \
|
||||
-DCMAKE_CXX_STANDARD_REQUIRED=ON \
|
||||
-DCMAKE_C_STANDARD_REQUIRED=ON \
|
||||
-DCMAKE_CXX_EXTENSIONS=ON
|
||||
-DCMAKE_RANLIB=$(which llvm-16-ranlib 2>/dev/null || which llvm-ranlib 2>/dev/null)
|
||||
|
||||
|
||||
|
||||
@@ -196,8 +181,8 @@ endif
|
||||
|
||||
OPTIMIZATION_LEVEL=-O3 $(MARCH_NATIVE)
|
||||
DEBUG_OPTIMIZATION_LEVEL= -O1 $(MARCH_NATIVE) -gdwarf-4
|
||||
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic
|
||||
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden -mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -fno-asynchronous-unwind-tables -fno-unwind-tables -fno-pie -fno-pic
|
||||
CFLAGS_WITHOUT_MARCH = $(MACOS_MIN_FLAG) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
|
||||
BUN_CFLAGS = $(MACOS_MIN_FLAG) $(MARCH_NATIVE) $(OPTIMIZATION_LEVEL) -fno-exceptions -fvisibility=hidden -fvisibility-inlines-hidden
|
||||
BUN_TMP_DIR := /tmp/make-bun
|
||||
CFLAGS=$(CFLAGS_WITHOUT_MARCH) $(MARCH_NATIVE)
|
||||
|
||||
@@ -246,17 +231,14 @@ _MIMALLOC_LINK = $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE)
|
||||
DEFAULT_LINKER_FLAGS =
|
||||
|
||||
JSC_BUILD_STEPS :=
|
||||
JSC_BUILD_STEPS_DEBUG :=
|
||||
ifeq ($(OS_NAME),linux)
|
||||
JSC_BUILD_STEPS += jsc-build-linux
|
||||
JSC_BUILD_STEPS_DEBUG += jsc-build-linux-debug
|
||||
_MIMALLOC_LINK = $(BUN_DEPS_OUT_DIR)/$(MIMALLOC_FILE)
|
||||
DEFAULT_LINKER_FLAGS= -pthread -ldl
|
||||
endif
|
||||
ifeq ($(OS_NAME),darwin)
|
||||
_MIMALLOC_OBJECT_FILE = 0
|
||||
JSC_BUILD_STEPS += jsc-build-mac jsc-copy-headers
|
||||
JSC_BUILD_STEPS_DEBUG += jsc-build-mac-debug
|
||||
_MIMALLOC_FILE = libmimalloc.a
|
||||
_MIMALLOC_INPUT_PATH = libmimalloc.a
|
||||
_MIMALLOC_LINK = -lmimalloc
|
||||
@@ -373,7 +355,7 @@ ifeq ($(OS_NAME),linux)
|
||||
endif
|
||||
|
||||
ifeq ($(OS_NAME),darwin)
|
||||
MACOS_MIN_FLAG=-mmacos-version-min=$(MIN_MACOS_VERSION)
|
||||
MACOS_MIN_FLAG=-mmacosx-version-min=$(MIN_MACOS_VERSION)
|
||||
POSIX_PKG_MANAGER=brew
|
||||
INCLUDE_DIRS += $(MAC_INCLUDE_DIRS)
|
||||
endif
|
||||
@@ -464,7 +446,8 @@ MINIMUM_ARCHIVE_FILES = -L$(BUN_DEPS_OUT_DIR) \
|
||||
-ldecrepit \
|
||||
-lssl \
|
||||
-lcrypto \
|
||||
-llolhtml
|
||||
-llolhtml \
|
||||
-lbase64
|
||||
|
||||
ARCHIVE_FILES_WITHOUT_LIBCRYPTO = $(MINIMUM_ARCHIVE_FILES) \
|
||||
-larchive \
|
||||
@@ -689,10 +672,19 @@ assert-deps:
|
||||
@test $(shell cargo --version | awk '{print $$2}' | cut -d. -f2) -gt 57 || (echo -e "ERROR: cargo version must be at least 1.57."; exit 1)
|
||||
@echo "You have the dependencies installed! Woo"
|
||||
|
||||
# The following allows you to run `make submodule` to update or init submodules, but we will exclude WebKit
# unless you explicitly clone it yourself (a huge download)
|
||||
SUBMODULE_NAMES=$(shell cat .gitmodules | grep 'path = ' | awk '{print $$3}')
|
||||
ifeq ("$(wildcard src/bun.js/WebKit/.git)", "")
|
||||
SUBMODULE_NAMES := $(filter-out src/bun.js/WebKit, $(SUBMODULE_NAMES))
|
||||
endif
|
||||
|
||||
.PHONY: init-submodules
|
||||
init-submodules: submodule # (backwards-compatibility alias)
|
||||
|
||||
.PHONY: submodule
|
||||
submodule: ## to init or update all submodules
|
||||
git submodule update --init --recursive --progress --depth=1 --checkout $(SUBMODULE_NAMES)
|
||||
|
||||
.PHONY: build-obj
|
||||
build-obj:
|
||||
@@ -795,7 +787,7 @@ cls:
|
||||
@echo -e "\n\n---\n\n"
|
||||
|
||||
jsc-check:
|
||||
@ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo -e "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. \n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/vendor/WebKit -f $(shell pwd)/vendor/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1)
|
||||
@ls $(JSC_BASE_DIR) >/dev/null 2>&1 || (echo -e "Failed to access WebKit build. Please compile the WebKit submodule using the Dockerfile at $(shell pwd)/src/javascript/WebKit/Dockerfile and then copy from /output in the Docker container to $(JSC_BASE_DIR). You can override the directory via JSC_BASE_DIR. \n\n DOCKER_BUILDKIT=1 docker build -t bun-webkit $(shell pwd)/src/bun.js/WebKit -f $(shell pwd)/src/bun.js/WebKit/Dockerfile --progress=plain\n\n docker container create bun-webkit\n\n # Get the container ID\n docker container ls\n\n docker cp DOCKER_CONTAINER_ID_YOU_JUST_FOUND:/output $(JSC_BASE_DIR)" && exit 1)
|
||||
@ls $(JSC_INCLUDE_DIR) >/dev/null 2>&1 || (echo "Failed to access WebKit include directory at $(JSC_INCLUDE_DIR)." && exit 1)
|
||||
@ls $(JSC_LIB) >/dev/null 2>&1 || (echo "Failed to access WebKit lib directory at $(JSC_LIB)." && exit 1)
|
||||
|
||||
@@ -925,18 +917,14 @@ bun-codesign-release-local-debug:
|
||||
|
||||
.PHONY: jsc
|
||||
jsc: jsc-build jsc-copy-headers jsc-bindings
|
||||
.PHONY: jsc-debug
|
||||
jsc-debug: jsc-build-debug
|
||||
.PHONY: jsc-build
|
||||
jsc-build: $(JSC_BUILD_STEPS)
|
||||
.PHONY: jsc-build-debug
|
||||
jsc-build-debug: $(JSC_BUILD_STEPS_DEBUG)
|
||||
.PHONY: jsc-bindings
|
||||
jsc-bindings: headers bindings
|
||||
|
||||
.PHONY: clone-submodules
|
||||
clone-submodules:
|
||||
git -c submodule."vendor/WebKit".update=none submodule update --init --recursive --depth=1 --progress
|
||||
git -c submodule."src/bun.js/WebKit".update=none submodule update --init --recursive --depth=1 --progress
|
||||
|
||||
|
||||
.PHONY: headers
|
||||
@@ -1187,57 +1175,6 @@ jsc-copy-headers:
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/API/JSWeakValue.h $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSWeakValue.h
|
||||
find $(WEBKIT_RELEASE_DIR)/JavaScriptCore/Headers/JavaScriptCore/ -name "*.h" -exec cp {} $(WEBKIT_RELEASE_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ \;
|
||||
|
||||
jsc-copy-headers-debug:
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/heap/WeakHandleOwner.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/WeakHandleOwner.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/LazyClassStructureInlines.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/LazyClassStructureInlines.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/LazyPropertyInlines.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/LazyPropertyInlines.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSTypedArrayViewPrototype.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSTypedArrayViewPrototype.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSTypedArrayPrototypes.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSTypedArrayPrototypes.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSModuleNamespaceObject.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSModuleNamespaceObject.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/JIT.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JIT.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StructureStubInfo.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StructureStubInfo.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/AccessCase.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AccessCase.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/ObjectPropertyConditionSet.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ObjectPropertyConditionSet.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/PolyProtoAccessChain.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/PolyProtoAccessChain.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/InlineCacheCompiler.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/InlineCacheCompiler.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StructureStubClearingWatchpoint.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StructureStubClearingWatchpoint.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/AdaptiveInferredPropertyValueWatchpointBase.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AdaptiveInferredPropertyValueWatchpointBase.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/StubInfoSummary.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/StubInfoSummary.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/CommonSlowPaths.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/CommonSlowPaths.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/DirectArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/DirectArguments.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GenericArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GenericArguments.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SamplingProfiler.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SamplingProfiler.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/ScopedArguments.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ScopedArguments.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSLexicalEnvironment.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSLexicalEnvironment.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/JITDisassembler.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JITDisassembler.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/JITInlineCacheGenerator.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JITInlineCacheGenerator.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/JITMathIC.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JITMathIC.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/JITAddGenerator.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JITAddGenerator.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/JITMathICInlineResult.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JITMathICInlineResult.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/SnippetOperand.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SnippetOperand.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/JITMulGenerator.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JITMulGenerator.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/JITNegGenerator.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JITNegGenerator.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/JITSubGenerator.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JITSubGenerator.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/Repatch.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/Repatch.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/JITRightShiftGenerator.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JITRightShiftGenerator.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/JITBitBinaryOpGenerator.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JITBitBinaryOpGenerator.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/jit/JSInterfaceJIT.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSInterfaceJIT.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/llint/LLIntData.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/LLIntData.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/FunctionCodeBlock.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/FunctionCodeBlock.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/dfg/DFGAbstractHeap.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/DFGAbstractHeap.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/OperandsInlines.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/OperandsInlines.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/Operands.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/Operands.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/domjit/DOMJITHeapRange.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/DOMJITHeapRange.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GeneratorPrototype.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GeneratorPrototype.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/GeneratorFunctionPrototype.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/GeneratorFunctionPrototype.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/AsyncFunctionPrototype.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AsyncFunctionPrototype.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/SymbolObject.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/SymbolObject.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/JSGenerator.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSGenerator.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/bytecode/UnlinkedFunctionCodeBlock.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/UnlinkedFunctionCodeBlock.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/runtime/AggregateError.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/AggregateError.h
|
||||
cp $(WEBKIT_DIR)/Source/JavaScriptCore/API/JSWeakValue.h $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/JSWeakValue.h
|
||||
find $(WEBKIT_DEBUG_DIR)/JavaScriptCore/Headers/JavaScriptCore/ -name "*.h" -exec cp {} $(WEBKIT_DEBUG_DIR)/JavaScriptCore/PrivateHeaders/JavaScriptCore/ \;
|
||||
|
||||
# This is a workaround for a JSC bug that impacts aarch64
|
||||
# on macOS, it never requests JIT permissions
|
||||
.PHONY: jsc-force-fastjit
|
||||
@@ -1256,7 +1193,7 @@ jsc-build-mac-compile:
|
||||
-DENABLE_STATIC_JSC=ON \
|
||||
-DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON \
|
||||
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
|
||||
-DCMAKE_BUILD_TYPE=RelWithDebInfo \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DUSE_THIN_ARCHIVES=OFF \
|
||||
-DBUN_FAST_TLS=ON \
|
||||
-DENABLE_FTL_JIT=ON \
|
||||
@@ -1268,7 +1205,7 @@ jsc-build-mac-compile:
|
||||
$(WEBKIT_DIR) \
|
||||
$(WEBKIT_RELEASE_DIR) && \
|
||||
CFLAGS="$(CFLAGS) -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -ffat-lto-objects" \
|
||||
cmake --build $(WEBKIT_RELEASE_DIR) --config RelWithDebInfo --target jsc
|
||||
cmake --build $(WEBKIT_RELEASE_DIR) --config Release --target jsc
|
||||
|
||||
.PHONY: jsc-build-mac-compile-lto
|
||||
jsc-build-mac-compile-lto:
|
||||
@@ -1307,7 +1244,6 @@ jsc-build-mac-compile-debug:
|
||||
-DCMAKE_BUILD_TYPE=Debug \
|
||||
-DUSE_THIN_ARCHIVES=OFF \
|
||||
-DENABLE_FTL_JIT=ON \
|
||||
-DENABLE_MALLOC_HEAP_BREAKDOWN=ON \
|
||||
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
|
||||
-DUSE_BUN_JSC_ADDITIONS=ON \
|
||||
-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON \
|
||||
@@ -1346,31 +1282,8 @@ jsc-build-linux-compile-config:
|
||||
$(WEBKIT_DIR) \
|
||||
$(WEBKIT_RELEASE_DIR)
|
||||
|
||||
.PHONY: jsc-build-linux-compile-config-debug
|
||||
jsc-build-linux-compile-config-debug:
|
||||
mkdir -p $(WEBKIT_DEBUG_DIR)
|
||||
cd $(WEBKIT_DEBUG_DIR) && \
|
||||
cmake \
|
||||
-DPORT="JSCOnly" \
|
||||
-DENABLE_STATIC_JSC=ON \
|
||||
-DCMAKE_BUILD_TYPE=Debug \
|
||||
-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON \
|
||||
-DUSE_THIN_ARCHIVES=OFF \
|
||||
-DUSE_BUN_JSC_ADDITIONS=ON \
|
||||
-DENABLE_FTL_JIT=ON \
|
||||
-DENABLE_REMOTE_INSPECTOR=ON \
|
||||
-DJSEXPORT_PRIVATE=WTF_EXPORT_DECLARATION \
|
||||
-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON \
|
||||
-USE_VISIBILITY_ATTRIBUTE=1 \
|
||||
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
|
||||
-G Ninja \
|
||||
-DCMAKE_CXX_COMPILER=$(CXX) \
|
||||
-DCMAKE_C_COMPILER=$(CC) \
|
||||
$(WEBKIT_DIR) \
|
||||
$(WEBKIT_DEBUG_DIR)
|
||||
|
||||
# If you get "Error: could not load cache"
|
||||
# run rm -rf vendor/WebKit/CMakeCache.txt
|
||||
# run rm -rf src/bun.js/WebKit/CMakeCache.txt
|
||||
.PHONY: jsc-build-linux-compile-build
|
||||
jsc-build-linux-compile-build:
|
||||
mkdir -p $(WEBKIT_RELEASE_DIR) && \
|
||||
@@ -1378,34 +1291,19 @@ jsc-build-linux-compile-build:
|
||||
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects -DUSE_BUN_JSC_ADDITIONS=ON" \
|
||||
cmake --build $(WEBKIT_RELEASE_DIR) --config relwithdebuginfo --target jsc
|
||||
|
||||
.PHONY: jsc-build-linux-compile-build-debug
|
||||
jsc-build-linux-compile-build-debug:
|
||||
mkdir -p $(WEBKIT_DEBUG_DIR) && \
|
||||
cd $(WEBKIT_DEBUG_DIR) && \
|
||||
CFLAGS="$(CFLAGS) -Wl,--whole-archive -ffat-lto-objects" CXXFLAGS="$(CXXFLAGS) -Wl,--whole-archive -ffat-lto-objects -DUSE_BUN_JSC_ADDITIONS=ON" \
|
||||
cmake --build $(WEBKIT_DEBUG_DIR) --config Debug --target jsc
|
||||
|
||||
jsc-build-mac: jsc-force-fastjit jsc-build-mac-compile jsc-build-mac-copy
|
||||
|
||||
jsc-build-mac: jsc-force-fastjit jsc-build-mac-compile jsc-build-copy
|
||||
jsc-build-mac-debug: jsc-force-fastjit jsc-build-mac-compile-debug
|
||||
jsc-build-linux: jsc-build-linux-compile-config jsc-build-linux-compile-build jsc-build-mac-copy
|
||||
|
||||
jsc-build-linux: jsc-build-linux-compile-config jsc-build-linux-compile-build jsc-build-copy
|
||||
jsc-build-linux-debug: jsc-build-linux-compile-config-debug jsc-build-linux-compile-build-debug
|
||||
|
||||
jsc-build-copy:
|
||||
jsc-build-mac-copy:
|
||||
cp $(WEBKIT_RELEASE_DIR)/lib/libJavaScriptCore.a $(BUN_DEPS_OUT_DIR)/libJavaScriptCore.a
|
||||
#cp $(WEBKIT_RELEASE_DIR)/lib/libLowLevelInterpreterLib.a $(BUN_DEPS_OUT_DIR)/libLowLevelInterpreterLib.a
|
||||
cp $(WEBKIT_RELEASE_DIR)/lib/libWTF.a $(BUN_DEPS_OUT_DIR)/libWTF.a
|
||||
cp $(WEBKIT_RELEASE_DIR)/lib/libbmalloc.a $(BUN_DEPS_OUT_DIR)/libbmalloc.a
|
||||
|
||||
jsc-build-copy-debug:
|
||||
cp $(WEBKIT_DEBUG_DIR)/lib/libJavaScriptCore.a $(BUN_DEPS_OUT_DIR)/libJavaScriptCore.a
|
||||
#cp $(WEBKIT_DEBUG_DIR)/lib/libLowLevelInterpreterLib.a $(BUN_DEPS_OUT_DIR)/libLowLevelInterpreterLib.a
|
||||
cp $(WEBKIT_DEBUG_DIR)/lib/libWTF.a $(BUN_DEPS_OUT_DIR)/libWTF.a
|
||||
cp $(WEBKIT_DEBUG_DIR)/lib/libbmalloc.a $(BUN_DEPS_OUT_DIR)/libbmalloc.a
|
||||
|
||||
clean-jsc:
|
||||
cd vendor/WebKit && rm -rf **/CMakeCache.txt **/CMakeFiles && rm -rf vendor/WebKit/WebKitBuild
|
||||
cd src/bun.js/WebKit && rm -rf **/CMakeCache.txt **/CMakeFiles && rm -rf src/bun.js/WebKit/WebKitBuild
|
||||
clean-bindings:
|
||||
rm -rf $(OBJ_DIR)/*.o $(DEBUG_OBJ_DIR)/*.o $(DEBUG_OBJ_DIR)/webcore/*.o $(DEBUG_BINDINGS_OBJ) $(OBJ_DIR)/webcore/*.o $(BINDINGS_OBJ) $(OBJ_DIR)/*.d $(DEBUG_OBJ_DIR)/*.d
|
||||
|
||||
@@ -1977,6 +1875,11 @@ copy-to-bun-release-dir-bin:
|
||||
|
||||
PACKAGE_MAP = --pkg-begin async_io $(BUN_DIR)/src/io/io_darwin.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin javascript_core $(BUN_DIR)/src/jsc.zig --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end --pkg-end --pkg-begin bun $(BUN_DIR)/src/bun_redirect.zig --pkg-end
|
||||
|
||||
.PHONY: base64
|
||||
base64:
|
||||
cd $(BUN_DEPS_DIR)/base64 && make clean && rm -rf CMakeCache.txt CMakeFiles && cmake $(CMAKE_FLAGS) . && make
|
||||
cp $(BUN_DEPS_DIR)/base64/libbase64.a $(BUN_DEPS_OUT_DIR)/libbase64.a
|
||||
|
||||
.PHONY: cold-jsc-start
|
||||
cold-jsc-start:
|
||||
$(CXX_WITH_CCACHE) $(CLANG_FLAGS) \
|
||||
@@ -1994,7 +1897,7 @@ cold-jsc-start:
|
||||
misctools/cold-jsc-start.cpp -o cold-jsc-start
|
||||
|
||||
.PHONY: vendor-without-npm
|
||||
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws lshpack tinycc c-ares zstd
|
||||
vendor-without-npm: node-fallbacks runtime_js fallback_decoder bun_error mimalloc picohttp zlib boringssl libarchive lolhtml sqlite usockets uws lshpack tinycc c-ares zstd base64
|
||||
|
||||
|
||||
.PHONY: vendor-without-check
|
||||
|
||||
README.md
@@ -1,5 +1,5 @@
|
||||
<p align="center">
|
||||
<a href="https://bun.sh"><img src="https://github.com/user-attachments/assets/50282090-adfd-4ddb-9e27-c30753c6b161" alt="Logo" height=170></a>
|
||||
<a href="https://bun.sh"><img src="https://user-images.githubusercontent.com/709451/182802334-d9c42afe-f35d-4a7b-86ea-9985f73f20c3.png" alt="Logo" height=170></a>
|
||||
</p>
|
||||
<h1 align="center">Bun</h1>
|
||||
|
||||
@@ -24,6 +24,8 @@
|
||||
|
||||
## What is Bun?
|
||||
|
||||
> **Bun is under active development.** Use it to speed up your development workflows or run simpler production code in resource-constrained environments like serverless functions. We're working on more complete Node.js compatibility and integration with existing frameworks. Join the [Discord](https://bun.sh/discord) and watch the [GitHub repository](https://github.com/oven-sh/bun) to keep tabs on future releases.
|
||||
|
||||
Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
|
||||
|
||||
At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-in replacement for Node.js. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
|
||||
@@ -85,310 +87,45 @@ bun upgrade --canary
|
||||
## Quick links
|
||||
|
||||
- Intro
|
||||
|
||||
- [What is Bun?](https://bun.sh/docs/index)
|
||||
- [Installation](https://bun.sh/docs/installation)
|
||||
- [Quickstart](https://bun.sh/docs/quickstart)
|
||||
- [TypeScript](https://bun.sh/docs/typescript)
|
||||
|
||||
- Templating
|
||||
|
||||
- CLI
|
||||
- [`bun run`](https://bun.sh/docs/cli/run)
|
||||
- [`bun install`](https://bun.sh/docs/cli/install)
|
||||
- [`bun test`](https://bun.sh/docs/cli/test)
|
||||
- [`bun init`](https://bun.sh/docs/cli/init)
|
||||
- [`bun create`](https://bun.sh/docs/cli/bun-create)
|
||||
|
||||
- Runtime
|
||||
|
||||
- [`bun run`](https://bun.sh/docs/cli/run)
|
||||
- [File types](https://bun.sh/docs/runtime/loaders)
|
||||
- [TypeScript](https://bun.sh/docs/runtime/typescript)
|
||||
- [JSX](https://bun.sh/docs/runtime/jsx)
|
||||
- [Environment variables](https://bun.sh/docs/runtime/env)
|
||||
- [Bun APIs](https://bun.sh/docs/runtime/bun-apis)
|
||||
- [Web APIs](https://bun.sh/docs/runtime/web-apis)
|
||||
- [Node.js compatibility](https://bun.sh/docs/runtime/nodejs-apis)
|
||||
- [Single-file executable](https://bun.sh/docs/bundler/executables)
|
||||
- [Plugins](https://bun.sh/docs/runtime/plugins)
|
||||
- [Watch mode](https://bun.sh/docs/runtime/hot)
|
||||
- [Module resolution](https://bun.sh/docs/runtime/modules)
|
||||
- [Auto-install](https://bun.sh/docs/runtime/autoimport)
|
||||
- [bunfig.toml](https://bun.sh/docs/runtime/bunfig)
|
||||
- [Debugger](https://bun.sh/docs/runtime/debugger)
|
||||
- [Framework API](https://bun.sh/docs/runtime/framework)
|
||||
|
||||
- Package manager
|
||||
|
||||
- [`bun install`](https://bun.sh/docs/cli/install)
|
||||
- [`bun add`](https://bun.sh/docs/cli/add)
|
||||
- [`bun remove`](https://bun.sh/docs/cli/remove)
|
||||
- [`bun update`](https://bun.sh/docs/cli/update)
|
||||
- [`bun link`](https://bun.sh/docs/cli/link)
|
||||
- [`bun pm`](https://bun.sh/docs/cli/pm)
|
||||
- [Global cache](https://bun.sh/docs/install/cache)
|
||||
- [Workspaces](https://bun.sh/docs/install/workspaces)
|
||||
- [Lifecycle scripts](https://bun.sh/docs/install/lifecycle)
|
||||
- [Filter](https://bun.sh/docs/cli/filter)
|
||||
- [Lockfile](https://bun.sh/docs/install/lockfile)
|
||||
- [Scopes and registries](https://bun.sh/docs/install/registries)
|
||||
- [Overrides and resolutions](https://bun.sh/docs/install/overrides)
|
||||
|
||||
- Bundler
|
||||
|
||||
- [`Bun.build`](https://bun.sh/docs/bundler)
|
||||
- [Loaders](https://bun.sh/docs/bundler/loaders)
|
||||
- [Plugins](https://bun.sh/docs/bundler/plugins)
|
||||
- [Macros](https://bun.sh/docs/bundler/macros)
|
||||
- [vs esbuild](https://bun.sh/docs/bundler/vs-esbuild)
|
||||
|
||||
- Test runner
|
||||
|
||||
- [`bun test`](https://bun.sh/docs/cli/test)
|
||||
- [Writing tests](https://bun.sh/docs/test/writing)
|
||||
- [Watch mode](https://bun.sh/docs/test/hot)
|
||||
- [Lifecycle hooks](https://bun.sh/docs/test/lifecycle)
|
||||
- [Mocks](https://bun.sh/docs/test/mocks)
|
||||
- [Snapshots](https://bun.sh/docs/test/snapshots)
|
||||
- [Dates and times](https://bun.sh/docs/test/time)
|
||||
- [DOM testing](https://bun.sh/docs/test/dom)
|
||||
- [Code coverage](https://bun.sh/docs/test/coverage)
|
||||
|
||||
- Package runner
|
||||
|
||||
- [`bunx`](https://bun.sh/docs/cli/bunx)
|
||||
|
||||
- Runtime
|
||||
- [Runtime](https://bun.sh/docs/runtime/index)
|
||||
- [Module resolution](https://bun.sh/docs/runtime/modules)
|
||||
- [Hot & live reloading](https://bun.sh/docs/runtime/hot)
|
||||
- [Plugins](https://bun.sh/docs/bundler/plugins)
|
||||
- Ecosystem
|
||||
- [Node.js](https://bun.sh/docs/ecosystem/nodejs)
|
||||
- [TypeScript](https://bun.sh/docs/ecosystem/typescript)
|
||||
- [React](https://bun.sh/docs/ecosystem/react)
|
||||
- [Elysia](https://bun.sh/docs/ecosystem/elysia)
|
||||
- [Hono](https://bun.sh/docs/ecosystem/hono)
|
||||
- [Express](https://bun.sh/docs/ecosystem/express)
|
||||
- [awesome-bun](https://github.com/apvarun/awesome-bun)
|
||||
- API
|
||||
|
||||
- [HTTP server](https://bun.sh/docs/api/http)
|
||||
- [HTTP](https://bun.sh/docs/api/http)
|
||||
- [WebSockets](https://bun.sh/docs/api/websockets)
|
||||
- [Workers](https://bun.sh/docs/api/workers)
|
||||
- [Binary data](https://bun.sh/docs/api/binary-data)
|
||||
- [Streams](https://bun.sh/docs/api/streams)
|
||||
- [TCP Sockets](https://bun.sh/docs/api/tcp)
|
||||
- [File I/O](https://bun.sh/docs/api/file-io)
|
||||
- [import.meta](https://bun.sh/docs/api/import-meta)
|
||||
- [SQLite](https://bun.sh/docs/api/sqlite)
|
||||
- [FileSystemRouter](https://bun.sh/docs/api/file-system-router)
|
||||
- [TCP sockets](https://bun.sh/docs/api/tcp)
|
||||
- [UDP sockets](https://bun.sh/docs/api/udp)
|
||||
- [Globals](https://bun.sh/docs/api/globals)
|
||||
- [$ Shell](https://bun.sh/docs/runtime/shell)
|
||||
- [Child processes](https://bun.sh/docs/api/spawn)
|
||||
- [Spawn](https://bun.sh/docs/api/spawn)
|
||||
- [Transpiler](https://bun.sh/docs/api/transpiler)
|
||||
- [Hashing](https://bun.sh/docs/api/hashing)
|
||||
- [Console](https://bun.sh/docs/api/console)
|
||||
- [FFI](https://bun.sh/docs/api/ffi)
|
||||
- [HTMLRewriter](https://bun.sh/docs/api/html-rewriter)
|
||||
- [Testing](https://bun.sh/docs/api/test)
|
||||
- [Utils](https://bun.sh/docs/api/utils)
|
||||
- [Node-API](https://bun.sh/docs/api/node-api)
|
||||
- [Glob](https://bun.sh/docs/api/glob)
|
||||
- [Semver](https://bun.sh/docs/api/semver)
|
||||
|
||||
- Project
|
||||
- [Roadmap](https://bun.sh/docs/project/roadmap)
|
||||
- [Benchmarking](https://bun.sh/docs/project/benchmarking)
|
||||
- [Contributing](https://bun.sh/docs/project/contributing)
|
||||
- [Building Windows](https://bun.sh/docs/project/building-windows)
|
||||
- [License](https://bun.sh/docs/project/licensing)
|
||||
|
||||
## Guides
|
||||
|
||||
- Binary
|
||||
|
||||
- [Convert a Blob to a DataView](https://bun.sh/guides/binary/blob-to-dataview)
|
||||
- [Convert a Blob to a ReadableStream](https://bun.sh/guides/binary/blob-to-stream)
|
||||
- [Convert a Blob to a string](https://bun.sh/guides/binary/blob-to-string)
|
||||
- [Convert a Blob to a Uint8Array](https://bun.sh/guides/binary/blob-to-typedarray)
|
||||
- [Convert a Blob to an ArrayBuffer](https://bun.sh/guides/binary/blob-to-arraybuffer)
|
||||
- [Convert a Buffer to a blob](https://bun.sh/guides/binary/buffer-to-blob)
|
||||
- [Convert a Buffer to a ReadableStream](https://bun.sh/guides/binary/buffer-to-readablestream)
|
||||
- [Convert a Buffer to a string](https://bun.sh/guides/binary/buffer-to-string)
|
||||
- [Convert a Buffer to a Uint8Array](https://bun.sh/guides/binary/buffer-to-typedarray)
|
||||
- [Convert a Buffer to an ArrayBuffer](https://bun.sh/guides/binary/buffer-to-arraybuffer)
|
||||
- [Convert a DataView to a string](https://bun.sh/guides/binary/dataview-to-string)
|
||||
- [Convert a Uint8Array to a Blob](https://bun.sh/guides/binary/typedarray-to-blob)
|
||||
- [Convert a Uint8Array to a Buffer](https://bun.sh/guides/binary/typedarray-to-buffer)
|
||||
- [Convert a Uint8Array to a DataView](https://bun.sh/guides/binary/typedarray-to-dataview)
|
||||
- [Convert a Uint8Array to a ReadableStream](https://bun.sh/guides/binary/typedarray-to-readablestream)
|
||||
- [Convert a Uint8Array to a string](https://bun.sh/guides/binary/typedarray-to-string)
|
||||
- [Convert a Uint8Array to an ArrayBuffer](https://bun.sh/guides/binary/typedarray-to-arraybuffer)
|
||||
- [Convert an ArrayBuffer to a Blob](https://bun.sh/guides/binary/arraybuffer-to-blob)
|
||||
- [Convert an ArrayBuffer to a Buffer](https://bun.sh/guides/binary/arraybuffer-to-buffer)
|
||||
- [Convert an ArrayBuffer to a string](https://bun.sh/guides/binary/arraybuffer-to-string)
|
||||
- [Convert an ArrayBuffer to a Uint8Array](https://bun.sh/guides/binary/arraybuffer-to-typedarray)
|
||||
- [Convert an ArrayBuffer to an array of numbers](https://bun.sh/guides/binary/arraybuffer-to-array)
|
||||
|
||||
- Ecosystem
|
||||
|
||||
- [Build a frontend using Vite and Bun](https://bun.sh/guides/ecosystem/vite)
|
||||
- [Build an app with Astro and Bun](https://bun.sh/guides/ecosystem/astro)
|
||||
- [Build an app with Next.js and Bun](https://bun.sh/guides/ecosystem/nextjs)
|
||||
- [Build an app with Nuxt and Bun](https://bun.sh/guides/ecosystem/nuxt)
|
||||
- [Build an app with Qwik and Bun](https://bun.sh/guides/ecosystem/qwik)
|
||||
- [Build an app with Remix and Bun](https://bun.sh/guides/ecosystem/remix)
|
||||
- [Build an app with SolidStart and Bun](https://bun.sh/guides/ecosystem/solidstart)
|
||||
- [Build an app with SvelteKit and Bun](https://bun.sh/guides/ecosystem/sveltekit)
|
||||
- [Build an HTTP server using Elysia and Bun](https://bun.sh/guides/ecosystem/elysia)
|
||||
- [Build an HTTP server using Express and Bun](https://bun.sh/guides/ecosystem/express)
|
||||
- [Build an HTTP server using Hono and Bun](https://bun.sh/guides/ecosystem/hono)
|
||||
- [Build an HTTP server using StricJS and Bun](https://bun.sh/guides/ecosystem/stric)
|
||||
- [Containerize a Bun application with Docker](https://bun.sh/guides/ecosystem/docker)
|
||||
- [Create a Discord bot](https://bun.sh/guides/ecosystem/discordjs)
|
||||
- [Deploy a Bun application on Render](https://bun.sh/guides/ecosystem/render)
|
||||
- [Read and write data to MongoDB using Mongoose and Bun](https://bun.sh/guides/ecosystem/mongoose)
|
||||
- [Run Bun as a daemon with PM2](https://bun.sh/guides/ecosystem/pm2)
|
||||
- [Run Bun as a daemon with systemd](https://bun.sh/guides/ecosystem/systemd)
|
||||
- [Server-side render (SSR) a React component](https://bun.sh/guides/ecosystem/ssr-react)
|
||||
- [Use Drizzle ORM with Bun](https://bun.sh/guides/ecosystem/drizzle)
|
||||
- [Use EdgeDB with Bun](https://bun.sh/guides/ecosystem/edgedb)
|
||||
- [Use Neon's Serverless Postgres with Bun](https://bun.sh/guides/ecosystem/neon-serverless-postgres)
|
||||
- [Use Prisma with Bun](https://bun.sh/guides/ecosystem/prisma)
|
||||
- [Use React and JSX](https://bun.sh/guides/ecosystem/react)
|
||||
- [Add Sentry to a Bun app](https://bun.sh/guides/ecosystem/sentry)
|
||||
|
||||
- HTTP
|
||||
|
||||
- [Common HTTP server usage](https://bun.sh/guides/http/server)
|
||||
- [Configure TLS on an HTTP server](https://bun.sh/guides/http/tls)
|
||||
- [fetch with unix domain sockets in Bun](https://bun.sh/guides/http/fetch-unix)
|
||||
- [Hot reload an HTTP server](https://bun.sh/guides/http/hot)
|
||||
- [Proxy HTTP requests using fetch()](https://bun.sh/guides/http/proxy)
|
||||
- [Send an HTTP request using fetch](https://bun.sh/guides/http/fetch)
|
||||
- [Start a cluster of HTTP servers](https://bun.sh/guides/http/cluster)
|
||||
- [Stream a file as an HTTP Response](https://bun.sh/guides/http/stream-file)
|
||||
- [Streaming HTTP Server with Async Iterators](https://bun.sh/guides/http/stream-iterator)
|
||||
- [Streaming HTTP Server with Node.js Streams](https://bun.sh/guides/http/stream-node-streams-in-bun)
|
||||
- [Upload files via HTTP using FormData](https://bun.sh/guides/http/file-uploads)
|
||||
- [Write a simple HTTP server](https://bun.sh/guides/http/simple)
|
||||
|
||||
- Install
|
||||
|
||||
- [Add a dependency](https://bun.sh/guides/install/add)
|
||||
- [Add a development dependency](https://bun.sh/guides/install/add-dev)
|
||||
- [Add a Git dependency](https://bun.sh/guides/install/add-git)
|
||||
- [Add a peer dependency](https://bun.sh/guides/install/add-peer)
|
||||
- [Add a tarball dependency](https://bun.sh/guides/install/add-tarball)
|
||||
- [Add a trusted dependency](https://bun.sh/guides/install/trusted)
|
||||
- [Add an optional dependency](https://bun.sh/guides/install/add-optional)
|
||||
- [Configure a private registry for an organization scope with bun install](https://bun.sh/guides/install/registry-scope)
|
||||
- [Configure git to diff Bun's lockb lockfile](https://bun.sh/guides/install/git-diff-bun-lockfile)
|
||||
- [Configuring a monorepo using workspaces](https://bun.sh/guides/install/workspaces)
|
||||
- [Generate a human-readable lockfile](https://bun.sh/guides/install/yarnlock)
|
||||
- [Install a package under a different name](https://bun.sh/guides/install/npm-alias)
|
||||
- [Install dependencies with Bun in GitHub Actions](https://bun.sh/guides/install/cicd)
|
||||
- [Override the default npm registry for bun install](https://bun.sh/guides/install/custom-registry)
|
||||
- [Using bun install with an Azure Artifacts npm registry](https://bun.sh/guides/install/azure-artifacts)
|
||||
- [Using bun install with Artifactory](https://bun.sh/guides/install/jfrog-artifactory)
|
||||
|
||||
- Process
|
||||
|
||||
- [Get the process uptime in nanoseconds](https://bun.sh/guides/process/nanoseconds)
|
||||
- [Listen for CTRL+C](https://bun.sh/guides/process/ctrl-c)
|
||||
- [Listen to OS signals](https://bun.sh/guides/process/os-signals)
|
||||
- [Parse command-line arguments](https://bun.sh/guides/process/argv)
|
||||
- [Read from stdin](https://bun.sh/guides/process/stdin)
|
||||
- [Read stderr from a child process](https://bun.sh/guides/process/spawn-stderr)
|
||||
- [Read stdout from a child process](https://bun.sh/guides/process/spawn-stdout)
|
||||
- [Spawn a child process](https://bun.sh/guides/process/spawn)
|
||||
- [Spawn a child process and communicate using IPC](https://bun.sh/guides/process/ipc)
|
||||
|
||||
- Read file
|
||||
|
||||
- [Check if a file exists](https://bun.sh/guides/read-file/exists)
|
||||
- [Get the MIME type of a file](https://bun.sh/guides/read-file/mime)
|
||||
- [Read a file as a ReadableStream](https://bun.sh/guides/read-file/stream)
|
||||
- [Read a file as a string](https://bun.sh/guides/read-file/string)
|
||||
- [Read a file to a Buffer](https://bun.sh/guides/read-file/buffer)
|
||||
- [Read a file to a Uint8Array](https://bun.sh/guides/read-file/uint8array)
|
||||
- [Read a file to an ArrayBuffer](https://bun.sh/guides/read-file/arraybuffer)
|
||||
- [Read a JSON file](https://bun.sh/guides/read-file/json)
|
||||
- [Watch a directory for changes](https://bun.sh/guides/read-file/watch)
|
||||
|
||||
- Runtime
|
||||
|
||||
- [Debugging Bun with the VS Code extension](https://bun.sh/guides/runtime/vscode-debugger)
|
||||
- [Debugging Bun with the web debugger](https://bun.sh/guides/runtime/web-debugger)
|
||||
- [Define and replace static globals & constants](https://bun.sh/guides/runtime/define-constant)
|
||||
- [Import a JSON file](https://bun.sh/guides/runtime/import-json)
|
||||
- [Import a TOML file](https://bun.sh/guides/runtime/import-toml)
|
||||
- [Import HTML file as text](https://bun.sh/guides/runtime/import-html)
|
||||
- [Install and run Bun in GitHub Actions](https://bun.sh/guides/runtime/cicd)
|
||||
- [Install TypeScript declarations for Bun](https://bun.sh/guides/runtime/typescript)
|
||||
- [Re-map import paths](https://bun.sh/guides/runtime/tsconfig-paths)
|
||||
- [Read environment variables](https://bun.sh/guides/runtime/read-env)
|
||||
- [Run a Shell Command](https://bun.sh/guides/runtime/shell)
|
||||
- [Set a time zone in Bun](https://bun.sh/guides/runtime/timezone)
|
||||
- [Set environment variables](https://bun.sh/guides/runtime/set-env)
|
||||
|
||||
- Streams
|
||||
|
||||
- [Convert a Node.js Readable to a Blob](https://bun.sh/guides/streams/node-readable-to-blob)
|
||||
- [Convert a Node.js Readable to a string](https://bun.sh/guides/streams/node-readable-to-string)
|
||||
- [Convert a Node.js Readable to an ArrayBuffer](https://bun.sh/guides/streams/node-readable-to-arraybuffer)
|
||||
- [Convert a Node.js Readable to JSON](https://bun.sh/guides/streams/node-readable-to-json)
|
||||
- [Convert a ReadableStream to a Blob](https://bun.sh/guides/streams/to-blob)
|
||||
- [Convert a ReadableStream to a Buffer](https://bun.sh/guides/streams/to-buffer)
|
||||
- [Convert a ReadableStream to a string](https://bun.sh/guides/streams/to-string)
|
||||
- [Convert a ReadableStream to a Uint8Array](https://bun.sh/guides/streams/to-typedarray)
|
||||
- [Convert a ReadableStream to an array of chunks](https://bun.sh/guides/streams/to-array)
|
||||
- [Convert a ReadableStream to an ArrayBuffer](https://bun.sh/guides/streams/to-arraybuffer)
|
||||
- [Convert a ReadableStream to JSON](https://bun.sh/guides/streams/to-json)
|
||||
|
||||
- Test
|
||||
|
||||
- [Bail early with the Bun test runner](https://bun.sh/guides/test/bail)
|
||||
- [Generate code coverage reports with the Bun test runner](https://bun.sh/guides/test/coverage)
|
||||
- [Mark a test as a "todo" with the Bun test runner](https://bun.sh/guides/test/todo-tests)
|
||||
- [Migrate from Jest to Bun's test runner](https://bun.sh/guides/test/migrate-from-jest)
|
||||
- [Mock functions in `bun test`](https://bun.sh/guides/test/mock-functions)
|
||||
- [Re-run tests multiple times with the Bun test runner](https://bun.sh/guides/test/rerun-each)
|
||||
- [Run tests in watch mode with Bun](https://bun.sh/guides/test/watch-mode)
|
||||
- [Run your tests with the Bun test runner](https://bun.sh/guides/test/run-tests)
|
||||
- [Set a code coverage threshold with the Bun test runner](https://bun.sh/guides/test/coverage-threshold)
|
||||
- [Set a per-test timeout with the Bun test runner](https://bun.sh/guides/test/timeout)
|
||||
- [Set the system time in Bun's test runner](https://bun.sh/guides/test/mock-clock)
|
||||
- [Skip tests with the Bun test runner](https://bun.sh/guides/test/skip-tests)
|
||||
- [Spy on methods in `bun test`](https://bun.sh/guides/test/spy-on)
|
||||
- [Update snapshots in `bun test`](https://bun.sh/guides/test/update-snapshots)
|
||||
- [Use snapshot testing in `bun test`](https://bun.sh/guides/test/snapshot)
|
||||
- [Write browser DOM tests with Bun and happy-dom](https://bun.sh/guides/test/happy-dom)
|
||||
|
||||
- Util
|
||||
|
||||
- [Check if the current file is the entrypoint](https://bun.sh/guides/util/entrypoint)
|
||||
- [Check if two objects are deeply equal](https://bun.sh/guides/util/deep-equals)
|
||||
- [Compress and decompress data with DEFLATE](https://bun.sh/guides/util/deflate)
|
||||
- [Compress and decompress data with gzip](https://bun.sh/guides/util/gzip)
|
||||
- [Convert a file URL to an absolute path](https://bun.sh/guides/util/file-url-to-path)
|
||||
- [Convert an absolute path to a file URL](https://bun.sh/guides/util/path-to-file-url)
|
||||
- [Detect when code is executed with Bun](https://bun.sh/guides/util/detect-bun)
|
||||
- [Encode and decode base64 strings](https://bun.sh/guides/util/base64)
|
||||
- [Escape an HTML string](https://bun.sh/guides/util/escape-html)
|
||||
- [Get the absolute path of the current file](https://bun.sh/guides/util/import-meta-path)
|
||||
- [Get the absolute path to the current entrypoint](https://bun.sh/guides/util/main)
|
||||
- [Get the current Bun version](https://bun.sh/guides/util/version)
|
||||
- [Get the directory of the current file](https://bun.sh/guides/util/import-meta-dir)
|
||||
- [Get the file name of the current file](https://bun.sh/guides/util/import-meta-file)
|
||||
- [Get the path to an executable bin file](https://bun.sh/guides/util/which-path-to-executable-bin)
|
||||
- [Hash a password](https://bun.sh/guides/util/hash-a-password)
|
||||
- [Sleep for a fixed number of milliseconds](https://bun.sh/guides/util/sleep)
|
||||
|
||||
- WebSocket
|
||||
|
||||
- [Build a publish-subscribe WebSocket server](https://bun.sh/guides/websocket/pubsub)
|
||||
- [Build a simple WebSocket server](https://bun.sh/guides/websocket/simple)
|
||||
- [Enable compression for WebSocket messages](https://bun.sh/guides/websocket/compression)
|
||||
- [Set per-socket contextual data on a WebSocket](https://bun.sh/guides/websocket/context)
|
||||
|
||||
- Write file
|
||||
- [Append content to a file](https://bun.sh/guides/write-file/append)
|
||||
- [Copy a file to another location](https://bun.sh/guides/write-file/file-cp)
|
||||
- [Delete a file](https://bun.sh/guides/write-file/unlink)
|
||||
- [Write a Blob to a file](https://bun.sh/guides/write-file/blob)
|
||||
- [Write a file incrementally](https://bun.sh/guides/write-file/filesink)
|
||||
- [Write a file to stdout](https://bun.sh/guides/write-file/cat)
|
||||
- [Write a ReadableStream to a file](https://bun.sh/guides/write-file/stream)
|
||||
- [Write a Response to a file](https://bun.sh/guides/write-file/response)
|
||||
- [Write a string to a file](https://bun.sh/guides/write-file/basic)
|
||||
- [Write to stdout](https://bun.sh/guides/write-file/stdout)
|
||||
|
||||
## Contributing
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { run, bench } from "mitata";

bench("sync", () => {});
bench("async", async () => {});

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { run, bench } from "../node_modules/mitata/src/cli.mjs";

bench("sync", () => {});
bench("async", async () => {});

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { run, bench } from "mitata";

bench("sync", () => {});
bench("async", async () => {});

@@ -3,9 +3,9 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench:deno": "deno run -A --unstable deno.js",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}

BIN bench/bun.lockb
Binary file not shown.
@@ -1,5 +1,5 @@
import { copyFileSync, statSync, writeFileSync } from "node:fs";
import { bench, run } from "../runner.mjs";
import { copyFileSync, writeFileSync, readFileSync, statSync } from "node:fs";
import { bench, run } from "mitata";

function runner(ready) {
for (let size of [1, 10, 100, 1000, 10000, 100000, 1000000, 10000000]) {

@@ -1,24 +0,0 @@
import { bench, run } from "../runner.mjs";
const crypto = require("node:crypto");

const keyPair = crypto.generateKeyPairSync("rsa", {
modulusLength: 2048,
publicKeyEncoding: {
type: "spki",
format: "pem",
},
privateKeyEncoding: {
type: "pkcs8",
format: "pem",
},
});

// Max message size for 2048-bit RSA keys
const plaintext = crypto.getRandomValues(Buffer.alloc(214));

bench("RSA_PKCS1_OAEP_PADDING round-trip", () => {
const ciphertext = crypto.publicEncrypt(keyPair.publicKey, plaintext);
crypto.privateDecrypt(keyPair.privateKey, ciphertext);
});

await run();
@@ -1,24 +0,0 @@
import { bench, run } from "../runner.mjs";
const crypto = require("node:crypto");

const keyPair = crypto.generateKeyPairSync("rsa", {
modulusLength: 2048,
publicKeyEncoding: {
type: "spki",
format: "pem",
},
privateKeyEncoding: {
type: "pkcs8",
format: "pem",
},
});

// Max message size for 2048-bit RSA keys
const plaintext = crypto.getRandomValues(Buffer.alloc(245));

bench("RSA sign RSA_PKCS1_PADDING round-trip", () => {
const sig = crypto.privateEncrypt(keyPair.privateKey, plaintext);
crypto.publicDecrypt(keyPair.publicKey, sig);
});

await run();
@@ -1,27 +0,0 @@
import { expect } from "bun:test";
import { bench, run } from "../runner.mjs";

const MAP_SIZE = 10_000;

function* genPairs(count) {
for (let i = 0; i < MAP_SIZE; i++) {
yield ["k" + i, "v" + i];
}
}

class CustomMap extends Map {
abc = 123;
constructor(iterable) {
super(iterable);
}
}

const a = new Map(genPairs());
const b = new Map(genPairs());
bench("deepEqual Map", () => expect(a).toEqual(b));

const x = new CustomMap(genPairs());
const y = new CustomMap(genPairs());
bench("deepEqual CustomMap", () => expect(x).toEqual(y));

await run();
@@ -1,27 +0,0 @@
import { expect } from "bun:test";
import { bench, run } from "../runner.mjs";

const SET_SIZE = 10_000;

function* genValues(count) {
for (let i = 0; i < SET_SIZE; i++) {
yield "v" + i;
}
}

class CustomSet extends Set {
abc = 123;
constructor(iterable) {
super(iterable);
}
}

const a = new Set(genValues());
const b = new Set(genValues());
bench("deepEqual Set", () => expect(a).toEqual(b));

const x = new CustomSet(genValues());
const y = new CustomSet(genValues());
bench("deepEqual CustomSet", () => expect(x).toEqual(y));

await run();
@@ -1,5 +1,6 @@
import EventEmitter3 from "eventemitter3";
import { group } from "mitata";
import EventEmitterNative from "node:events";
import { group } from "../runner.mjs";

export const implementations = [
{

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";

var id = 0;

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";

var id = 0;

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";
import { groupForEmitter } from "./implementations.mjs";

// Pseudo RNG is derived from https://stackoverflow.com/a/424445
@@ -1,5 +1,5 @@
// bun:test automatically rewrites this import to bun:test when run in bun
import { expect, test } from "@jest/globals";
import { test, expect } from "@jest/globals";

const N = parseInt(process.env.RUN_COUNT || "10000", 10);
if (!Number.isSafeInteger(N)) {

@@ -1,4 +1,4 @@
import { expect, test } from "vitest";
import { test, expect } from "vitest";

const N = parseInt(process.env.RUN_COUNT || "10000", 10);
if (!Number.isSafeInteger(N)) {
@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";

const count = 100;

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";

const count = 100;
@@ -1,5 +1,5 @@
import { CString, dlopen, ptr } from "bun:ffi";
import { bench, group, run } from "../runner.mjs";
import { ptr, dlopen, CString, toBuffer } from "bun:ffi";
import { run, bench, group } from "mitata";

const { napiNoop, napiHash, napiString } = require(import.meta.dir + "/src/ffi_napi_bench.node");

@@ -1,4 +1,4 @@
import { bench, group, run } from "../runner.mjs";
import { run, bench, group } from "../node_modules/mitata/src/cli.mjs";

const extension = "darwin" !== Deno.build.os ? "so" : "dylib";
const path = new URL("src/target/release/libffi_napi_bench." + extension, import.meta.url).pathname;

@@ -1,5 +1,5 @@
import { run, bench, group } from "mitata";
import { createRequire } from "node:module";
import { bench, group, run } from "../runner.mjs";

const require = createRequire(import.meta.url);
const { napiNoop, napiHash, napiString } = require("./src/ffi_napi_bench.node");
@@ -1,11 +1,11 @@
{
"name": "bench",
"scripts": {
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"deps": "cd src && bun run deps",
"build": "cd src && bun run build",
"bench:deno": "deno run -A --unstable deno.js",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
}
}
@@ -1,5 +1,5 @@
import braces from "braces";
import { bench, group, run } from "../runner.mjs";
import { group, bench, run } from "mitata";

// const iterations = 1000;
const iterations = 100;
@@ -10,16 +10,15 @@ const veryComplexPattern = "{a,b,HI{c,e,LMAO{d,f}Q}}{1,2,{3,4},5}";

console.log(braces(complexPattern, { expand: true }));
function benchPattern(pattern, name) {
const _name = `${name} pattern: "${pattern}"`;
group({ name: _name, summary: true }, () => {
group({ name: `${name} pattern: "${pattern}"`, summary: true }, () => {
if (typeof Bun !== "undefined")
bench(`Bun (${_name})`, () => {
bench("Bun", () => {
for (let i = 0; i < iterations; i++) {
Bun.$.braces(pattern);
}
});

bench(`micromatch/braces ${_name}`, () => {
bench("micromatch/braces", () => {
for (let i = 0; i < iterations; i++) {
braces(pattern, { expand: true });
}
@@ -1,5 +1,5 @@
import micromatch from "micromatch";
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";

const Glob = typeof Bun !== "undefined" ? Bun.Glob : undefined;
const doMatch = typeof Bun === "undefined" ? micromatch.isMatch : (a, b) => new Glob(b).match(a);

@@ -1,6 +1,6 @@
import { run, bench, group } from "mitata";
import fg from "fast-glob";
import { fdir } from "fdir";
import { bench, group, run } from "../runner.mjs";

const normalPattern = "*.ts";
const recursivePattern = "**/*.ts";
@@ -1,14 +0,0 @@
syntax = "proto3";
package benchmark;

service BenchmarkService {
rpc Ping(Request) returns (Response);
}

message Request {
string message = 1;
}

message Response {
string message = 1;
}
@@ -1,33 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIFxjCCA66gAwIBAgIUUaQCzOcxcFBP0KwoQfNqD/FoI44wDQYJKoZIhvcNAQEL
BQAwYjELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJh
bmNpc2NvMQwwCgYDVQQKDANCdW4xDDAKBgNVBAsMA0J1bjESMBAGA1UEAwwJbG9j
YWxob3N0MB4XDTI0MTAxNjAwMDExNloXDTM0MTAxNDAwMDExNlowYjELMAkGA1UE
BhMCVVMxCzAJBgNVBAgMAkNBMRYwFAYDVQQHDA1TYW4gRnJhbmNpc2NvMQwwCgYD
VQQKDANCdW4xDDAKBgNVBAsMA0J1bjESMBAGA1UEAwwJbG9jYWxob3N0MIICIjAN
BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp2s1CWRRV3bkjUxyBefcRCiZj8v6
LIIWOb/kFJOo1PQsmQtOOWfY/kNEATPhLtEVolMzsQtaKV+u/Jnp6vU6cCU0qfQ/
cha/s0XaSn9zkJSXjmNOPDOXoeJ5wmSUvWETRvDgeYXCg84zTwRnD1pXIsKxHtia
SYkTC29skSn0+63GW2Ebzkbn3jcYbk3gfkRO/qw8EDh/4/TcS2SjoHl96E1QcfBX
InXrPGoHQhuqJV60rmmkVws0lTIZIq0g2p7iFDCg5TG1asakX7+CrEM/q+oyo3e8
RwMfc+9pqFEqyvXGIQSulS+CVKKbpAFMg07UGYe1t0s5iCwfLQ9apaKL31t/3Vkr
uVKgy5FrPLnRXkFXDZ1v+43AZBmdLrKODzsqHEbt2JmV0V6JVUkE4kbeJr/nlkhQ
x6yXloYY3VKbnCb1L3HmMInrK1QSpxlOb8RllTd33oBwd1FKEvH2gza0j9hqq8uQ
hWVN7tlamkgtBteZ8Y9fd3MdxD9iZOx4dVtCX1+sgJFdaL2ZgE0asojn46yT8Uqw
5d0M9vqmWc5AqG7c4UWWRrfB1MfOq/X8GtImmKyhEgizIPdWFeF1cNjhPffJv4yR
Y4Rj33OBTCM+9h8ZSw/fKo55yRXyz3bjrW2Mg8Dtq+6TcRd5gSLCaTN6jX8E9y7G
TobnA9MnKHhSIhsCAwEAAaN0MHIwHQYDVR0OBBYEFEJU6/9ELCp1CAxYJ5FJJxpV
FSRmMB8GA1UdIwQYMBaAFEJU6/9ELCp1CAxYJ5FJJxpVFSRmMA8GA1UdEwEB/wQF
MAMBAf8wHwYDVR0RBBgwFoIJbG9jYWxob3N0ggkxMjcuMC4wLjEwDQYJKoZIhvcN
AQELBQADggIBACyOPdVwfJg1aUNANy78+cm6eoInM9NDdXGWHMqCJwYF6qJTQV11
jYwYrl+OWOi3CEC+ogXl+uJX4tSS5d+rBTXEb73cLpogxP+xuxr4cBHhtgpGRpY0
GqWCFUTexHxXMrYhHQxf3uv79PNauw/dd1Baby1OjF3zSKRzFsv4KId97cAgT/9H
HfUo2ym5jmhNFj5rhUavO3Pw1++1eeDeDAkS6T59buzx0h9760WD20oBdgjt42cb
P6xg9OwV7ALQSwJ8YPEXpkl7u+6jy0j5ceYmXh76tAyA+hDYOJrY0opBjSPmXH99
p3W63gvk/AdfeAdbFHp6en0b04x4EIogOGZxBP35rzBvsQpqavBE3PBpUIyrQs5p
OBUncRrcjEDL6WKh6RJIjZnvpHPrEqOqyxaeWRc4+85ZrVArJHGMc8I+zs9uCFjo
Cjfde3d317kCszUTxo0l3azyBpr007PMIUoBF2VJEAyQp2Tz/yu0CbEscNJO/wCn
Sb1A6ojaQcgQe2hsaJz/mS+OOjHHaDbCp9iltP2CS63PYleEx4q1Bn8KVRy2zYTB
n74y4YaD8Q+hSA6zU741pzqK2SFCpBQnSz757ocr6WspQ47iOonX2giGZS/3KVeK
qNzU14+h0b8HaBqZmOvjF+S4G0HDpRwxPzDWgc7dEIWlzHH+ZCqjBFwL
-----END CERTIFICATE-----
@@ -1,31 +0,0 @@
const grpc = require("@grpc/grpc-js");
const protoLoader = require("@grpc/proto-loader");
const packageDefinition = protoLoader.loadSync("benchmark.proto", {});
const proto = grpc.loadPackageDefinition(packageDefinition).benchmark;
const fs = require("fs");

function ping(call, callback) {
callback(null, { message: "Hello, World" });
}

function main() {
const server = new grpc.Server();
server.addService(proto.BenchmarkService.service, { ping: ping });
const tls = !!process.env.TLS && (process.env.TLS === "1" || process.env.TLS === "true");
const port = process.env.PORT || 50051;
const host = process.env.HOST || "localhost";
let credentials;
if (tls) {
const ca = fs.readFileSync("./cert.pem");
const key = fs.readFileSync("./key.pem");
const cert = fs.readFileSync("./cert.pem");
credentials = grpc.ServerCredentials.createSsl(ca, [{ private_key: key, cert_chain: cert }]);
} else {
credentials = grpc.ServerCredentials.createInsecure();
}
server.bindAsync(`${host}:${port}`, credentials, () => {
console.log(`Server running at ${tls ? "https" : "http"}://${host}:${port}`);
});
}

main();
@@ -1,52 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIJQgIBADANBgkqhkiG9w0BAQEFAASCCSwwggkoAgEAAoICAQCnazUJZFFXduSN
THIF59xEKJmPy/osghY5v+QUk6jU9CyZC045Z9j+Q0QBM+Eu0RWiUzOxC1opX678
menq9TpwJTSp9D9yFr+zRdpKf3OQlJeOY048M5eh4nnCZJS9YRNG8OB5hcKDzjNP
BGcPWlciwrEe2JpJiRMLb2yRKfT7rcZbYRvORufeNxhuTeB+RE7+rDwQOH/j9NxL
ZKOgeX3oTVBx8Fcides8agdCG6olXrSuaaRXCzSVMhkirSDanuIUMKDlMbVqxqRf
v4KsQz+r6jKjd7xHAx9z72moUSrK9cYhBK6VL4JUopukAUyDTtQZh7W3SzmILB8t
D1qloovfW3/dWSu5UqDLkWs8udFeQVcNnW/7jcBkGZ0uso4POyocRu3YmZXRXolV
SQTiRt4mv+eWSFDHrJeWhhjdUpucJvUvceYwiesrVBKnGU5vxGWVN3fegHB3UUoS
8faDNrSP2Gqry5CFZU3u2VqaSC0G15nxj193cx3EP2Jk7Hh1W0JfX6yAkV1ovZmA
TRqyiOfjrJPxSrDl3Qz2+qZZzkCobtzhRZZGt8HUx86r9fwa0iaYrKESCLMg91YV
4XVw2OE998m/jJFjhGPfc4FMIz72HxlLD98qjnnJFfLPduOtbYyDwO2r7pNxF3mB
IsJpM3qNfwT3LsZOhucD0ycoeFIiGwIDAQABAoICAE+YYrDCZwHEXsjmzVcNcuVc
wBVjjt9WQabXGmLGCQClzgY9H8WfH8VSyaQgvDB762MvV2YW1ZjSCunBazrvuAbV
SYJ7wyZEtoNO9IdyrMjSPHPPtsRcavzmJalMFIMtAfM6Vh6wf1gW0sIAf9cGxmKa
WYcmx8OqTcmkAePKJNT7O1D6jDO39kjpvM3EbLTbWQsva6bylasVIR8fC8QhvsCQ
8WwaLfMOSPaCGk1Nxcjai+BYDW/sveUo2lZoJTSLUUT0EaqlxXCsXD3BWSj5F+5t
/AFHzdWdIHkIHB2P6V5xFu9fwHjhC3+dh42jqHLNKX2xza0FMKcTAwdzQ094RjL3
cOGIsa0Vdt7Mks5eLCRxz0xI3kyrbF0/CopxT0pVWZwUzPk1G+Z3HesWkVtQpg7u
RYzsoNKKc5mhc/V+vG290WAcNB4E3m85DgKQr4ib+J/rCy5/SnJYgg4QXsEyNlQ5
ESBtRmuPfnrPIxqrDKZ7ZsJv8XFWydXTOfJxeKR1T1S02iYna+z1FnNu+t0ELTr9
uhmkuqmV8RJVTub1P2EJPdiku/61UwNLyyZMgFjATDxB0hHIj1FP1HbfhEYbkYNc
Dl7a7egJ4KFYWpQ+7MzOmc0OKq1HuJ9H4FhoYpbVq1OQosZ6G3d9afKSZa6dFdK0
8ujvdQBR0NlAhc/LAr6BAoIBAQDfD3h9P4i5L8NCdocovCi3Eo0kcNQ3QuvnWrrs
B/9CLoWhJrcLV85d0dEX6lSYl9BWW02ilVB+Qvom2wS2td1CBUgDxovX4tCZCuXt
otYL/yWWOA7IG0Fjt6YEERQD/tRfKnn8hVBlk5cDTXXxHRGVMku4CHsN3ILtITQS
VnVsTrGoWd6mFFA9X9Qu4zR9wKtjGEuL7BT8ixxtXLa2tMjdc4UL140yAgmMemJS
TzC6EURe2OnhIzVe9yyLKcqw0prkGHg/Lau5lA1CAh67ZMY4EjO3cuda8R+O7vyO
z2afeaTORzzdEbSZPG+8oqIN1/RjRCbl3RXYN8ibSwOzp6X7AoIBAQDAJEVta98J
P2/36rXrkl6WrRfYqUPy6vgo/lPuRpp+BQ7ldgmH4+ZrJW5Mxa5hktVujk/C2kAO
auzhzNlsxR+c/KwtsL1JXwBn8CT1bR0qvi+URmvGQn9GOKrLLy+6cfphuZWuc4/r
hAgXzEjzPcJJJfxA1i2soKPbiFiCGHxot68P4uJSM2sU6QjNIxEjPbTJjEg894pD
GJoiRRVHgnzzxL3cqrK90Zn6MAl9f2tYihfddsENeZb5t84LBppxBSGouE3ZH8uD
Sufs4DSj1ptocbDbX+0kRNqfjTI5ivDxlS+ZKBe05PVTUmGBAWLamfCe89IW3/z+
Rfkh4ZBPtlphAoIBADwjSqPR7kWnN+iCVjxIRl3dNYpelQh1FW7hikW6fjpUmphw
/KalPLEUsV/WQIqHW5b8tLihsvrnidPR9rpf29BB5kGGVQuWThEE3CquXTEM0BBo
+qs+lemRiMPN6uyM1qr1o7/OHXfVS8CLMMIZyTTFQ57RQoPhMLdH3WcYQj46FTHD
UQDLtzpkzKr7fJpuyIZF9ZA6zQmtY7OkbGpj4Ue7LmKb8ahK3lIuaLWyPfvcTeeY
aa3WNTxuPWcjlE8J6NKYOksmQAcfgFeMhMaXC83wMltCMlfVbGG30wWZqxxRynoG
wMUFUgCCR8m+uxwqXewpYqdUbOBHYeFkXxIfn+MCggEAR5p8wQ1NHd4lNOekCfkP
BOnWlChoKRPFjUlSL97h3gq2hW6amKimitF1LGkS1kvo+/1O3heFfZn9UxyK/kzr
vg4vgAt4Tup3dUR6EXgrQW2Ev6YKreTEF4Awre2UxM+K9nY5wLxSKvuWJIA9w2AF
kkr0mZj3hniK99n02e6UFlY1iB8OJoIA6tb5L7FcxpxNTjrYBNhfDygQ8Kp8Bp0r
QZDVDHIUkEaXMjRKpRkiAOndgOurgAEK8V69C0DXtzypUX31jO+bYP8+NPlMxK3K
Vn7f4LD75+M88e6lg+oyZmUpStM1GnWksvtlWLUSiNKLaEEGzv2EA6JB+I1dwUb8
oQKCAQEAlmisUyn1/lpNnEzKsfUnRs53WxS2e1br5vJ5+pet3cjXT2btfp6J5/mf
Tfqv5mZfTjYxydG0Kl3afI/SnhTcRS2/s4svrktZYLOLM2PAGYdCV6j1stXl4ObO
eIfjzB3y1Zc2dEcWTylJ/lABoNGMPWFJQ67q8WS37pUHQPseJ++LmZFvlRyBgZBl
VLqiHHiZ2ax+yC1ZxY4RECtEiYFplspNldNe+bP/lzTJftsUDe1FqRT/SvEam+1f
kb//sbHkJ+l4BEv0Us3SIGwJ0BblhxLYO34IFVpheY4UQBy/nRaeUUdVR9r8JtYD
z/cCLOrUJfealezimyd8SKPWPeHhrA==
-----END PRIVATE KEY-----
@@ -1,15 +0,0 @@
{
"name": "bench",
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bun:server": "TLS=1 PORT=50051 bun ./index.js",
"node:server": "TLS=1 PORT=50051 node ./index.js",
"bench": "ghz --cacert ./cert.pem --proto ./benchmark.proto --call benchmark.BenchmarkService.Ping -d '{\"message\": \"Hello\"}' --total=100000 localhost:50051",
"bench:insecure": "ghz --insecure --proto ./benchmark.proto --call benchmark.BenchmarkService.Ping -d '{\"message\": \"Hello\"}' --total=100000 localhost:50051"
},
"dependencies": {
"@grpc/grpc-js": "1.12.0",
"@grpc/proto-loader": "0.7.10"
}
}
@@ -1,43 +1,20 @@
import { gunzipSync, gzipSync } from "bun";
import { bench, group, run } from "../runner.mjs";
import { run, bench } from "mitata";
import { gzipSync, gunzipSync } from "bun";

const data = await Bun.file(require.resolve("@babel/standalone/babel.min.js")).arrayBuffer();
const data = new TextEncoder().encode("Hello World!".repeat(9999));

const compressed = gzipSync(data);

const libraries = ["zlib"];
if (Bun.semver.satisfies(Bun.version.replaceAll("-debug", ""), ">=1.1.21")) {
libraries.push("libdeflate");
}
const options = { library: undefined };
const benchFn = (name, fn) => {
if (libraries.length > 1) {
group(name, () => {
for (const library of libraries) {
bench(library, () => {
options.library = library;
fn();
});
}
});
} else {
options.library = libraries[0];
bench(name, () => {
fn();
});
}
};

benchFn(`roundtrip - @babel/standalone/babel.min.js`, () => {
gunzipSync(gzipSync(data, options), options);
bench(`roundtrip - "Hello World!".repeat(9999))`, () => {
gunzipSync(gzipSync(data));
});

benchFn(`gzipSync(@babel/standalone/babel.min.js`, () => {
gzipSync(data, options);
bench(`gzipSync("Hello World!".repeat(9999)))`, () => {
gzipSync(data);
});

benchFn(`gunzipSync(@babel/standalone/babel.min.js`, () => {
gunzipSync(compressed, options);
bench(`gunzipSync("Hello World!".repeat(9999)))`, () => {
gunzipSync(compressed);
});

await run();

Binary file not shown.
@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { run, bench } from "../node_modules/mitata/src/cli.mjs";

const data = new TextEncoder().encode("Hello World!".repeat(9999));
@@ -1,22 +1,19 @@
import { readFileSync } from "fs";
import { createRequire } from "module";
import { gunzipSync, gzipSync } from "zlib";
import { bench, run } from "../runner.mjs";
import { run, bench } from "mitata";
import { gzipSync, gunzipSync } from "zlib";

const require = createRequire(import.meta.url);
const data = readFileSync(require.resolve("@babel/standalone/babel.min.js"));
const data = new TextEncoder().encode("Hello World!".repeat(9999));

const compressed = gzipSync(data);

bench(`roundtrip - @babel/standalone/babel.min.js)`, () => {
bench(`roundtrip - "Hello World!".repeat(9999))`, () => {
gunzipSync(gzipSync(data));
});

bench(`gzipSync(@babel/standalone/babel.min.js))`, () => {
bench(`gzipSync("Hello World!".repeat(9999)))`, () => {
gzipSync(data);
});

bench(`gunzipSync(@babel/standalone/babel.min.js))`, () => {
bench(`gunzipSync("Hello World!".repeat(9999)))`, () => {
gunzipSync(compressed);
});
@@ -3,12 +3,9 @@
"scripts": {
"deps": "exit 0",
"build": "exit 0",
"bench:bun": "bun bun.js",
"bench:node": "node node.mjs",
"bench:deno": "deno run -A --unstable deno.js",
"bench:bun": "$BUN bun.js",
"bench:node": "$NODE node.mjs",
"bench:deno": "$DENO run -A --unstable deno.js",
"bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
},
"dependencies": {
"@babel/standalone": "7.24.10"
}
}
@@ -1,5 +1,6 @@
import ReactDOM from "react-dom";
import { Main } from "./main";
import classNames from "classnames";
import ReactDOM from "react-dom";

const Base = ({}) => {
const name = typeof location !== "undefined" ? decodeURIComponent(location.search.substring(1)) : null;

@@ -4,11 +4,11 @@
* For more information, see https://remix.run/docs/en/main/file-conventions/entry.server
*/

import { PassThrough } from "node:stream";
import type { EntryContext } from "@remix-run/node";
import { Response } from "@remix-run/node";
import { RemixServer } from "@remix-run/react";
import isbot from "isbot";
import { PassThrough } from "node:stream";
import { renderToPipeableStream } from "react-dom/server";

const ABORT_DELAY = 5_000;
@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";

bench("JSON.stringify({hello: 'world'})", () => JSON.stringify({ hello: "world" }));

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";

bench("console.log('hello')", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "../node_modules/mitata/src/cli.mjs";

bench("console.log", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));

@@ -1,4 +1,4 @@
import { bench, run } from "../runner.mjs";
import { bench, run } from "mitata";

bench("console.log", () => console.log("hello"));
bench("console.log({ hello: 'object' })", () => console.log({ hello: "object" }));
Some files were not shown because too many files have changed in this diff.