Compare commits

...

23 Commits

Author SHA1 Message Date
Ashcon Partovi
ea5fe0ceb6 Disable logs 2025-05-01 17:56:31 -07:00
Ashcon Partovi
c351cc54e9 Fix parsing logic in expectations 2025-05-01 17:19:55 -07:00
Ashcon Partovi
784625374f remove aarch64 asan, llvm crashes 2025-05-01 17:19:55 -07:00
Ashcon Partovi
a882f75d25 Fix 2025-05-01 17:19:55 -07:00
Ashcon Partovi
5ec04c1f8f Use cmake tar 2025-05-01 17:19:55 -07:00
Ashcon Partovi
40329916ff maybe fix rust/musl? 2025-05-01 17:19:55 -07:00
Ashcon Partovi
32023351f7 Fix musl and rust 2025-05-01 17:19:55 -07:00
Ashcon Partovi
a95a2eb6b4 Fix platform calcs 2025-05-01 17:19:55 -07:00
Ashcon Partovi
4f007193be full build 2025-05-01 17:19:55 -07:00
Ashcon Partovi
1eb2f6cc06 More expectations 2025-05-01 17:19:55 -07:00
Ashcon Partovi
5f55498983 debug 2025-05-01 17:19:55 -07:00
Ashcon Partovi
a95fe5edd9 debug 2025-05-01 17:19:55 -07:00
Ashcon Partovi
deca13b159 debug 2025-05-01 17:19:55 -07:00
Ashcon Partovi
2faccb7e5f Try again 2025-05-01 17:19:55 -07:00
Ashcon Partovi
2ff19cc38f Fix 2025-05-01 17:19:55 -07:00
Ashcon Partovi
2bc17d86bf Fix 2025-05-01 17:19:54 -07:00
Ashcon Partovi
e3bcc461fd test 2025-05-01 17:19:54 -07:00
Ashcon Partovi
24d61df72b Fix 2025-05-01 17:19:54 -07:00
Ashcon Partovi
d8ce65fb6b gz 2025-05-01 17:19:54 -07:00
Ashcon Partovi
24229f9f50 asan 2025-05-01 17:19:54 -07:00
Ashcon Partovi
3c1d7213c0 asan 2025-05-01 17:19:54 -07:00
Ashcon Partovi
7f483cc9cf asan 2025-05-01 17:19:54 -07:00
Ashcon Partovi
c3f7057d97 asan [build images] 2025-05-01 17:19:54 -07:00
35 changed files with 726 additions and 371 deletions

View File

@@ -35,7 +35,7 @@ import {
* @typedef {"musl"} Abi
* @typedef {"debian" | "ubuntu" | "alpine" | "amazonlinux"} Distro
* @typedef {"latest" | "previous" | "oldest" | "eol"} Tier
* @typedef {"release" | "assert" | "debug"} Profile
* @typedef {"release" | "assert" | "debug" | "asan"} Profile
*/
/**
@@ -107,6 +107,7 @@ const buildPlatforms = [
{ os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "x64", profile: "asan", distro: "amazonlinux", release: "2023", features: ["docker"] },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21" },
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21" },
@@ -125,14 +126,12 @@ const testPlatforms = [
{ os: "linux", arch: "aarch64", distro: "debian", release: "12", tier: "latest" },
{ os: "linux", arch: "x64", distro: "debian", release: "12", tier: "latest" },
{ os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12", tier: "latest" },
{ os: "linux", arch: "x64", profile: "asan", distro: "debian", release: "12", tier: "latest" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "24.04", tier: "latest" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04", tier: "previous" },
{ os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04", tier: "oldest" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "22.04", tier: "previous" },
{ os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", tier: "oldest" },
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", tier: "latest" },
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "22.04", tier: "previous" },
{ os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "20.04", tier: "oldest" },
{ os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
{ os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" },
@@ -379,30 +378,32 @@ function getTestAgent(platform, options) {
* @returns {Record<string, string | undefined>}
*/
function getBuildEnv(target, options) {
const { profile, baseline, abi } = target;
const release = !profile || profile === "release";
const { baseline, abi } = target;
const { canary } = options;
const revision = typeof canary === "number" ? canary : 1;
const isMusl = abi === "musl";
let CMAKE_BUILD_TYPE = release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo";
if (isMusl && release) {
CMAKE_BUILD_TYPE = "MinSizeRel";
}
return {
CMAKE_BUILD_TYPE,
ENABLE_BASELINE: baseline ? "ON" : "OFF",
ENABLE_CANARY: revision > 0 ? "ON" : "OFF",
CANARY_REVISION: revision,
ENABLE_ASSERTIONS: release ? "OFF" : "ON",
ENABLE_LOGS: release ? "OFF" : "ON",
ABI: isMusl ? "musl" : undefined,
ABI: abi === "musl" ? "musl" : undefined,
CMAKE_VERBOSE_MAKEFILE: "ON",
CMAKE_TLS_VERIFY: "0",
};
}
/**
 * Builds the `bun run build:<profile>` shell command for the given target.
 * Falls back to the "release" profile when the target does not specify one.
 * @param {Target} target
 * @param {PipelineOptions} options
 * @returns {string}
 */
function getBuildCommand(target, options) {
  const selectedProfile = target.profile || "release";
  return `bun run build:${selectedProfile}`;
}
/**
* @param {Platform} platform
* @param {PipelineOptions} options
@@ -416,7 +417,7 @@ function getBuildVendorStep(platform, options) {
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: getBuildEnv(platform, options),
command: "bun run build:ci --target dependencies",
command: `${getBuildCommand(platform, options)} --target dependencies`,
};
}
@@ -426,6 +427,7 @@ function getBuildVendorStep(platform, options) {
* @returns {Step}
*/
function getBuildCppStep(platform, options) {
const command = getBuildCommand(platform, options);
return {
key: `${getTargetKey(platform)}-build-cpp`,
label: `${getTargetLabel(platform)} - build-cpp`,
@@ -439,7 +441,7 @@ function getBuildCppStep(platform, options) {
// We used to build the C++ dependencies and bun in separate steps.
// However, as long as the zig build takes longer than both sequentially,
// it's cheaper to run them in the same step. Can be revisited in the future.
command: ["bun run build:ci --target bun", "bun run build:ci --target dependencies"],
command: [`${command} --target bun`, `${command} --target dependencies`],
};
}
@@ -473,7 +475,7 @@ function getBuildZigStep(platform, options) {
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: getBuildEnv(platform, options),
command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`,
command: `${getBuildCommand(platform, options)} --target bun-zig --toolchain ${toolchain}`,
timeout_in_minutes: 35,
};
}
@@ -495,7 +497,7 @@ function getLinkBunStep(platform, options) {
BUN_LINK_ONLY: "ON",
...getBuildEnv(platform, options),
},
command: "bun run build:ci --target bun",
command: `${getBuildCommand(platform, options)} --target bun`,
};
}
@@ -512,7 +514,7 @@ function getBuildBunStep(platform, options) {
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
env: getBuildEnv(platform, options),
command: "bun run build:ci",
command: getBuildCommand(platform, options),
};
}
@@ -531,7 +533,7 @@ function getBuildBunStep(platform, options) {
* @returns {Step}
*/
function getTestBunStep(platform, options, testOptions = {}) {
const { os } = platform;
const { os, profile } = platform;
const { buildId, unifiedTests, testFiles } = testOptions;
const args = [`--step=${getTargetKey(platform)}-build-bun`];
@@ -555,6 +557,7 @@ function getTestBunStep(platform, options, testOptions = {}) {
retry: getRetry(),
cancel_on_build_failing: isMergeQueue(),
parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
timeout_in_minutes: profile === "asan" ? 90 : 30,
command:
os === "windows"
? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`
@@ -628,17 +631,18 @@ function getReleaseStep(buildPlatforms, options) {
}
/**
* @param {Platform[]} buildPlatforms
* @returns {Step}
*/
function getBenchmarkStep() {
function getBenchmarkStep(buildPlatforms) {
return {
key: "benchmark",
label: "📊",
agents: {
queue: "build-zig",
queue: "build-image",
},
command: "bun .buildkite/scripts/upload-benchmark.ts",
depends_on: [`linux-x64-build-bun`],
depends_on: buildPlatforms.map(platform => `${getTargetKey(platform)}-build-bun`),
command: "node .buildkite/scripts/upload-benchmark.ts",
};
}
@@ -728,7 +732,6 @@ function getBenchmarkStep() {
* @property {string | boolean} [buildImages]
* @property {string | boolean} [publishImages]
* @property {number} [canary]
* @property {Profile[]} [buildProfiles]
* @property {Platform[]} [buildPlatforms]
* @property {Platform[]} [testPlatforms]
* @property {string[]} [testFiles]
@@ -823,6 +826,10 @@ function getOptionsStep() {
label: `${getEmoji("assert")} Release with Assertions`,
value: "assert",
},
{
label: `${getEmoji("asan")} Release with ASAN`,
value: "asan",
},
{
label: `${getEmoji("debug")} Debug`,
value: "debug",
@@ -965,6 +972,7 @@ async function getPipelineOptions() {
?.map(item => item.trim())
?.filter(Boolean);
const buildProfiles = parseArray(options["build-profiles"]);
const buildPlatformKeys = parseArray(options["build-platforms"]);
const testPlatformKeys = parseArray(options["test-platforms"]);
return {
@@ -977,12 +985,11 @@ async function getPipelineOptions() {
testFiles: parseArray(options["test-files"]),
unifiedBuilds: parseBoolean(options["unified-builds"]),
unifiedTests: parseBoolean(options["unified-tests"]),
buildProfiles: parseArray(options["build-profiles"]),
buildPlatforms: buildPlatformKeys?.length
? buildPlatformKeys.map(key => buildPlatformsMap.get(key))
? buildPlatformKeys.flatMap(key => buildProfiles.map(profile => ({ ...buildPlatformsMap.get(key), profile })))
: Array.from(buildPlatformsMap.values()),
testPlatforms: testPlatformKeys?.length
? testPlatformKeys.map(key => testPlatformsMap.get(key))
? testPlatformKeys.flatMap(key => buildProfiles.map(profile => ({ ...testPlatformsMap.get(key), profile })))
: Array.from(testPlatformsMap.values()),
dryRun: parseBoolean(options["dry-run"]),
};
@@ -1017,7 +1024,6 @@ async function getPipelineOptions() {
publishImages: parseOption(/\[(publish images?)\]/i),
buildPlatforms: Array.from(buildPlatformsMap.values()),
testPlatforms: Array.from(testPlatformsMap.values()),
buildProfiles: ["release"],
};
}
@@ -1040,7 +1046,7 @@ async function getPipeline(options = {}) {
return;
}
const { buildProfiles = [], buildPlatforms = [], testPlatforms = [], buildImages, publishImages } = options;
const { buildPlatforms = [], testPlatforms = [], buildImages, publishImages } = options;
const imagePlatforms = new Map(
buildImages || publishImages
? [...buildPlatforms, ...testPlatforms]
@@ -1077,22 +1083,20 @@ async function getPipeline(options = {}) {
if (!buildId) {
steps.push(
...buildPlatforms
.flatMap(platform => buildProfiles.map(profile => ({ ...platform, profile })))
.map(target => {
const imageKey = getImageKey(target);
...buildPlatforms.map(target => {
const imageKey = getImageKey(target);
return getStepWithDependsOn(
{
key: getTargetKey(target),
group: getTargetLabel(target),
steps: unifiedBuilds
? [getBuildBunStep(target, options)]
: [getBuildCppStep(target, options), getBuildZigStep(target, options), getLinkBunStep(target, options)],
},
imagePlatforms.has(imageKey) ? `${imageKey}-build-image` : undefined,
);
}),
return getStepWithDependsOn(
{
key: getTargetKey(target),
group: getTargetLabel(target),
steps: unifiedBuilds
? [getBuildBunStep(target, options)]
: [getBuildCppStep(target, options), getBuildZigStep(target, options), getLinkBunStep(target, options)],
},
imagePlatforms.has(imageKey) ? `${imageKey}-build-image` : undefined,
);
}),
);
}
@@ -1100,23 +1104,20 @@ async function getPipeline(options = {}) {
const { skipTests, forceTests, unifiedTests, testFiles } = options;
if (!skipTests || forceTests) {
steps.push(
...testPlatforms
.flatMap(platform => buildProfiles.map(profile => ({ ...platform, profile })))
.map(target => ({
key: getTargetKey(target),
group: getTargetLabel(target),
steps: [getTestBunStep(target, options, { unifiedTests, testFiles, buildId })],
})),
...testPlatforms.map(target => ({
key: getTargetKey(target),
group: getTargetLabel(target),
steps: [getTestBunStep(target, options, { unifiedTests, testFiles, buildId })],
})),
);
}
}
if (isMainBranch()) {
steps.push(getReleaseStep(buildPlatforms, options));
steps.push(getBenchmarkStep(buildPlatforms));
}
steps.push(getBenchmarkStep());
/** @type {Map<string, GroupStep>} */
const stepsByGroup = new Map();

View File

@@ -92,6 +92,7 @@ const BunBuildOptions = struct {
opts.addOption([:0]const u8, "sha", b.allocator.dupeZ(u8, this.sha) catch @panic("OOM"));
opts.addOption(bool, "baseline", this.isBaseline());
opts.addOption(bool, "enable_logs", this.enable_logs);
opts.addOption(bool, "enable_asan", this.enable_asan);
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);

View File

@@ -18,6 +18,7 @@ foreach(target ${targets})
endforeach()
# --- CPU target ---
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
if(APPLE)
register_compiler_flags(-mcpu=apple-m1)
@@ -35,6 +36,7 @@ else()
endif()
# --- MSVC runtime ---
if(WIN32)
register_compiler_flags(
DESCRIPTION "Use static MSVC runtime"
@@ -44,14 +46,8 @@ if(WIN32)
)
endif()
if(ENABLE_ASAN)
register_compiler_flags(
DESCRIPTION "Enable AddressSanitizer"
-fsanitize=address
)
endif()
# --- Optimization level ---
if(DEBUG)
register_compiler_flags(
DESCRIPTION "Disable optimization"
@@ -73,6 +69,7 @@ else()
endif()
# --- Debug level ---
if(WIN32)
register_compiler_flags(
DESCRIPTION "Enable debug symbols (.pdb)"
@@ -104,6 +101,7 @@ endif()
# -fno-eliminate-unused-debug-types # Don't eliminate unused debug symbols
# --- C/C++ flags ---
register_compiler_flags(
DESCRIPTION "Disable C/C++ exceptions"
-fno-exceptions ${UNIX}
@@ -166,7 +164,7 @@ register_compiler_flags(
# having this enabled in debug mode on macOS >=14 causes libarchive to fail to configure with the error:
# > pid_t doesn't exist on this platform?
if((DEBUG AND LINUX) OR((NOT DEBUG) AND UNIX))
if((DEBUG AND LINUX) OR ((NOT DEBUG) AND UNIX))
register_compiler_flags(
DESCRIPTION "Emit an address-significance table"
-faddrsig
@@ -185,7 +183,20 @@ if(WIN32)
)
endif()
if(ENABLE_ASAN)
register_compiler_flags(
DESCRIPTION "Enable AddressSanitizer"
-fsanitize=address
)
register_linker_flags(
DESCRIPTION "Enable AddressSanitizer"
-fsanitize=address
)
endif()
# --- Linker flags ---
if(LINUX)
register_linker_flags(
DESCRIPTION "Disable relocation read-only (RELRO)"
@@ -246,6 +257,7 @@ else()
endif()
# --- Diagnostics ---
if(UNIX)
register_compiler_flags(
DESCRIPTION "Enable color diagnostics"
@@ -259,6 +271,7 @@ register_compiler_flags(
)
# --- LTO ---
if(ENABLE_LTO)
register_compiler_flags(
DESCRIPTION "Enable link-time optimization (LTO)"
@@ -284,6 +297,7 @@ if(ENABLE_LTO)
endif()
# --- Remapping ---
if(UNIX AND CI)
register_compiler_flags(
DESCRIPTION "Remap source files"

View File

@@ -393,7 +393,7 @@ function(register_command)
endforeach()
foreach(source ${CMD_SOURCES})
if(NOT source MATCHES "^(${CWD}|${BUILD_PATH}|${CACHE_PATH}|${VENDOR_PATH})")
if(NOT source MATCHES "^(${CWD}|${BUILD_PATH}|${CACHE_PATH}|${VENDOR_PATH}|${CARGO_HOME}|${RUSTUP_HOME})")
message(FATAL_ERROR "register_command: SOURCES contains \"${source}\", if it's a path, make it absolute, otherwise add it to TARGETS instead")
endif()
list(APPEND CMD_EFFECTIVE_DEPENDS ${source})
@@ -406,29 +406,31 @@ function(register_command)
set(CMD_EFFECTIVE_OUTPUTS)
foreach(output ${CMD_OUTPUTS})
if(NOT output MATCHES "^(${CWD}|${BUILD_PATH}|${CACHE_PATH}|${VENDOR_PATH})")
if(NOT output MATCHES "^(${CWD}|${BUILD_PATH}|${CACHE_PATH}|${VENDOR_PATH}|${CARGO_HOME}|${RUSTUP_HOME})")
message(FATAL_ERROR "register_command: OUTPUTS contains \"${output}\", if it's a path, make it absolute")
endif()
list(APPEND CMD_EFFECTIVE_OUTPUTS ${output})
endforeach()
foreach(artifact ${CMD_ARTIFACTS})
if(NOT artifact MATCHES "^(${CWD}|${BUILD_PATH}|${CACHE_PATH}|${VENDOR_PATH})")
if(NOT artifact MATCHES "^(${CWD}|${BUILD_PATH}|${CACHE_PATH}|${VENDOR_PATH}|${CARGO_HOME}|${RUSTUP_HOME})")
message(FATAL_ERROR "register_command: ARTIFACTS contains \"${artifact}\", if it's a path, make it absolute")
endif()
list(APPEND CMD_EFFECTIVE_OUTPUTS ${artifact})
if(BUILDKITE)
file(RELATIVE_PATH filename ${BUILD_PATH} ${artifact})
if(filename STREQUAL "libbun-profile.a")
# libbun-profile.a is now over 5 GB in size, so compress it before uploading
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${BUILD_PATH}/codegen)
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${CACHE_PATH})
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} gzip -1 libbun-profile.a)
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload libbun-profile.a.gz)
else()
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename})
if(artifact MATCHES "\\.tar\\.gz$")
string(REGEX REPLACE "\\.tar\\.gz$" "" filename ${artifact})
if(BUILDKITE)
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} ${CMAKE_COMMAND} -E tar cz ${artifact} ${filename})
file(RELATIVE_PATH artifactname ${BUILD_PATH} ${artifact})
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${artifactname})
endif()
set(artifact ${filename})
elseif(BUILDKITE)
file(RELATIVE_PATH filename ${BUILD_PATH} ${artifact})
list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename})
endif()
list(APPEND CMD_EFFECTIVE_OUTPUTS ${artifact})
endforeach()
foreach(output ${CMD_EFFECTIVE_OUTPUTS})
@@ -700,8 +702,8 @@ function(register_cmake_command)
set(MAKE_EFFECTIVE_ARGS -B${MAKE_BUILD_PATH} ${CMAKE_ARGS})
set(setFlags GENERATOR BUILD_TYPE)
set(appendFlags C_FLAGS CXX_FLAGS LINKER_FLAGS)
set(setFlags GENERATOR BUILD_TYPE TOOLCHAIN_FILE)
set(appendFlags C_FLAGS CXX_FLAGS ASM_FLAGS LINKER_FLAGS)
set(specialFlags POSITION_INDEPENDENT_CODE)
set(flags ${setFlags} ${appendFlags} ${specialFlags})
@@ -851,7 +853,7 @@ function(register_compiler_flags)
cmake_parse_arguments(COMPILER "" "${args}" "${multiArgs}" ${ARGN})
if(NOT COMPILER_LANGUAGES)
set(COMPILER_LANGUAGES C CXX)
set(COMPILER_LANGUAGES C CXX ASM)
endif()
set(COMPILER_FLAGS)
@@ -932,7 +934,7 @@ endfunction()
function(print_compiler_flags)
get_property(targets DIRECTORY PROPERTY BUILDSYSTEM_TARGETS)
set(languages C CXX)
set(languages C CXX ASM)
foreach(target ${targets})
get_target_property(type ${target} TYPE)
message(STATUS "Target: ${target}")

View File

@@ -26,15 +26,6 @@ else()
setx(DEBUG OFF)
endif()
optionx(BUN_TEST BOOL "Build Bun's unit test suite instead of the normal build" DEFAULT OFF)
if (BUN_TEST)
setx(TEST ON)
else()
setx(TEST OFF)
endif()
if(CMAKE_BUILD_TYPE MATCHES "MinSizeRel")
setx(ENABLE_SMOL ON)
endif()
@@ -46,7 +37,7 @@ elseif(WIN32)
elseif(LINUX)
setx(OS "linux")
else()
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_SYSTEM_NAME}")
unsupported(CMAKE_SYSTEM_NAME)
endif()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|arm")
@@ -54,7 +45,7 @@ if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|arm")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
setx(ARCH "x64")
else()
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
unsupported(CMAKE_SYSTEM_PROCESSOR)
endif()
if(LINUX)
@@ -71,14 +62,7 @@ if(ARCH STREQUAL "x64")
optionx(ENABLE_BASELINE BOOL "If baseline features should be used for older CPUs (e.g. disables AVX, AVX2)" DEFAULT OFF)
endif()
# Disabling logs by default for tests yields faster builds
if (DEBUG AND NOT TEST)
set(DEFAULT_ENABLE_LOGS ON)
else()
set(DEFAULT_ENABLE_LOGS OFF)
endif()
optionx(ENABLE_LOGS BOOL "If debug logs should be enabled" DEFAULT ${DEFAULT_ENABLE_LOGS})
optionx(ENABLE_LOGS BOOL "If debug logs should be enabled" DEFAULT ${DEBUG})
optionx(ENABLE_ASSERTIONS BOOL "If debug assertions should be enabled" DEFAULT ${DEBUG})
optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ON)
@@ -101,18 +85,24 @@ optionx(ENABLE_LTO BOOL "If LTO (link-time optimization) should be used" DEFAULT
if(LINUX)
optionx(ENABLE_VALGRIND BOOL "If Valgrind support should be enabled" DEFAULT OFF)
if(ENABLE_VALGRIND AND NOT ENABLE_BASELINE)
message(WARNING "If valgrind is enabled, baseline must also be enabled")
setx(ENABLE_BASELINE ON)
endif()
endif()
if(DEBUG AND APPLE AND CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT ON)
if(DEBUG AND APPLE AND ARCH STREQUAL "aarch64")
set(DEFAULT_ASAN ON)
else()
optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT OFF)
set(DEFAULT_ASAN OFF)
endif()
optionx(ENABLE_PRETTIER BOOL "If prettier should be ran" DEFAULT OFF)
optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT ${DEFAULT_ASAN})
if(USE_VALGRIND AND NOT USE_BASELINE)
message(WARNING "If valgrind is enabled, baseline must also be enabled")
setx(USE_BASELINE ON)
if(ENABLE_ASAN AND ENABLE_LTO)
message(WARNING "ASAN with LTO is not supported, disabling LTO")
setx(ENABLE_LTO OFF)
endif()
if(BUILDKITE_COMMIT)

View File

@@ -1,8 +1,7 @@
if(DEBUG)
set(bun bun-debug)
# elseif(ENABLE_SMOL)
# set(bun bun-smol-profile)
# set(bunStrip bun-smol)
elseif(ENABLE_ASAN)
set(bun bun-asan)
elseif(ENABLE_VALGRIND)
set(bun bun-valgrind)
elseif(ENABLE_ASSERTIONS)
@@ -12,10 +11,6 @@ else()
set(bunStrip bun)
endif()
if(TEST)
set(bun ${bun}-test)
endif()
set(bunExe ${bun}${CMAKE_EXECUTABLE_SUFFIX})
if(bunStrip)
@@ -361,7 +356,6 @@ register_command(
${BUN_BAKE_RUNTIME_CODEGEN_SOURCES}
${BUN_BAKE_RUNTIME_CODEGEN_SCRIPT}
OUTPUTS
${CODEGEN_PATH}/bake_empty_file
${BUN_BAKE_RUNTIME_OUTPUTS}
)
@@ -544,15 +538,9 @@ set(BUN_ZIG_GENERATED_SOURCES
${BUN_ERROR_CODE_OUTPUTS}
${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
${BUN_JAVASCRIPT_OUTPUTS}
${BUN_BAKE_RUNTIME_OUTPUTS}
)
# In debug builds, these are not embedded, but rather referenced at runtime.
if (DEBUG)
list(APPEND BUN_ZIG_GENERATED_SOURCES ${CODEGEN_PATH}/bake_empty_file)
else()
list(APPEND BUN_ZIG_GENERATED_SOURCES ${BUN_BAKE_RUNTIME_OUTPUTS})
endif()
if (TEST)
set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-test.o)
set(ZIG_STEPS test)
@@ -599,6 +587,7 @@ register_command(
-Doptimize=${ZIG_OPTIMIZE}
-Dcpu=${ZIG_CPU}
-Denable_logs=$<IF:$<BOOL:${ENABLE_LOGS}>,true,false>
-Denable_asan=$<IF:$<BOOL:${ENABLE_ASAN}>,true,false>
-Dversion=${VERSION}
-Dreported_nodejs_version=${NODEJS_VERSION}
-Dcanary=${CANARY_REVISION}
@@ -737,7 +726,7 @@ elseif(BUN_CPP_ONLY)
COMMAND
${CMAKE_COMMAND} -E true
ARTIFACTS
${BUN_CPP_OUTPUT}
${BUN_CPP_OUTPUT}.tar.gz
)
else()
add_executable(${bun} ${BUN_CPP_SOURCES} ${WINDOWS_RESOURCES})
@@ -871,7 +860,7 @@ if(NOT WIN32)
)
if(DEBUG)
# TODO: this shouldn't be necessary long term
if (NOT ABI STREQUAL "musl")
if(NOT ABI STREQUAL "musl" AND NOT CMAKE_CROSSCOMPILING)
target_compile_options(${bun} PUBLIC
-fsanitize=null
-fsanitize-recover=all
@@ -888,15 +877,6 @@ if(NOT WIN32)
)
endif()
if (ENABLE_ASAN)
target_compile_options(${bun} PUBLIC
-fsanitize=address
)
target_link_libraries(${bun} PUBLIC
-fsanitize=address
)
endif()
target_compile_options(${bun} PUBLIC
-Werror=return-type
-Werror=return-stack-address
@@ -1201,44 +1181,47 @@ if(NOT BUN_CPP_ONLY)
)
endif()
register_command(
TARGET
${bun}
TARGET_PHASE
POST_BUILD
COMMENT
"Testing ${bun}"
COMMAND
${CMAKE_COMMAND}
-E env BUN_DEBUG_QUIET_LOGS=1
${BUILD_PATH}/${bunExe}
--revision
CWD
${BUILD_PATH}
)
if(CI)
set(BUN_FEATURES_SCRIPT ${CWD}/scripts/features.mjs)
# If cross-compiling, we can't test the binary
if(NOT CMAKE_CROSSCOMPILING)
register_command(
TARGET
${bun}
TARGET_PHASE
POST_BUILD
COMMENT
"Generating features.json"
"Testing ${bun}"
COMMAND
${CMAKE_COMMAND}
-E env
BUN_GARBAGE_COLLECTOR_LEVEL=1
BUN_DEBUG_QUIET_LOGS=1
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING=1
${BUILD_PATH}/${bunExe}
${BUN_FEATURES_SCRIPT}
-E env BUN_DEBUG_QUIET_LOGS=1
${BUILD_PATH}/${bunExe}
--revision
CWD
${BUILD_PATH}
ARTIFACTS
${BUILD_PATH}/features.json
)
if(CI)
set(BUN_FEATURES_SCRIPT ${CWD}/scripts/features.mjs)
register_command(
TARGET
${bun}
TARGET_PHASE
POST_BUILD
COMMENT
"Generating features.json"
COMMAND
${CMAKE_COMMAND}
-E env
BUN_GARBAGE_COLLECTOR_LEVEL=1
BUN_DEBUG_QUIET_LOGS=1
BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING=1
${BUILD_PATH}/${bunExe}
${BUN_FEATURES_SCRIPT}
CWD
${BUILD_PATH}
ARTIFACTS
${BUILD_PATH}/features.json
)
endif()
endif()
if(CMAKE_HOST_APPLE AND bunStrip)
@@ -1266,14 +1249,23 @@ if(NOT BUN_CPP_ONLY)
if(CI)
set(bunTriplet bun-${OS}-${ARCH})
if(LINUX AND ABI STREQUAL "musl")
set(bunTriplet ${bunTriplet}-musl)
endif()
if(ENABLE_BASELINE)
set(bunTriplet ${bunTriplet}-baseline)
endif()
string(REPLACE bun ${bunTriplet} bunPath ${bun})
set(bunFiles ${bunExe} features.json)
set(bunFiles ${bunExe})
if(NOT CMAKE_CROSSCOMPILING)
list(APPEND bunFiles features.json)
endif()
if(WIN32)
list(APPEND bunFiles ${bun}.pdb)
elseif(APPLE)
@@ -1284,7 +1276,6 @@ if(NOT BUN_CPP_ONLY)
list(APPEND bunFiles ${bun}.linker-map)
endif()
register_command(
TARGET
${bun}

View File

@@ -16,21 +16,29 @@ else()
set(LOLHTML_BUILD_TYPE release)
endif()
set(LOLHTML_LIBRARY ${LOLHTML_BUILD_PATH}/${LOLHTML_BUILD_TYPE}/${CMAKE_STATIC_LIBRARY_PREFIX}lolhtml${CMAKE_STATIC_LIBRARY_SUFFIX})
if(CMAKE_CROSSCOMPILING)
set(LOLHTML_LIBRARY ${LOLHTML_BUILD_PATH}/${RUST_TARGET}/${LOLHTML_BUILD_TYPE}/${CMAKE_STATIC_LIBRARY_PREFIX}lolhtml${CMAKE_STATIC_LIBRARY_SUFFIX})
else()
set(LOLHTML_LIBRARY ${LOLHTML_BUILD_PATH}/${LOLHTML_BUILD_TYPE}/${CMAKE_STATIC_LIBRARY_PREFIX}lolhtml${CMAKE_STATIC_LIBRARY_SUFFIX})
endif()
set(LOLHTML_BUILD_ARGS
--target-dir ${BUILD_PATH}/lolhtml
)
set(LOLHTML_BUILD_ARGS --target-dir ${BUILD_PATH}/lolhtml)
if(RELEASE)
list(APPEND LOLHTML_BUILD_ARGS --release)
endif()
if(CMAKE_CROSSCOMPILING)
list(APPEND LOLHTML_BUILD_ARGS --target ${RUST_TARGET})
endif()
# Windows requires unwind tables, apparently.
if (NOT WIN32)
# The encoded escape sequences are intentional. They're how you delimit multiple arguments in a single environment variable.
# Also add rust optimization flag for smaller binary size, but not huge speed penalty.
set(RUSTFLAGS "-Cpanic=abort-Cdebuginfo=0-Cforce-unwind-tables=no-Copt-level=s")
if(NOT WIN32)
set(RUST_FLAGS "-Cpanic=abort -Cdebuginfo=0 -Cforce-unwind-tables=no -Copt-level=s")
endif()
if(TARGET clone-rust)
set(LOLHTML_TARGETS clone-rust)
endif()
register_command(
@@ -42,15 +50,22 @@ register_command(
${CARGO_EXECUTABLE}
build
${LOLHTML_BUILD_ARGS}
TARGETS
${LOLHTML_TARGETS}
ARTIFACTS
${LOLHTML_LIBRARY}
ENVIRONMENT
CARGO_TERM_COLOR=always
CARGO_TERM_VERBOSE=true
CARGO_TERM_DIAGNOSTIC=true
CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
CARGO_HOME=${CARGO_HOME}
RUSTUP_HOME=${RUSTUP_HOME}
CC=${CMAKE_C_COMPILER}
CFLAGS=${CMAKE_C_FLAGS}
CXX=${CMAKE_CXX_COMPILER}
CXXFLAGS=${CMAKE_CXX_FLAGS}
AR=${CMAKE_AR}
RUSTFLAGS=${RUST_FLAGS}
)
target_link_libraries(${bun} PRIVATE ${LOLHTML_LIBRARY})

View File

@@ -1,6 +1,8 @@
set(CMAKE_SYSTEM_NAME Darwin)
set(CMAKE_SYSTEM_PROCESSOR x64)
set(CMAKE_OSX_ARCHITECTURES x86_64)
set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)
set(CMAKE_C_COMPILER_TARGET x86_64-apple-darwin)
set(CMAKE_CXX_COMPILER_TARGET x86_64-apple-darwin)
set(CMAKE_ASM_COMPILER_TARGET x86_64-apple-darwin)
set(ZIG_TARGET x86_64-macos-none)
set(RUST_TARGET x86_64-apple-darwin)

View File

@@ -120,14 +120,16 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
endif()
if(BUILDKITE)
if(BUILDKITE_ARTIFACT_PATH STREQUAL "libbun-profile.a")
set(BUILDKITE_ARTIFACT_PATH libbun-profile.a.gz)
endif()
set(BUILDKITE_DOWNLOAD_COMMAND buildkite-agent artifact download ${BUILDKITE_ARTIFACT_PATH} . --build ${BUILDKITE_BUILD_UUID} --step ${BUILDKITE_JOB_ID})
else()
set(BUILDKITE_DOWNLOAD_COMMAND curl -L -o ${BUILDKITE_ARTIFACT_PATH} ${BUILDKITE_ARTIFACTS_URL}/${BUILDKITE_ARTIFACT_ID})
endif()
if(BUILDKITE_ARTIFACT_PATH MATCHES "\\.tar\\.gz$")
list(APPEND BUILDKITE_DOWNLOAD_COMMAND COMMAND tar xf ${BUILDKITE_ARTIFACT_PATH})
string(REGEX REPLACE "\\.tar\\.gz$" "" BUILDKITE_ARTIFACT_PATH ${BUILDKITE_ARTIFACT_PATH})
endif()
add_custom_command(
COMMENT
"Downloading ${BUILDKITE_ARTIFACT_PATH}"
@@ -138,20 +140,6 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX})
OUTPUT
${BUILD_PATH}/${BUILDKITE_ARTIFACT_PATH}
)
if(BUILDKITE_ARTIFACT_PATH STREQUAL "libbun-profile.a.gz")
add_custom_command(
COMMENT
"Unpacking libbun-profile.a.gz"
VERBATIM COMMAND
gunzip libbun-profile.a.gz
WORKING_DIRECTORY
${BUILD_PATH}
OUTPUT
${BUILD_PATH}/libbun-profile.a
DEPENDS
${BUILD_PATH}/libbun-profile.a.gz
)
endif()
endforeach()
list(APPEND BUILDKITE_JOBS_MATCH ${BUILDKITE_JOB_NAME})

View File

@@ -1,3 +1,33 @@
optionx(ENABLE_RUST BOOL "If Rust should be used for compilation" DEFAULT ON)
if(NOT ENABLE_RUST)
return()
endif()
if(ARCH STREQUAL "x64")
set(DEFAULT_RUST_ARCH x86_64)
elseif(ARCH STREQUAL "aarch64")
set(DEFAULT_RUST_ARCH aarch64)
else()
unsupported(ARCH)
endif()
if(APPLE)
set(DEFAULT_RUST_TARGET ${DEFAULT_RUST_ARCH}-apple-darwin)
elseif(LINUX)
if(ABI STREQUAL "musl")
set(DEFAULT_RUST_TARGET ${DEFAULT_RUST_ARCH}-unknown-linux-musl)
else()
set(DEFAULT_RUST_TARGET ${DEFAULT_RUST_ARCH}-unknown-linux-gnu)
endif()
elseif(WIN32)
set(DEFAULT_RUST_TARGET ${DEFAULT_RUST_ARCH}-pc-windows-msvc)
else()
unsupported(CMAKE_SYSTEM_NAME)
endif()
optionx(RUST_TARGET STRING "The target architecture for Rust" DEFAULT ${DEFAULT_RUST_TARGET})
if(DEFINED ENV{CARGO_HOME})
set(CARGO_HOME $ENV{CARGO_HOME})
elseif(CMAKE_HOST_WIN32)
@@ -9,6 +39,15 @@ else()
set(CARGO_HOME $ENV{HOME}/.cargo)
endif()
find_command(
VARIABLE
CARGO_EXECUTABLE
COMMAND
cargo
PATHS
${CARGO_HOME}/bin
)
if(DEFINED ENV{RUSTUP_HOME})
set(RUSTUP_HOME $ENV{RUSTUP_HOME})
elseif(CMAKE_HOST_WIN32)
@@ -20,33 +59,27 @@ else()
set(RUSTUP_HOME $ENV{HOME}/.rustup)
endif()
find_command(
VARIABLE
CARGO_EXECUTABLE
COMMAND
cargo
PATHS
${CARGO_HOME}/bin
REQUIRED
OFF
)
if(CMAKE_CROSSCOMPILING)
find_command(
VARIABLE
RUSTUP_EXECUTABLE
COMMAND
rustup
PATHS
${CARGO_HOME}/bin
)
if(EXISTS ${CARGO_EXECUTABLE})
if(CARGO_EXECUTABLE MATCHES "^${CARGO_HOME}")
setx(CARGO_HOME ${CARGO_HOME})
setx(RUSTUP_HOME ${RUSTUP_HOME})
endif()
return()
register_command(
TARGET
clone-rust
COMMENT
"Downloading Rust toolchain: ${RUST_TARGET}"
COMMAND
${RUSTUP_EXECUTABLE}
target
add
${RUST_TARGET}
OUTPUTS
${CARGO_EXECUTABLE}
)
endif()
if(CMAKE_HOST_WIN32)
set(CARGO_INSTALL_COMMAND "choco install rust")
else()
set(CARGO_INSTALL_COMMAND "curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh")
endif()
message(FATAL_ERROR "Command not found: cargo\n"
"Do you have Rust installed? To fix this, try running:\n"
" ${CARGO_INSTALL_COMMAND}\n"
)

View File

@@ -56,15 +56,15 @@ elseif(APPLE)
elseif(UNIX)
set(WEBKIT_OS "linux")
else()
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_SYSTEM_NAME}")
unsupported(OS)
endif()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
if(ARCH STREQUAL "aarch64")
set(WEBKIT_ARCH "arm64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
elseif(ARCH STREQUAL "x64")
set(WEBKIT_ARCH "amd64")
else()
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
unsupported(ARCH)
endif()
if(LINUX AND ABI STREQUAL "musl")

View File

@@ -24,7 +24,11 @@ set(ZIG_COMMIT "a207204ee57a061f2fb96c7bae0c491b609e73a5")
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
if(CMAKE_BUILD_TYPE STREQUAL "Release")
set(DEFAULT_ZIG_OPTIMIZE "ReleaseFast")
if(ENABLE_ASAN)
set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe")
else()
set(DEFAULT_ZIG_OPTIMIZE "ReleaseFast")
endif()
elseif(CMAKE_BUILD_TYPE STREQUAL "RelWithDebInfo")
set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe")
elseif(CMAKE_BUILD_TYPE STREQUAL "MinSizeRel")

View File

@@ -38,6 +38,7 @@
"build:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -B build/release",
"build:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh",
"build:assert": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=RelWithDebInfo -DENABLE_ASSERTIONS=ON -DENABLE_LOGS=ON -B build/release-assert",
"build:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DENABLE_ASSERTIONS=ON -DENABLE_LOGS=OFF -DENABLE_ASAN=ON -DENABLE_LTO=OFF -B build/release-asan",
"build:logs": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=ON -B build/release-logs",
"build:safe": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=ReleaseSafe -B build/release-safe",
"build:smol": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=MinSizeRel -B build/release-smol",
@@ -58,9 +59,6 @@
"test:release": "node scripts/runner.node.mjs --exec-path ./build/release/bun",
"banned": "bun test test/internal/ban-words.test.ts",
"zig": "vendor/zig/zig.exe",
"zig:test": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DBUN_TEST=ON -B build/debug",
"zig:test:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DBUNTEST=ON -B build/release",
"zig:test:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DBUN_TEST=ON -DZIG_OPTIMIZE=ReleaseSafe -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh",
"zig:fmt": "bun run zig-format",
"zig:check": "bun run zig build check --summary new",
"zig:check-all": "bun run zig build check-all --summary new",

View File

@@ -1364,7 +1364,7 @@ void us_internal_ssl_socket_context_add_server_name(
if (ssl_context) {
/* Attach the user data to this context */
if (1 != SSL_CTX_set_ex_data(ssl_context, 0, user)) {
#if BUN_DEBUG
#if ASSERT_ENABLED
printf("CANNOT SET EX DATA!\n");
abort();
#endif
@@ -1392,7 +1392,7 @@ int us_bun_internal_ssl_socket_context_add_server_name(
/* Attach the user data to this context */
if (1 != SSL_CTX_set_ex_data(ssl_context, 0, user)) {
#if BUN_DEBUG
#if ASSERT_ENABLED
printf("CANNOT SET EX DATA!\n");
abort();
#endif

View File

@@ -29,7 +29,6 @@ endif()
if(CMAKE_C_COMPILER_ID MATCHES "GNU")
add_compile_options(
-fheinous-gnu-extensions
-Wno-string-plus-int
-Wno-deprecated-declarations
)
@@ -57,81 +56,100 @@ if(WIN32)
)
endif()
file(READ ${CMAKE_CURRENT_SOURCE_DIR}/VERSION TCC_VERSION)
option(BUILD_C2STR "Build the c2str utility" ON)
option(BUILD_TCC "Build the tcc compiler" ON)
add_compile_definitions(TCC_VERSION=\"${TCC_VERSION}\")
if(BUILD_C2STR)
if(CMAKE_CROSSCOMPILING)
file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/c2str)
add_custom_target(c2str.exe
COMMAND
${CMAKE_COMMAND}
-S ${CMAKE_CURRENT_SOURCE_DIR}
-B ${CMAKE_CURRENT_BINARY_DIR}/c2str
-G${CMAKE_GENERATOR}
-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
-DBUILD_TCC=OFF
COMMAND
${CMAKE_COMMAND}
--build ${CMAKE_CURRENT_BINARY_DIR}/c2str
--target c2str.exe
WORKING_DIRECTORY
${CMAKE_CURRENT_BINARY_DIR}/c2str
)
else()
add_executable(c2str.exe conftest.c)
target_compile_options(c2str.exe PRIVATE -DC2STR)
execute_process(
COMMAND git rev-parse --short HEAD
OUTPUT_VARIABLE TCC_GITHASH
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_QUIET
)
if(TCC_GITHASH)
add_compile_definitions(TCC_GITHASH=\"${TCC_GITHASH}\")
add_custom_command(
TARGET
c2str.exe POST_BUILD
COMMAND
c2str.exe include/tccdefs.h tccdefs_.h
WORKING_DIRECTORY
${CMAKE_CURRENT_SOURCE_DIR}
)
endif()
endif()
set(TCC_SOURCES
libtcc.c
tccpp.c
tccgen.c
tccdbg.c
tccelf.c
tccasm.c
tccrun.c
)
if(BUILD_TCC)
file(READ ${CMAKE_CURRENT_SOURCE_DIR}/VERSION TCC_VERSION)
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64|ARM64")
list(APPEND TCC_SOURCES
arm64-gen.c
arm64-link.c
arm64-asm.c
add_compile_definitions(TCC_VERSION=\"${TCC_VERSION}\")
execute_process(
COMMAND git rev-parse --short HEAD
OUTPUT_VARIABLE TCC_GITHASH
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_QUIET
)
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|x64|amd64|AMD64")
list(APPEND TCC_SOURCES
x86_64-gen.c
x86_64-link.c
i386-asm.c
if(TCC_GITHASH)
add_compile_definitions(TCC_GITHASH=\"${TCC_GITHASH}\")
endif()
set(TCC_SOURCES
libtcc.c
tccpp.c
tccgen.c
tccdbg.c
tccelf.c
tccasm.c
tccrun.c
)
else()
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif()
if(APPLE)
list(APPEND TCC_SOURCES tccmacho.c)
endif()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64|ARM64")
list(APPEND TCC_SOURCES
arm64-gen.c
arm64-link.c
arm64-asm.c
)
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64|x64|amd64|AMD64")
list(APPEND TCC_SOURCES
x86_64-gen.c
x86_64-link.c
i386-asm.c
)
else()
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif()
if(WIN32)
list(APPEND TCC_SOURCES tccpe.c)
endif()
if(APPLE)
list(APPEND TCC_SOURCES tccmacho.c)
endif()
add_executable(c2str.exe conftest.c)
target_compile_options(c2str.exe PRIVATE -DC2STR)
if(WIN32)
list(APPEND TCC_SOURCES tccpe.c)
endif()
add_custom_command(
TARGET
c2str.exe POST_BUILD
COMMAND
c2str.exe include/tccdefs.h tccdefs_.h
WORKING_DIRECTORY
add_library(tcc STATIC ${TCC_SOURCES})
if(BUILD_C2STR)
add_dependencies(tcc c2str.exe)
endif()
target_include_directories(tcc PUBLIC
${CMAKE_CURRENT_SOURCE_DIR}
)
add_library(tcc STATIC ${TCC_SOURCES})
add_custom_command(
TARGET
tcc PRE_BUILD
COMMAND
${CMAKE_COMMAND} -E touch config.h
WORKING_DIRECTORY
${CMAKE_CURRENT_SOURCE_DIR}
)
add_dependencies(tcc c2str.exe)
target_include_directories(tcc PUBLIC
${CMAKE_CURRENT_SOURCE_DIR}
${CMAKE_CURRENT_SOURCE_DIR}/include
)
${CMAKE_CURRENT_SOURCE_DIR}/include
)
endif()

View File

@@ -400,7 +400,7 @@ check_package_manager() {
pm="brew"
;;
linux)
if [ -f "$(which apt)" ]; then
if [ -f "$(which apt-get)" ]; then
pm="apt"
elif [ -f "$(which dnf)" ]; then
pm="dnf"
@@ -549,7 +549,7 @@ check_ulimit() {
package_manager() {
case "$pm" in
apt)
execute_sudo apt "$@"
execute_sudo apt-get "$@"
;;
dnf)
case "$distro" in
@@ -598,6 +598,7 @@ install_packages() {
package_manager install \
--yes \
--no-install-recommends \
--fix-missing \
"$@"
;;
dnf)
@@ -917,8 +918,8 @@ install_llvm() {
apk)
# alpine doesn't have a lld19 package on 3.21 atm so use bare one for now
install_packages \
"llvm$(llvm_version)" \
"clang$(llvm_version)" \
"llvm$(llvm_version)-dev" \
"clang$(llvm_version)-dev" \
"scudo-malloc" \
"lld"
;;
@@ -994,6 +995,7 @@ install_gcc() {
execute_sudo ln -sf $(which ld.lld-$llvm_v) /usr/bin/ld
execute_sudo ln -sf $(which clang) /usr/bin/cc
execute_sudo ln -sf $(which clang++) /usr/bin/c++
execute_sudo ln -sf $(which llvm-symbolizer-$llvm_v) /usr/bin/llvm-symbolizer
}
install_ccache() {

View File

@@ -25,17 +25,23 @@ import {
} from "node:fs";
import { readFile } from "node:fs/promises";
import { userInfo } from "node:os";
import { basename, dirname, join, relative, sep } from "node:path";
import { basename, dirname, join, relative, sep, extname } from "node:path";
import { parseArgs } from "node:util";
import {
getAbi,
getAbiVersion,
getArch,
getBranch,
getBuildLabel,
getBuildUrl,
getCommit,
getDistro,
getDistroVersion,
getEnv,
getFileUrl,
getHostname,
getLoggedInUserCountOrDetails,
getOs,
getSecret,
getShell,
getWindowsExitReason,
@@ -156,7 +162,116 @@ if (options["quiet"]) {
}
/**
*
* @typedef {Object} TestExpectation
* @property {string} filename
* @property {string[]} expectations
* @property {string[] | undefined} bugs
* @property {string[] | undefined} modifiers
* @property {string | undefined} comment
*/
/**
* @returns {TestExpectation[]}
*/
/**
 * Parses `test/expectations.txt` into structured test expectations.
 *
 * Line format (see http://trac.webkit.org/wiki/TestExpectations):
 *   [ MODIFIER ... ] path/to/test [ EXPECTATION ... ] # comment
 * Blank lines and lines starting with "#" are ignored. Both bracket
 * groups and the comment are optional; a missing expectation group
 * defaults to ["Skip"].
 *
 * @returns {TestExpectation[]} parsed expectations, empty if the file is absent
 */
function getTestExpectations() {
  const expectationsPath = join(cwd, "test", "expectations.txt");
  if (!existsSync(expectationsPath)) {
    return [];
  }

  const lines = readFileSync(expectationsPath, "utf-8").split(/\r?\n/);

  /** @type {TestExpectation[]} */
  const expectations = [];
  for (const line of lines) {
    const content = line.trim();
    if (!content || content.startsWith("#")) {
      continue;
    }

    // Strip a trailing "# comment", if present.
    let comment;
    const commentIndex = content.indexOf("#");
    let cleanLine = content;
    if (commentIndex !== -1) {
      comment = content.substring(commentIndex + 1).trim();
      cleanLine = content.substring(0, commentIndex).trim();
    }

    // Leading "[ MODIFIER ... ]" restricts which platforms the line applies to.
    let modifiers = [];
    let remaining = cleanLine;
    const modifierMatch = remaining.match(/^\[(.*?)\]/);
    if (modifierMatch) {
      // filter(Boolean) guards against "[ ]" producing [""]: "".split(/\s+/)
      // returns [""], which would wrongly mark the line as modifier-restricted
      // with a modifier that can never match.
      modifiers = modifierMatch[1].trim().split(/\s+/).filter(Boolean);
      remaining = remaining.substring(modifierMatch[0].length).trim();
    }

    // Trailing "[ EXPECTATION ... ]"; defaults to Skip when absent or empty.
    let expectationValues = ["Skip"];
    const expectationMatch = remaining.match(/\[(.*?)\]$/);
    if (expectationMatch) {
      const values = expectationMatch[1].trim().split(/\s+/).filter(Boolean);
      if (values.length) {
        expectationValues = values;
      }
      remaining = remaining.substring(0, remaining.length - expectationMatch[0].length).trim();
    }

    const filename = remaining.trim();
    if (filename) {
      expectations.push({
        filename,
        expectations: expectationValues,
        bugs: undefined, // not yet parsed from the file format
        modifiers: modifiers.length ? modifiers : undefined,
        comment,
      });
    }
  }

  return expectations;
}
/**
* @param {string} testPath
* @returns {string[]}
*/
/**
 * Builds the list of platform modifiers (uppercased) used to match lines in
 * expectations.txt against the current machine and the executable's filename.
 * Includes every combination of os/arch/distro/abi plus their versions, e.g.
 * LINUX, X64, LINUX-X64, LINUX-MUSL, DEBIAN-12, LINUX-X64-DEBIAN-12, ...
 *
 * @param {string} testPath path to the bun executable being tested
 * @returns {string[]} uppercased modifier tags
 */
function getTestModifiers(testPath) {
  // Tokens from the executable name itself (e.g. "bun-asan" -> "asan"),
  // minus the uninformative "bun" token.
  const nameParts = basename(testPath, extname(testPath))
    .split("-")
    .filter(part => part !== "bun");

  const os = getOs();
  const arch = getArch();
  const tags = [...nameParts, os, arch, `${os}-${arch}`];

  const distro = getDistro();
  if (distro) {
    tags.push(distro, `${os}-${distro}`, `${os}-${arch}-${distro}`);
    const distroVersion = getDistroVersion();
    if (distroVersion) {
      tags.push(
        distroVersion,
        `${distro}-${distroVersion}`,
        `${os}-${distro}-${distroVersion}`,
        `${os}-${arch}-${distro}-${distroVersion}`,
      );
    }
  }

  const abi = getAbi();
  if (abi) {
    tags.push(abi, `${os}-${abi}`, `${os}-${arch}-${abi}`);
    const abiVersion = getAbiVersion();
    if (abiVersion) {
      tags.push(
        abiVersion,
        `${abi}-${abiVersion}`,
        `${os}-${abi}-${abiVersion}`,
        `${os}-${arch}-${abi}-${abiVersion}`,
      );
    }
  }

  return tags.map(tag => tag.toUpperCase());
}
/**
* @returns {Promise<TestResult[]>}
*/
async function runTests() {
@@ -168,10 +283,14 @@ async function runTests() {
}
!isQuiet && console.log("Bun:", execPath);
const expectations = getTestExpectations();
const modifiers = getTestModifiers(execPath);
!isQuiet && console.log("Modifiers:", modifiers);
const revision = getRevision(execPath);
!isQuiet && console.log("Revision:", revision);
const tests = getRelevantTests(testsPath);
const tests = getRelevantTests(testsPath, modifiers, expectations);
!isQuiet && console.log("Running tests:", tests.length);
/** @type {VendorTest[] | undefined} */
@@ -693,8 +812,8 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0",
BUN_INSTALL_CACHE_DIR: tmpdirPath,
SHELLOPTS: isWindows ? "igncr" : undefined, // ignore "\r" on Windows
// Used in Node.js tests.
TEST_TMPDIR: tmpdirPath,
ASAN_OPTIONS: "allow_user_segv_handler=1",
TEST_TMPDIR: tmpdirPath, // Used in Node.js tests.
};
if (env) {
@@ -1072,9 +1191,6 @@ function isHidden(path) {
return /node_modules|node.js/.test(dirname(path)) || /^\./.test(basename(path));
}
/** Files with these extensions are not treated as test cases */
const IGNORED_EXTENSIONS = new Set([".md"]);
/**
* @param {string} cwd
* @returns {string[]}
@@ -1084,13 +1200,14 @@ function getTests(cwd) {
const dirname = join(cwd, path);
for (const entry of readdirSync(dirname, { encoding: "utf-8", withFileTypes: true })) {
const { name } = entry;
const ext = name.slice(name.lastIndexOf("."));
const filename = join(path, name);
if (isHidden(filename) || IGNORED_EXTENSIONS.has(ext)) {
if (isHidden(filename)) {
continue;
}
if (entry.isFile() && isTest(filename)) {
yield filename;
if (entry.isFile()) {
if (isTest(filename)) {
yield filename;
}
} else if (entry.isDirectory()) {
yield* getFiles(cwd, filename);
}
@@ -1226,9 +1343,11 @@ async function getVendorTests(cwd) {
/**
* @param {string} cwd
* @param {string[]} testModifiers
* @param {TestExpectation[]} testExpectations
* @returns {string[]}
*/
function getRelevantTests(cwd) {
function getRelevantTests(cwd, testModifiers, testExpectations) {
let tests = getTests(cwd);
const availableTests = [];
const filteredTests = [];
@@ -1272,6 +1391,25 @@ function getRelevantTests(cwd) {
}
}
const skipExpectations = testExpectations
.filter(
({ modifiers, expectations }) =>
!modifiers?.length || testModifiers.some(modifier => modifiers?.includes(modifier)),
)
.map(({ filename }) => filename.replace("test/", ""));
if (skipExpectations.length) {
const skippedTests = availableTests.filter(testPath => skipExpectations.some(filter => isMatch(testPath, filter)));
if (skippedTests.length) {
for (const testPath of skippedTests) {
const index = availableTests.indexOf(testPath);
if (index !== -1) {
availableTests.splice(index, 1);
}
}
!isQuiet && console.log("Skipping tests:", skipExpectations, skippedTests.length, "/", availableTests.length);
}
}
const shardId = parseInt(options["shard"]);
const maxShards = parseInt(options["max-shards"]);
if (filters?.length) {
@@ -1368,13 +1506,17 @@ async function getExecPathFromBuildKite(target, buildId) {
await spawnSafe({
command: "buildkite-agent",
args,
timeout: 60000,
});
for (const entry of readdirSync(releasePath, { recursive: true, encoding: "utf-8" })) {
if (/^bun.*\.zip$/i.test(entry) && !entry.includes("-profile.zip")) {
zipPath = join(releasePath, entry);
break downloadLoop;
}
zipPath = readdirSync(releasePath, { recursive: true, encoding: "utf-8" })
.filter(filename => /^bun.*\.zip$/i.test(filename))
.map(filename => join(releasePath, filename))
.sort((a, b) => b.includes("profile") - a.includes("profile"))
.at(0);
if (zipPath) {
break downloadLoop;
}
console.warn(`Waiting for ${target}.zip to be available...`);
@@ -1390,12 +1532,12 @@ async function getExecPathFromBuildKite(target, buildId) {
const releaseFiles = readdirSync(releasePath, { recursive: true, encoding: "utf-8" });
for (const entry of releaseFiles) {
const execPath = join(releasePath, entry);
if (/bun(?:\.exe)?$/i.test(entry) && statSync(execPath).isFile()) {
if (/bun(?:-[a-z]+)?(?:\.exe)?$/i.test(entry) && statSync(execPath).isFile()) {
return execPath;
}
}
console.warn(`Found ${releaseFiles.length} files in ${releasePath}:`);
console.warn(`Found ${releaseFiles.length} files in ${releasePath}:`, releaseFiles);
throw new Error(`Could not find executable from BuildKite: ${releasePath}`);
}

View File

@@ -2332,10 +2332,11 @@ function parseLevel(level) {
* @returns {Annotation}
*/
export function parseAnnotation(options, context) {
const cwd = (context?.["cwd"] || process.cwd()).replace(/\\/g, "/");
const source = options["source"];
const level = parseLevel(options["level"]);
const title = options["title"] || (source ? `${source} ${level}` : level);
const filename = options["filename"];
const path = options["filename"]?.replace(/\\/g, "/");
const line = parseInt(options["line"]) || undefined;
const column = parseInt(options["column"]) || undefined;
const content = options["content"];
@@ -2354,6 +2355,13 @@ export function parseAnnotation(options, context) {
relevantLines.push(line);
}
let filename;
if (path?.startsWith(cwd)) {
filename = path.slice(cwd.length + 1);
} else {
filename = path;
}
return {
source,
title,
@@ -2894,6 +2902,7 @@ const emojiMap = {
true: ["✅", "white_check_mark"],
false: ["❌", "x"],
debug: ["🐞", "bug"],
asan: ["🐛", "bug"],
assert: ["🔍", "mag"],
release: ["🏆", "trophy"],
gear: ["⚙️", "gear"],

View File

@@ -3058,14 +3058,15 @@ fn sendBuiltInNotFound(resp: anytype) void {
resp.end(message, true);
}
fn printMemoryLine(dev: *DevServer) void {
fn printMemoryLine(_: *DevServer) void {
if (!debug.isVisible()) return;
Output.prettyErrorln("<d>DevServer tracked {}, measured: {} ({}), process: {}<r>", .{
bun.fmt.size(dev.memoryCost(), .{}),
dev.allocation_scope.state.allocations.count(),
bun.fmt.size(dev.allocation_scope.state.total_memory_allocated, .{}),
bun.fmt.size(bun.sys.selfProcessMemoryUsage() orelse 0, .{}),
});
// FIXME: compilation error with asan build
// Output.prettyErrorln("<d>DevServer tracked {}, measured: {} ({}), process: {}<r>", .{
// bun.fmt.size(dev.memoryCost(), .{}),
// dev.allocation_scope.state.allocations.count(),
// bun.fmt.size(dev.allocation_scope.state.total_memory_allocated, .{}),
// bun.fmt.size(bun.sys.selfProcessMemoryUsage() orelse 0, .{}),
// });
}
const FileKind = enum(u2) {

View File

@@ -2177,7 +2177,7 @@ static JSValue constructStdioWriteStream(JSC::JSGlobalObject* globalObject, int
RETURN_IF_EXCEPTION(scope, {});
if (auto* exception = returnedException.get()) {
#if BUN_DEBUG
#if ASSERT_ENABLED
Zig::GlobalObject::reportUncaughtExceptionAtEventLoop(globalObject, exception);
#endif
scope.throwException(globalObject, exception->value());
@@ -2243,7 +2243,7 @@ static JSValue constructStdin(VM& vm, JSObject* processObject)
RETURN_IF_EXCEPTION(scope, {});
if (auto* exception = returnedException.get()) {
#if BUN_DEBUG
#if ASSERT_ENABLED
Zig::GlobalObject::reportUncaughtExceptionAtEventLoop(globalObject, exception);
#endif
scope.throwException(globalObject, exception->value());
@@ -2323,7 +2323,7 @@ static JSValue constructProcessChannel(VM& vm, JSObject* processObject)
RETURN_IF_EXCEPTION(scope, {});
if (auto* exception = returnedException.get()) {
#if BUN_DEBUG
#if ASSERT_ENABLED
Zig::GlobalObject::reportUncaughtExceptionAtEventLoop(globalObject, exception);
#endif
scope.throwException(globalObject, exception->value());

View File

@@ -1584,7 +1584,7 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject
JSC::JSValue codeValue = callFrame->argument(0);
RETURN_IF_EXCEPTION(scope, {});
#if BUN_DEBUG
#if ASSERT_ENABLED
if (!codeValue.isNumber()) {
JSC::throwTypeError(globalObject, scope, "First argument to $ERR_ must be a number"_s);
return {};
@@ -1593,7 +1593,7 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject
int code = codeValue.toInt32(globalObject);
#if BUN_DEBUG
#if ASSERT_ENABLED
if (code > Bun::NODE_ERROR_COUNT - 1 || code < 0) {
JSC::throwTypeError(globalObject, scope, "Invalid error code. Use $ERR_* constants"_s);
return {};

View File

@@ -22,7 +22,7 @@ JSC_DEFINE_HOST_FUNCTION(jsDollarLazy, (JSC::JSGlobalObject * lexicalGlobalObjec
{
JSC::JSValue target = callFrame->uncheckedArgument(0);
#if BUN_DEBUG
#if ASSERT_ENABLED
ASSERT_WITH_MESSAGE(target.isInt32(), "In call to $lazy: expected Int32, got %s", target.toWTFString(lexicalGlobalObject).utf8().data());
#endif

View File

@@ -201,6 +201,7 @@
#ifdef __APPLE__
#include <sys/sysctl.h>
#include <exception>
#elif defined(__linux__)
// for sysconf
#include <unistd.h>
@@ -266,6 +267,17 @@ extern "C" void JSCInitialize(const char* envp[], size_t envc, void (*onCrash)(c
std::set_terminate([]() { Zig__GlobalObject__onCrash(); });
WTF::initializeMainThread();
#if ASAN_ENABLED && OS(LINUX)
{
JSC::Options::AllowUnfinalizedAccessScope scope;
// ASAN interferes with JSC's signal handlers
JSC::Options::useWasmFaultSignalHandler() = false;
JSC::Options::useWasmFastMemory() = false;
}
#endif
JSC::initialize();
{
@@ -1997,7 +2009,7 @@ static inline std::optional<JSC::JSValue> invokeReadableStreamFunction(JSC::JSGl
auto scope = DECLARE_CATCH_SCOPE(vm);
auto callData = JSC::getCallData(function);
auto result = call(&lexicalGlobalObject, function, callData, thisValue, arguments);
#if BUN_DEBUG
#if ASSERT_ENABLED
if (scope.exception()) {
Bun__reportError(&lexicalGlobalObject, JSValue::encode(scope.exception()));
}
@@ -4053,7 +4065,7 @@ JSC::JSInternalPromise* GlobalObject::moduleLoaderImportModule(JSGlobalObject* j
BunString moduleNameZ;
String moduleName = moduleNameValue->value(globalObject);
#if BUN_DEBUG
#if ASSERT_ENABLED
auto startRefCount = moduleName.impl()->refCount();
#endif
if (moduleName.startsWith("file://"_s)) {

View File

@@ -133,7 +133,7 @@
#include "wtf-bindings.h"
#if OS(DARWIN)
#if BUN_DEBUG
#if ASSERT_ENABLED
#if !__has_feature(address_sanitizer)
#include <malloc/malloc.h>
#define IS_MALLOC_DEBUGGING_ENABLED 1
@@ -2654,7 +2654,7 @@ JSC::EncodedJSValue JSObjectCallAsFunctionReturnValueHoldingAPILock(JSContextRef
JSC::JSLockHolder lock(vm);
#if BUN_DEBUG
#if ASSERT_ENABLED
// This is a redundant check, but we add it to make the error message clearer.
ASSERT_WITH_MESSAGE(!vm.isCollectorBusyOnCurrentThread(), "Cannot call function inside a finalizer or while GC is running on same thread.");
#endif
@@ -6431,7 +6431,7 @@ extern "C" EncodedJSValue Bun__JSObject__getCodePropertyVMInquiry(JSC::JSGlobalO
return JSValue::encode(slot.getPureResult());
}
#if BUN_DEBUG
#if ASSERT_ENABLED
CPP_DECL const char* Bun__CallFrame__describeFrame(JSC::CallFrame* callFrame)
{
return callFrame->describeFrame();

View File

@@ -59,7 +59,7 @@ public:
accessor->finishCreation(vm, value, finalizer_hint, env, callback);
#if BUN_DEBUG
#if ASSERT_ENABLED
if (auto* callFrame = vm.topCallFrame) {
auto origin = callFrame->callerSourceOrigin(vm);
accessor->sourceOriginURL = origin.string();
@@ -97,7 +97,7 @@ public:
NapiFinalizer m_finalizer;
napi_env m_env;
#if BUN_DEBUG
#if ASSERT_ENABLED
String sourceOriginURL = String();
unsigned sourceOriginLine = 0;
unsigned sourceOriginColumn = 0;

View File

@@ -245,4 +245,9 @@ extern "C" void* Bun__StackCheck__getMaxStack()
return stackBoundsForCurrentThread.end();
}
extern "C" void WTF__DumpStackTrace(void** stack, size_t stack_count)
{
WTFPrintBacktrace({ stack, stack_count });
}
}

View File

@@ -50,7 +50,7 @@
#include <JavaScriptCore/ControlFlowProfiler.h>
#if OS(DARWIN)
#if BUN_DEBUG
#if ASSERT_ENABLED
#if !__has_feature(address_sanitizer)
#include <malloc/malloc.h>
#define IS_MALLOC_DEBUGGING_ENABLED 1

View File

@@ -5906,9 +5906,6 @@ pub const NodeFS = struct {
Maybe(Return.Utimes).success;
}
bun.assert(args.mtime.nsec <= 1e9);
bun.assert(args.atime.nsec <= 1e9);
return switch (Syscall.utimens(
args.path.sliceZ(&this.sync_error_buf),
args.atime,

View File

@@ -10767,11 +10767,11 @@ pub const LinkerContext = struct {
var worker = ThreadPool.Worker.get(@fieldParentPtr("linker", ctx.c));
defer worker.unget();
const prev_action = if (Environment.isDebug) bun.crash_handler.current_action;
defer if (Environment.isDebug) {
const prev_action = if (Environment.show_crash_trace) bun.crash_handler.current_action;
defer if (Environment.show_crash_trace) {
bun.crash_handler.current_action = prev_action;
};
if (Environment.isDebug) bun.crash_handler.current_action = .{ .bundle_generate_chunk = .{
if (Environment.show_crash_trace) bun.crash_handler.current_action = .{ .bundle_generate_chunk = .{
.chunk = ctx.chunk,
.context = ctx.c,
.part_range = &part_range.part_range,
@@ -10923,17 +10923,17 @@ pub const LinkerContext = struct {
var worker = ThreadPool.Worker.get(@fieldParentPtr("linker", ctx.c));
defer worker.unget();
const prev_action = if (Environment.isDebug) bun.crash_handler.current_action;
defer if (Environment.isDebug) {
const prev_action = if (Environment.show_crash_trace) bun.crash_handler.current_action;
defer if (Environment.show_crash_trace) {
bun.crash_handler.current_action = prev_action;
};
if (Environment.isDebug) bun.crash_handler.current_action = .{ .bundle_generate_chunk = .{
if (Environment.show_crash_trace) bun.crash_handler.current_action = .{ .bundle_generate_chunk = .{
.chunk = ctx.chunk,
.context = ctx.c,
.part_range = &part_range.part_range,
} };
if (Environment.isDebug) {
if (Environment.show_crash_trace) {
const path = ctx.c.parse_graph.input_files.items(.source)[part_range.part_range.source_index.get()].path;
if (bun.CLI.debug_flags.hasPrintBreakpoint(path)) {
@breakpoint();

View File

@@ -77,7 +77,7 @@ pub const Cli = struct {
pub threadlocal var is_main_thread: bool = false;
};
pub const debug_flags = if (Environment.isDebug) struct {
pub const debug_flags = if (Environment.show_crash_trace) struct {
var resolve_breakpoints: []const []const u8 = &.{};
var print_breakpoints: []const []const u8 = &.{};
@@ -183,7 +183,7 @@ pub const Arguments = struct {
pub const ParamType = clap.Param(clap.Help);
const base_params_ = (if (Environment.isDebug) debug_params else [_]ParamType{}) ++ [_]ParamType{
const base_params_ = (if (Environment.show_crash_trace) debug_params else [_]ParamType{}) ++ [_]ParamType{
clap.parseParam("--env-file <STR>... Load environment variables from the specified file(s)") catch unreachable,
clap.parseParam("--cwd <STR> Absolute path to resolve files & entry points from. This just changes the process' cwd.") catch unreachable,
clap.parseParam("-c, --config <PATH>? Specify path to Bun config file. Default <d>$cwd<r>/bunfig.toml") catch unreachable,
@@ -1280,7 +1280,7 @@ pub const Arguments = struct {
}
}
if (Environment.isDebug) {
if (Environment.show_crash_trace) {
debug_flags.resolve_breakpoints = args.options("--breakpoint-resolve");
debug_flags.print_breakpoints = args.options("--breakpoint-print");
}

View File

@@ -148,7 +148,7 @@ JSC_DEFINE_JIT_OPERATION(${DOMJITName(
CallFrame* callFrame = DECLARE_CALL_FRAME(vm);
IGNORE_WARNINGS_END
JSC::JITOperationPrologueCallFrameTracer tracer(vm, callFrame);
#if BUN_DEBUG
#if ASSERT_ENABLED
${jsClassName}* wrapper = reinterpret_cast<${jsClassName}*>(thisValue);
JSC::EncodedJSValue result = ${DOMJITName(symName)}(wrapper->wrapped(), lexicalGlobalObject${retArgs});
JSValue decoded = JSValue::decode(result);
@@ -1187,7 +1187,7 @@ JSC_DEFINE_HOST_FUNCTION(${symbolName(typeName, name)}Callback, (JSGlobalObject
JSC::EnsureStillAliveScope thisArg = JSC::EnsureStillAliveScope(thisObject);
#if BUN_DEBUG
#if ASSERT_ENABLED
/** View the file name of the JS file that called this function
* from a debugger */
SourceOrigin sourceOrigin = callFrame->callerSourceOrigin(vm);
@@ -1412,7 +1412,7 @@ function generateClassHeader(typeName, obj: ClassDefinition) {
}
function domJITTypeCheckFields(proto, klass) {
var output = "#if BUN_DEBUG\n";
var output = "#if ASSERT_ENABLED\n";
for (const name in proto) {
const { DOMJIT, fn } = proto[name];
if (!DOMJIT) continue;

View File

@@ -117,7 +117,7 @@ pub const Action = union(enum) {
print: []const u8,
/// bun.bundle_v2.LinkerContext.generateCompileResultForJSChunk
bundle_generate_chunk: if (bun.Environment.isDebug) struct {
bundle_generate_chunk: if (bun.Environment.show_crash_trace) struct {
context: *const anyopaque, // unfortunate dependency loop workaround
chunk: *const bun.bundle_v2.Chunk,
part_range: *const bun.bundle_v2.PartRange,
@@ -127,7 +127,7 @@ pub const Action = union(enum) {
}
} else void,
resolver: if (bun.Environment.isDebug) struct {
resolver: if (bun.Environment.show_crash_trace) struct {
source_dir: []const u8,
import_path: []const u8,
kind: bun.ImportKind,
@@ -140,7 +140,7 @@ pub const Action = union(enum) {
.parse => |path| try writer.print("parsing {s}", .{path}),
.visit => |path| try writer.print("visiting {s}", .{path}),
.print => |path| try writer.print("printing {s}", .{path}),
.bundle_generate_chunk => |data| if (bun.Environment.isDebug) {
.bundle_generate_chunk => |data| if (bun.Environment.show_crash_trace) {
try writer.print(
\\generating bundler chunk
\\ chunk entry point: {?s}
@@ -159,7 +159,7 @@ pub const Action = union(enum) {
},
);
},
.resolver => |res| if (bun.Environment.isDebug) {
.resolver => |res| if (bun.Environment.show_crash_trace) {
try writer.print("resolving {s} from {s} ({s})", .{
res.import_path,
res.source_dir,
@@ -217,7 +217,7 @@ pub fn crashHandler(
//
// To make the release-mode behavior easier to demo, debug mode
// checks for this CLI flag.
const debug_trace = bun.Environment.isDebug and check_flag: {
const debug_trace = bun.Environment.show_crash_trace and check_flag: {
for (bun.argv) |arg| {
if (bun.strings.eqlComptime(arg, "--debug-crash-handler-use-trace-string")) {
break :check_flag false;
@@ -482,7 +482,7 @@ pub fn crashHandler(
/// This is called when `main` returns a Zig error.
/// We don't want to treat it as a crash under certain error codes.
pub fn handleRootError(err: anyerror, error_return_trace: ?*std.builtin.StackTrace) noreturn {
var show_trace = bun.Environment.isDebug;
var show_trace = bun.Environment.show_crash_trace;
switch (err) {
error.OutOfMemory => bun.outOfMemory(),
@@ -722,7 +722,7 @@ pub fn handleRootError(err: anyerror, error_return_trace: ?*std.builtin.StackTra
else => {
Output.errGeneric(
if (bun.Environment.isDebug)
if (bun.Environment.show_crash_trace)
"'main' returned <red>error.{s}<r>"
else
"An internal error occurred (<red>{s}<r>)",
@@ -815,7 +815,7 @@ fn handleSegfaultPosix(sig: i32, info: *const std.posix.siginfo_t, _: ?*const an
var did_register_sigaltstack = false;
var sigaltstack: [512 * 1024]u8 = undefined;
pub fn updatePosixSegfaultHandler(act: ?*std.posix.Sigaction) !void {
fn updatePosixSegfaultHandler(act: ?*std.posix.Sigaction) !void {
if (act) |act_| {
if (!did_register_sigaltstack) {
var stack: std.c.stack_t = .{
@@ -840,6 +840,7 @@ pub fn updatePosixSegfaultHandler(act: ?*std.posix.Sigaction) !void {
var windows_segfault_handle: ?windows.HANDLE = null;
pub fn resetOnPosix() void {
if (!bun.Environment.enable_asan) return;
var act = std.posix.Sigaction{
.handler = .{ .sigaction = handleSegfaultPosix },
.mask = std.posix.empty_sigset,
@@ -850,6 +851,7 @@ pub fn resetOnPosix() void {
pub fn init() void {
if (!enable) return;
if (!bun.Environment.enable_asan) return;
switch (bun.Environment.os) {
.windows => {
windows_segfault_handle = windows.kernel32.AddVectoredExceptionHandler(0, handleSegfaultWindows);
@@ -1370,6 +1372,9 @@ fn isReportingEnabled() bool {
if (bun.Environment.isDebug)
return false;
if (bun.Environment.enable_asan)
return false;
// Honor DO_NOT_TRACK
if (!bun.analytics.isEnabled())
return false;
@@ -1590,12 +1595,14 @@ pub inline fn handleErrorReturnTrace(err: anyerror, maybe_trace: ?*std.builtin.S
handleErrorReturnTraceExtra(err, maybe_trace, false);
}
extern "c" fn WTF__DumpStackTrace(ptr: [*]usize, count: usize) void;
/// Version of the standard library dumpStackTrace that has some fallbacks for
/// cases where such logic fails to run.
pub fn dumpStackTrace(trace: std.builtin.StackTrace, limits: WriteStackTraceLimits) void {
Output.flush();
const stderr = std.io.getStdErr().writer();
if (!bun.Environment.isDebug) {
if (!bun.Environment.show_crash_trace) {
// debug symbols aren't available, lets print a tracestring
stderr.print("View Debug Trace: {}\n", .{TraceString{
.action = .view_trace,
@@ -1621,6 +1628,8 @@ pub fn dumpStackTrace(trace: std.builtin.StackTrace, limits: WriteStackTraceLimi
.linux => {
// Linux doesnt seem to be able to decode it's own debug info.
// TODO(@paperclover): see if zig 0.14 fixes this
WTF__DumpStackTrace(trace.instruction_addresses.ptr, trace.instruction_addresses.len);
return;
},
else => {
// Assume debug symbol tooling is reliable.

View File

@@ -26,6 +26,7 @@ pub const isX86 = @import("builtin").target.cpu.arch.isX86();
pub const isX64 = @import("builtin").target.cpu.arch == .x86_64;
pub const isMusl = builtin.target.abi.isMusl();
pub const allow_assert = isDebug or isTest or std.builtin.Mode.ReleaseSafe == @import("builtin").mode;
pub const show_crash_trace = isDebug or isTest or enable_asan;
/// All calls to `@export` should be gated behind this check, so that code
/// generators that compile Zig code know not to reference and compile a ton of
@@ -49,6 +50,7 @@ pub const canary_revision = if (is_canary) build_options.canary_revision else ""
pub const dump_source = isDebug and !isTest;
pub const base_path = build_options.base_path;
pub const enable_logs = build_options.enable_logs;
pub const enable_asan = build_options.enable_asan;
pub const codegen_path = build_options.codegen_path;
pub const codegen_embed = build_options.codegen_embed;

View File

@@ -777,7 +777,7 @@ fn ScopedLogger(comptime tagname: []const u8, comptime disabled: bool) type {
/// BUN_DEBUG_foo=1
/// To enable all logs, set the environment variable
/// BUN_DEBUG_ALL=1
pub fn log(comptime fmt: string, args: anytype) void {
pub fn log(comptime fmt: string, args: anytype) callconv(bun.callconv_inline) void {
if (!source_set) return;
if (fmt.len == 0 or fmt[fmt.len - 1] != '\n') {
return log(fmt ++ "\n", args);
@@ -831,7 +831,7 @@ fn ScopedLogger(comptime tagname: []const u8, comptime disabled: bool) type {
};
}
pub fn scoped(comptime tag: anytype, comptime disabled: bool) LogFunction {
pub fn scoped(comptime tag: anytype, comptime disabled: bool) callconv(bun.callconv_inline) LogFunction {
return Scoped(
tag,
disabled,

119
test/expectations.txt Normal file
View File

@@ -0,0 +1,119 @@
# Documentation: http://trac.webkit.org/wiki/TestExpectations
# Format: [modifier] test-name [[ expectations ]] [# comment]
# Tests that are broken
test/cli/create/create-jsx.test.ts [ FAIL ] # false > react spa (no tailwind) > build
test/integration/bun-types/bun-types.test.ts [ FAIL ] # @types/bun integration test > checks without lib.dom.d.ts
test/bundler/native-plugin.test.ts [ FAIL ] # prints name when plugin crashes
test/cli/install/bun-run.test.ts [ FAIL ] # should pass arguments correctly in scripts
test/cli/run/run-crash-handler.test.ts [ FAIL ] # automatic crash reporter > segfault should report
test/regression/issue/17454/destructure_string.test.ts [ FAIL ] # destructure string does not become string
# Tests that are flaky
test/js/bun/spawn/spawn-maxbuf.test.ts [ FLAKY ]
# Tests skipped due to different log/line outputs
[ ASAN ] test/js/web/console/console-log.test.ts [ SKIP ] # log line mismatch
[ ASAN ] test/js/bun/util/reportError.test.ts [ SKIP ] # log line mismatch
[ ASAN ] test/js/node/child_process/child_process.test.ts [ SKIP ] # Unexpected identifier "WARNING"
[ ASAN ] test/js/bun/shell/bunshell.test.ts [ SKIP ] # bunshell > quiet > basic
[ ASAN ] test/bundler/cli.test.ts [ SKIP ] # debug logs
[ ASAN ] test/cli/install/bun-install.test.ts [ FLAKY ] # destroy(Closer) logs
# Tests failed due to ASAN
[ ASAN ] test/js/node/test/parallel/test-common-gc.js [ FAIL ]
[ ASAN ] test/js/bun/spawn/spawn-streaming-stdin.test.ts [ FAIL ]
[ ASAN ] test/regression/issue/17454/destructure_string.test.ts [ FAIL ]
[ ASAN ] test/js/node/test/parallel/test-http-server-connections-checking-leak.js [ FAIL ]
[ ASAN ] test/js/node/test/parallel/test-zlib-invalid-input-memory.js [ FAIL ]
[ ASAN ] test/js/node/test/parallel/test-https-server-connections-checking-leak.js [ FAIL ]
[ ASAN ] test/js/bun/sqlite/sqlite.test.js [ FAIL ] # can continue to use existing statements after database has been GC'd
[ ASAN ] test/bake/dev/stress.test.ts [ FLAKY ] # DEV:stress-1: crash #18910
# Tests failed due to ASAN: attempting free on address which was not malloc()-ed
[ ASAN ] test/js/node/test/parallel/test-http2-removed-header-stays-removed.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-invalidheaderfields-client.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-writehead-array.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-headers-after-destroy.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-writehead.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-trailers.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-headers.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-options-server-request.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-write-empty-string.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-invalidheaderfield.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-options-server-response.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-server-set-header.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-connect-options.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-statusmessage.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-end.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-fs-utimes.js [ CRASH ]
[ ASAN ] test/js/node/worker_threads/worker_threads.test.ts [ CRASH ] # After: threadId module and worker property is consistent
[ ASAN ] test/js/node/worker_threads/worker_destruction.test.ts [ CRASH ] # After: bun closes cleanly when Bun.connect is used in a Worker that is terminating
[ ASAN ] test/integration/vite-build/vite-build.test.ts [ CRASH ]
[ ASAN ] test/integration/next-pages/test/dev-server-ssr-100.test.ts [ CRASH ]
[ ASAN ] test/integration/next-pages/test/next-build.test.ts [ CRASH ]
[ ASAN ] test/js/third_party/next-auth/next-auth.test.ts [ CRASH ]
[ ASAN ] test/js/third_party/astro/astro-post.test.js [ CRASH ]
[ ASAN ] test/js/bun/wasm/wasi.test.js [ CRASH ]
[ ASAN ] test/regression/issue/ctrl-c.test.ts [ CRASH ]
[ ASAN ] test/cli/install/bun-repl.test.ts [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-intl.js [ CRASH ]
[ ASAN ] test/js/node/v8/v8-date-parser.test.js [ CRASH ]
[ ASAN ] test/cli/hot/hot.test.ts [ CRASH ]
[ ASAN ] test/js/node/watch/fs.watch.test.ts [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-fs-watch.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-fs-watch-recursive-update-file.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-fs-watch-recursive-linux-parallel-remove.js [ CRASH ]
[ ASAN ] test/js/node/test/parallel/test-fs-promises-watch.js [ CRASH ]
[ ASAN ] test/cli/hot/watch.test.ts [ CRASH ]
[ ASAN ] test/js/bun/resolve/load-same-js-file-a-lot.test.ts [ CRASH ]
[ ASAN ] test/js/third_party/es-module-lexer/es-module-lexer.test.ts [ CRASH ]
[ ASAN ] test/bundler/esbuild/default.test.ts [ CRASH ]
[ ASAN ] test/bundler/bundler_edgecase.test.ts [ CRASH ] # After: edgecase/UsingWithSixImports
[ ASAN ] test/bundler/bundler_loader.test.ts [ CRASH ] # bun/wasm-is-copied-to-outdir
[ ASAN ] test/bundler/bundler_npm.test.ts [ CRASH ]
[ ASAN ] test/bake/dev/sourcemap.test.ts [ CRASH ]
[ ASAN ] test/bake/dev/hot.test.ts [ CRASH ]
[ ASAN ] test/bake/dev/bundle.test.ts [ CRASH ]
[ ASAN ] test/bake/dev/esm.test.ts [ CRASH ]
[ ASAN ] test/bake/dev/css.test.ts [ CRASH ]
[ ASAN ] test/bake/dev/html.test.ts [ CRASH ]
[ ASAN ] test/bake/dev/react-spa.test.ts [ CRASH ]
[ ASAN ] test/bake/dev/ecosystem.test.ts [ CRASH ]
# Tests failed due to ASAN: SEGV on unknown address
[ ASAN ] test/integration/next-pages/test/dev-server.test.ts [ CRASH ]
# Tests failed due to ASAN: heap-use-after-free
[ ASAN ] test/js/first_party/ws/ws.test.ts [ CRASH ]
# Tests failed due to ASAN: use-after-poison
[ ASAN ] test/js/node/test/parallel/test-worker-unref-from-message-during-exit.js [ CRASH ]
[ ASAN ] test/napi/napi.test.ts [ CRASH ] # can throw an exception from an async_complete_callback
[ ASAN ] test/js/node/http/node-http-uaf.test.ts [ CRASH ] # should not crash on abort (node-http-uaf-fixture.ts)
# Tests failed due to ASAN: unknown-crash
[ ASAN ] test/js/sql/tls-sql.test.ts [ CRASH ] # After: Throws on illegal transactions
# Tests failed due to ASAN: assertion failed
[ ASAN ] test/js/node/test/parallel/test-string-decoder-fuzz.js [ CRASH ] # ASSERTION FAILED: joinedLength
# Tests timed out due to ASAN
[ ASAN ] test/js/node/util/test-aborted.test.ts [ TIMEOUT ] # aborted with gc cleanup
[ ASAN ] test/js/node/test/parallel/test-primitive-timer-leak.js [ TIMEOUT ]
[ ASAN ] test/js/bun/spawn/spawn.test.ts [ TIMEOUT ]
[ ASAN ] test/cli/inspect/inspect.test.ts [ TIMEOUT ]
[ ASAN ] test/js/node/test/parallel/test-gc-http-client-connaborted.js [ TIMEOUT ]
[ ASAN ] test/cli/inspect/BunFrontendDevServer.test.ts [ TIMEOUT ]
# Tests failed due to memory leaks
[ ASAN ] test/js/node/url/pathToFileURL.test.ts [ LEAK ] # pathToFileURL doesn't leak memory
[ ASAN ] test/js/node/fs/abort-signal-leak-read-write-file.test.ts [ LEAK ] # should not leak memory with already aborted signals
[ ASAN ] test/js/web/streams/streams-leak.test.ts [ LEAK ] # Absolute memory usage remains relatively constant when reading and writing to a pipe
[ ASAN ] test/js/web/fetch/fetch-leak.test.ts [ LEAK ]
[ ASAN ] test/cli/run/require-cache.test.ts [ LEAK ] # files transpiled and loaded don't leak file paths > via require()
[ ASAN ] test/js/bun/spawn/spawn-pipe-leak.test.ts [ LEAK ]
[ ASAN ] test/js/node/http2/node-http2.test.js [ LEAK ] # should not leak memory
[ ASAN ] test/js/bun/http/req-url-leak.test.ts [ LEAK ] # req.url doesn't leak memory
[ ASAN ] test/js/bun/io/bun-write-leak.test.ts [ LEAK ] # Bun.write should not leak the output data