From d8a69d6823feb60658e3d693bd741f747ea86c45 Mon Sep 17 00:00:00 2001 From: Jarred Sumner Date: Fri, 2 May 2025 10:44:09 -0700 Subject: [PATCH] Enable ASAN with linux-x64-asan in CI --- .buildkite/ci.mjs | 115 ++++++------- build.zig | 1 + cmake/Globals.cmake | 5 + cmake/Options.cmake | 26 +-- cmake/targets/BuildBun.cmake | 22 ++- cmake/tools/SetupBuildkite.cmake | 15 ++ cmake/tools/SetupWebKit.cmake | 4 +- cmake/tools/SetupZig.cmake | 6 +- docs/project/asan.md | 124 ++++++++++++++ package.json | 1 + packages/bun-usockets/src/crypto/openssl.c | 4 +- scripts/bootstrap.sh | 8 +- scripts/machine.mjs | 4 +- scripts/runner.node.mjs | 182 ++++++++++++++++++--- scripts/utils.mjs | 11 +- src/bun.js/bindings/BunProcess.cpp | 6 +- src/bun.js/bindings/ErrorCode.cpp | 4 +- src/bun.js/bindings/JS2Native.cpp | 2 +- src/bun.js/bindings/ZigGlobalObject.cpp | 15 +- src/bun.js/bindings/bindings.cpp | 6 +- src/bun.js/bindings/napi_external.h | 4 +- src/bun.js/bindings/wtf-bindings.cpp | 4 + src/bun.js/modules/BunJSCModule.h | 2 +- src/bun.js/node/node_fs.zig | 3 - src/bundler/bundle_v2.zig | 14 +- src/cli.zig | 6 +- src/crash_handler.zig | 26 ++- src/env.zig | 3 +- src/fs.zig | 2 - src/resolver/resolver.zig | 8 +- test/expectations.txt | 120 ++++++++++++++ test/harness.ts | 6 + 32 files changed, 615 insertions(+), 144 deletions(-) create mode 100644 docs/project/asan.md create mode 100644 test/expectations.txt diff --git a/.buildkite/ci.mjs b/.buildkite/ci.mjs index 56845f367b..4fd429779f 100755 --- a/.buildkite/ci.mjs +++ b/.buildkite/ci.mjs @@ -35,7 +35,7 @@ import { * @typedef {"musl"} Abi * @typedef {"debian" | "ubuntu" | "alpine" | "amazonlinux"} Distro * @typedef {"latest" | "previous" | "oldest" | "eol"} Tier - * @typedef {"release" | "assert" | "debug"} Profile + * @typedef {"release" | "assert" | "debug" | "asan"} Profile */ /** @@ -107,6 +107,7 @@ const buildPlatforms = [ { os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2023", features: ["docker"] }, { os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] }, { os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] }, + { os: "linux", arch: "x64", profile: "asan", distro: "amazonlinux", release: "2023", features: ["docker"] }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21" }, { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21" }, { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.21" }, @@ -125,14 +126,12 @@ const testPlatforms = [ { os: "linux", arch: "aarch64", distro: "debian", release: "12", tier: "latest" }, { os: "linux", arch: "x64", distro: "debian", release: "12", tier: "latest" }, { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12", tier: "latest" }, + { os: "linux", arch: "x64", profile: "asan", distro: "debian", release: "12", tier: "latest" }, { os: "linux", arch: "aarch64", distro: "ubuntu", release: "24.04", tier: "latest" }, - { os: "linux", arch: "aarch64", distro: "ubuntu", release: "22.04", tier: "previous" }, { os: "linux", arch: "aarch64", distro: "ubuntu", release: "20.04", tier: "oldest" }, { os: "linux", arch: "x64", distro: "ubuntu", release: "24.04", tier: "latest" }, - { os: "linux", arch: "x64", distro: "ubuntu", release: "22.04", tier: "previous" }, { os: "linux", arch: "x64", distro: "ubuntu", release: "20.04", tier: "oldest" }, { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "24.04", 
tier: "latest" }, - { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "22.04", tier: "previous" }, { os: "linux", arch: "x64", baseline: true, distro: "ubuntu", release: "20.04", tier: "oldest" }, { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" }, { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.21", tier: "latest" }, @@ -379,30 +378,32 @@ function getTestAgent(platform, options) { * @returns {Record} */ function getBuildEnv(target, options) { - const { profile, baseline, abi } = target; - const release = !profile || profile === "release"; + const { baseline, abi } = target; const { canary } = options; const revision = typeof canary === "number" ? canary : 1; - const isMusl = abi === "musl"; - - let CMAKE_BUILD_TYPE = release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo"; - if (isMusl && release) { - CMAKE_BUILD_TYPE = "MinSizeRel"; - } - return { - CMAKE_BUILD_TYPE, ENABLE_BASELINE: baseline ? "ON" : "OFF", ENABLE_CANARY: revision > 0 ? "ON" : "OFF", CANARY_REVISION: revision, - ENABLE_ASSERTIONS: release ? "OFF" : "ON", - ENABLE_LOGS: release ? "OFF" : "ON", - ABI: isMusl ? "musl" : undefined, + ABI: abi === "musl" ? "musl" : undefined, + CMAKE_VERBOSE_MAKEFILE: "ON", CMAKE_TLS_VERIFY: "0", }; } +/** + * @param {Target} target + * @param {PipelineOptions} options + * @returns {string} + */ +function getBuildCommand(target, options) { + const { profile } = target; + + const label = profile || "release"; + return `bun run build:${label}`; +} + /** * @param {Platform} platform * @param {PipelineOptions} options @@ -416,7 +417,7 @@ function getBuildVendorStep(platform, options) { retry: getRetry(), cancel_on_build_failing: isMergeQueue(), env: getBuildEnv(platform, options), - command: "bun run build:ci --target dependencies", + command: `${getBuildCommand(platform, options)} --target dependencies`, }; } @@ -426,6 +427,7 @@ function getBuildVendorStep(platform, options) { * @returns {Step} */ function getBuildCppStep(platform, options) { + const command = getBuildCommand(platform, options); return { key: `${getTargetKey(platform)}-build-cpp`, label: `${getTargetLabel(platform)} - build-cpp`, @@ -439,7 +441,7 @@ function getBuildCppStep(platform, options) { // We used to build the C++ dependencies and bun in seperate steps. // However, as long as the zig build takes longer than both sequentially, // it's cheaper to run them in the same step. Can be revisited in the future. 
- command: ["bun run build:ci --target bun", "bun run build:ci --target dependencies"], + command: [`${command} --target bun`, `${command} --target dependencies`], }; } @@ -473,7 +475,7 @@ function getBuildZigStep(platform, options) { retry: getRetry(), cancel_on_build_failing: isMergeQueue(), env: getBuildEnv(platform, options), - command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`, + command: `${getBuildCommand(platform, options)} --target bun-zig --toolchain ${toolchain}`, timeout_in_minutes: 35, }; } @@ -495,7 +497,7 @@ function getLinkBunStep(platform, options) { BUN_LINK_ONLY: "ON", ...getBuildEnv(platform, options), }, - command: "bun run build:ci --target bun", + command: `${getBuildCommand(platform, options)} --target bun`, }; } @@ -512,7 +514,7 @@ function getBuildBunStep(platform, options) { retry: getRetry(), cancel_on_build_failing: isMergeQueue(), env: getBuildEnv(platform, options), - command: "bun run build:ci", + command: getBuildCommand(platform, options), }; } @@ -531,7 +533,7 @@ function getBuildBunStep(platform, options) { * @returns {Step} */ function getTestBunStep(platform, options, testOptions = {}) { - const { os } = platform; + const { os, profile } = platform; const { buildId, unifiedTests, testFiles } = testOptions; const args = [`--step=${getTargetKey(platform)}-build-bun`]; @@ -555,6 +557,7 @@ function getTestBunStep(platform, options, testOptions = {}) { retry: getRetry(), cancel_on_build_failing: isMergeQueue(), parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10, + timeout_in_minutes: profile === "asan" ? 90 : 30, command: os === "windows" ? `node .\\scripts\\runner.node.mjs ${args.join(" ")}` @@ -628,17 +631,18 @@ function getReleaseStep(buildPlatforms, options) { } /** + * @param {Platform[]} buildPlatforms * @returns {Step} */ -function getBenchmarkStep() { +function getBenchmarkStep(buildPlatforms) { return { key: "benchmark", label: "📊", agents: { - queue: "build-zig", + queue: "build-image", }, - command: "bun .buildkite/scripts/upload-benchmark.ts", - depends_on: [`linux-x64-build-bun`], + depends_on: buildPlatforms.map(platform => `${getTargetKey(platform)}-build-bun`), + command: "node .buildkite/scripts/upload-benchmark.ts", }; } @@ -728,7 +732,6 @@ function getBenchmarkStep() { * @property {string | boolean} [buildImages] * @property {string | boolean} [publishImages] * @property {number} [canary] - * @property {Profile[]} [buildProfiles] * @property {Platform[]} [buildPlatforms] * @property {Platform[]} [testPlatforms] * @property {string[]} [testFiles] @@ -823,6 +826,10 @@ function getOptionsStep() { label: `${getEmoji("assert")} Release with Assertions`, value: "assert", }, + { + label: `${getEmoji("asan")} Release with ASAN`, + value: "asan", + }, { label: `${getEmoji("debug")} Debug`, value: "debug", @@ -965,6 +972,7 @@ async function getPipelineOptions() { ?.map(item => item.trim()) ?.filter(Boolean); + const buildProfiles = parseArray(options["build-profiles"]); const buildPlatformKeys = parseArray(options["build-platforms"]); const testPlatformKeys = parseArray(options["test-platforms"]); return { @@ -977,12 +985,11 @@ async function getPipelineOptions() { testFiles: parseArray(options["test-files"]), unifiedBuilds: parseBoolean(options["unified-builds"]), unifiedTests: parseBoolean(options["unified-tests"]), - buildProfiles: parseArray(options["build-profiles"]), buildPlatforms: buildPlatformKeys?.length - ? buildPlatformKeys.map(key => buildPlatformsMap.get(key)) + ? 
buildPlatformKeys.flatMap(key => buildProfiles.map(profile => ({ ...buildPlatformsMap.get(key), profile }))) : Array.from(buildPlatformsMap.values()), testPlatforms: testPlatformKeys?.length - ? testPlatformKeys.map(key => testPlatformsMap.get(key)) + ? testPlatformKeys.flatMap(key => buildProfiles.map(profile => ({ ...testPlatformsMap.get(key), profile }))) : Array.from(testPlatformsMap.values()), dryRun: parseBoolean(options["dry-run"]), }; @@ -1017,7 +1024,6 @@ async function getPipelineOptions() { publishImages: parseOption(/\[(publish images?)\]/i), buildPlatforms: Array.from(buildPlatformsMap.values()), testPlatforms: Array.from(testPlatformsMap.values()), - buildProfiles: ["release"], }; } @@ -1040,7 +1046,7 @@ async function getPipeline(options = {}) { return; } - const { buildProfiles = [], buildPlatforms = [], testPlatforms = [], buildImages, publishImages } = options; + const { buildPlatforms = [], testPlatforms = [], buildImages, publishImages } = options; const imagePlatforms = new Map( buildImages || publishImages ? [...buildPlatforms, ...testPlatforms] @@ -1077,22 +1083,20 @@ async function getPipeline(options = {}) { if (!buildId) { steps.push( - ...buildPlatforms - .flatMap(platform => buildProfiles.map(profile => ({ ...platform, profile }))) - .map(target => { - const imageKey = getImageKey(target); + ...buildPlatforms.map(target => { + const imageKey = getImageKey(target); - return getStepWithDependsOn( - { - key: getTargetKey(target), - group: getTargetLabel(target), - steps: unifiedBuilds - ? [getBuildBunStep(target, options)] - : [getBuildCppStep(target, options), getBuildZigStep(target, options), getLinkBunStep(target, options)], - }, - imagePlatforms.has(imageKey) ? `${imageKey}-build-image` : undefined, - ); - }), + return getStepWithDependsOn( + { + key: getTargetKey(target), + group: getTargetLabel(target), + steps: unifiedBuilds + ? [getBuildBunStep(target, options)] + : [getBuildCppStep(target, options), getBuildZigStep(target, options), getLinkBunStep(target, options)], + }, + imagePlatforms.has(imageKey) ? 
`${imageKey}-build-image` : undefined, + ); + }), ); } @@ -1100,23 +1104,20 @@ async function getPipeline(options = {}) { const { skipTests, forceTests, unifiedTests, testFiles } = options; if (!skipTests || forceTests) { steps.push( - ...testPlatforms - .flatMap(platform => buildProfiles.map(profile => ({ ...platform, profile }))) - .map(target => ({ - key: getTargetKey(target), - group: getTargetLabel(target), - steps: [getTestBunStep(target, options, { unifiedTests, testFiles, buildId })], - })), + ...testPlatforms.map(target => ({ + key: getTargetKey(target), + group: getTargetLabel(target), + steps: [getTestBunStep(target, options, { unifiedTests, testFiles, buildId })], + })), ); } } if (isMainBranch()) { steps.push(getReleaseStep(buildPlatforms, options)); + steps.push(getBenchmarkStep(buildPlatforms)); } - steps.push(getBenchmarkStep()); - /** @type {Map} */ const stepsByGroup = new Map(); diff --git a/build.zig b/build.zig index 3659c5936d..04afe96fe8 100644 --- a/build.zig +++ b/build.zig @@ -92,6 +92,7 @@ const BunBuildOptions = struct { opts.addOption([:0]const u8, "sha", b.allocator.dupeZ(u8, this.sha) catch @panic("OOM")); opts.addOption(bool, "baseline", this.isBaseline()); opts.addOption(bool, "enable_logs", this.enable_logs); + opts.addOption(bool, "enable_asan", this.enable_asan); opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version})); opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm); diff --git a/cmake/Globals.cmake b/cmake/Globals.cmake index 56e0a43544..2e01acdabc 100644 --- a/cmake/Globals.cmake +++ b/cmake/Globals.cmake @@ -425,6 +425,11 @@ function(register_command) list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${CACHE_PATH}) list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} gzip -1 libbun-profile.a) list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload libbun-profile.a.gz) + elseif(filename STREQUAL "libbun-asan.a") + list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${BUILD_PATH}/codegen) + list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} rm -r ${CACHE_PATH}) + list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} gzip -1 libbun-asan.a) + list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload libbun-asan.a.gz) else() list(APPEND CMD_COMMANDS COMMAND ${CMAKE_COMMAND} -E chdir ${BUILD_PATH} buildkite-agent artifact upload ${filename}) endif() diff --git a/cmake/Options.cmake b/cmake/Options.cmake index ac3f2fd439..412cb068b3 100644 --- a/cmake/Options.cmake +++ b/cmake/Options.cmake @@ -91,7 +91,19 @@ endif() optionx(CANARY_REVISION STRING "The canary revision of the build" DEFAULT ${DEFAULT_CANARY_REVISION}) -if(RELEASE AND LINUX AND CI) +if(LINUX) + optionx(ENABLE_VALGRIND BOOL "If Valgrind support should be enabled" DEFAULT OFF) +endif() + +if(DEBUG AND APPLE AND ARCH STREQUAL "aarch64") + set(DEFAULT_ASAN ON) +else() + set(DEFAULT_ASAN OFF) +endif() + +optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT ${DEFAULT_ASAN}) + +if(RELEASE AND LINUX AND CI AND NOT ENABLE_ASSERTIONS AND NOT ENABLE_ASAN) set(DEFAULT_LTO ON) else() set(DEFAULT_LTO OFF) @@ -99,16 +111,10 @@ endif() optionx(ENABLE_LTO BOOL "If LTO (link-time optimization) should be used" DEFAULT ${DEFAULT_LTO}) -if(LINUX) - optionx(ENABLE_VALGRIND BOOL "If Valgrind support should be enabled" DEFAULT OFF) 
+if(ENABLE_ASAN AND ENABLE_LTO) + message(WARNING "ASAN and LTO are not supported together, disabling LTO") + setx(ENABLE_LTO OFF) endif() -if(DEBUG AND APPLE AND CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64") - optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT ON) -else() - optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT OFF) -endif() - -optionx(ENABLE_PRETTIER BOOL "If prettier should be ran" DEFAULT OFF) if(USE_VALGRIND AND NOT USE_BASELINE) message(WARNING "If valgrind is enabled, baseline must also be enabled") diff --git a/cmake/targets/BuildBun.cmake b/cmake/targets/BuildBun.cmake index 62a582b79c..c7478dbea2 100644 --- a/cmake/targets/BuildBun.cmake +++ b/cmake/targets/BuildBun.cmake @@ -1,8 +1,7 @@ if(DEBUG) set(bun bun-debug) -# elseif(ENABLE_SMOL) -# set(bun bun-smol-profile) -# set(bunStrip bun-smol) +elseif(ENABLE_ASAN) + set(bun bun-asan) elseif(ENABLE_VALGRIND) set(bun bun-valgrind) elseif(ENABLE_ASSERTIONS) @@ -599,6 +598,7 @@ register_command( -Doptimize=${ZIG_OPTIMIZE} -Dcpu=${ZIG_CPU} -Denable_logs=$,true,false> + -Denable_asan=$,true,false> -Dversion=${VERSION} -Dreported_nodejs_version=${NODEJS_VERSION} -Dcanary=${CANARY_REVISION} @@ -888,7 +888,7 @@ if(NOT WIN32) ) endif() - if (ENABLE_ASAN) + if(ENABLE_ASAN) target_compile_options(${bun} PUBLIC -fsanitize=address ) @@ -931,6 +931,15 @@ if(NOT WIN32) -Wno-nullability-completeness -Werror ) + + if(ENABLE_ASAN) + target_compile_options(${bun} PUBLIC + -fsanitize=address + ) + target_link_libraries(${bun} PUBLIC + -fsanitize=address + ) + endif() endif() else() target_compile_options(${bun} PUBLIC @@ -1272,7 +1281,10 @@ if(NOT BUN_CPP_ONLY) if(ENABLE_BASELINE) set(bunTriplet ${bunTriplet}-baseline) endif() - string(REPLACE bun ${bunTriplet} bunPath ${bun}) + if(ENABLE_ASAN) + set(bunTriplet ${bunTriplet}-asan) + endif() + set(bunPath ${bunTriplet}) set(bunFiles ${bunExe} features.json) if(WIN32) list(APPEND bunFiles ${bun}.pdb) diff --git a/cmake/tools/SetupBuildkite.cmake b/cmake/tools/SetupBuildkite.cmake index b2ab01dd94..e1d8b32cde 100644 --- a/cmake/tools/SetupBuildkite.cmake +++ b/cmake/tools/SetupBuildkite.cmake @@ -122,6 +122,8 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX}) if(BUILDKITE) if(BUILDKITE_ARTIFACT_PATH STREQUAL "libbun-profile.a") set(BUILDKITE_ARTIFACT_PATH libbun-profile.a.gz) + elseif(BUILDKITE_ARTIFACT_PATH STREQUAL "libbun-asan.a") + set(BUILDKITE_ARTIFACT_PATH libbun-asan.a.gz) endif() set(BUILDKITE_DOWNLOAD_COMMAND buildkite-agent artifact download ${BUILDKITE_ARTIFACT_PATH} . 
--build ${BUILDKITE_BUILD_UUID} --step ${BUILDKITE_JOB_ID}) else() @@ -151,6 +153,19 @@ foreach(i RANGE ${BUILDKITE_JOBS_MAX_INDEX}) DEPENDS ${BUILD_PATH}/libbun-profile.a.gz ) + elseif(BUILDKITE_ARTIFACT_PATH STREQUAL "libbun-asan.a.gz") + add_custom_command( + COMMENT + "Unpacking libbun-asan.a.gz" + VERBATIM COMMAND + gunzip libbun-asan.a.gz + WORKING_DIRECTORY + ${BUILD_PATH} + OUTPUT + ${BUILD_PATH}/libbun-asan.a + DEPENDS + ${BUILD_PATH}/libbun-asan.a.gz + ) endif() endforeach() diff --git a/cmake/tools/SetupWebKit.cmake b/cmake/tools/SetupWebKit.cmake index e9fe5f720a..86f6d43dda 100644 --- a/cmake/tools/SetupWebKit.cmake +++ b/cmake/tools/SetupWebKit.cmake @@ -73,13 +73,13 @@ endif() if(DEBUG) set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}-debug") -elseif(ENABLE_LTO AND NOT WIN32) +elseif(ENABLE_LTO) set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}-lto") else() set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}") endif() -if(ENABLE_ASAN) +if(ENABLE_ASAN AND ((APPLE AND DEBUG AND ARCH STREQUAL "aarch64") OR (LINUX AND RELEASE))) set(WEBKIT_SUFFIX "${WEBKIT_SUFFIX}-asan") endif() diff --git a/cmake/tools/SetupZig.cmake b/cmake/tools/SetupZig.cmake index b7042e4f39..e14d851a92 100644 --- a/cmake/tools/SetupZig.cmake +++ b/cmake/tools/SetupZig.cmake @@ -24,7 +24,11 @@ set(ZIG_COMMIT "a207204ee57a061f2fb96c7bae0c491b609e73a5") optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET}) if(CMAKE_BUILD_TYPE STREQUAL "Release") - set(DEFAULT_ZIG_OPTIMIZE "ReleaseFast") + if(ENABLE_ASAN) + set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe") + else() + set(DEFAULT_ZIG_OPTIMIZE "ReleaseFast") + endif() elseif(CMAKE_BUILD_TYPE STREQUAL "RelWithDebInfo") set(DEFAULT_ZIG_OPTIMIZE "ReleaseSafe") elseif(CMAKE_BUILD_TYPE STREQUAL "MinSizeRel") diff --git a/docs/project/asan.md b/docs/project/asan.md new file mode 100644 index 0000000000..ef50a15370 --- /dev/null +++ b/docs/project/asan.md @@ -0,0 +1,124 @@ +# ASAN Builds for Bun + +This document explains how to use and configure ASAN (Address Sanitizer) builds for Bun. + +> **Note**: ASAN builds are available in CI for Linux and are configured to help identify memory issues in release builds. + +## What is ASAN? + +ASAN (Address Sanitizer) is a memory error detector for C/C++ and Zig code. It can detect: + +- Use-after-free +- Heap buffer overflow +- Stack buffer overflow +- Global buffer overflow +- Use-after-return +- Use-after-scope +- Initialization order bugs +- Memory leaks + +## ASAN Builds in CI + +Bun CI includes ASAN builds to catch memory errors. 
These builds are configured with:
+
+- Release optimizations for speed
+- ASAN instrumentation for memory error detection
+- Assertions enabled for both Bun and WebKit
+
+The CI pipeline automatically:
+
+- Builds a special ASAN-enabled release build for Linux
+- Runs all tests to thoroughly check for memory issues
+- Uses reduced parallelism to avoid memory pressure during testing
+- Applies suppressions for known false positives
+- Extends test timeouts to accommodate ASAN overhead
+- Includes ASAN builds in release artifacts for debugging purposes
+
+## Local ASAN Builds
+
+To build Bun with ASAN locally, you can use the npm script:
+
+```bash
+# Build a release build with ASAN and assertions (recommended)
+bun run build:asan
+```
+
+Or manually with CMake:
+
+```bash
+# Debug build with ASAN
+cmake -B build -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON
+
+# Release build with ASAN
+cmake -B build -DCMAKE_BUILD_TYPE=Release -DENABLE_ASAN=ON
+
+# Release build with ASAN and assertions
+cmake -B build -DCMAKE_BUILD_TYPE=Release -DENABLE_ASAN=ON -DENABLE_ASSERTIONS=ON
+```
+
+## Running with ASAN
+
+When running an ASAN build, you can configure behavior with environment variables:
+
+```bash
+# Basic ASAN options - leak detection disabled (recommended)
+ASAN_OPTIONS=detect_leaks=0:halt_on_error=0:detect_odr_violation=0 ./build/bun-asan
+
+# If you really need leak detection (will produce A LOT of noise)
+# ASAN_OPTIONS=detect_leaks=1:leak_check_at_exit=1:halt_on_error=0 ./build/bun-asan
+# LSAN_OPTIONS=suppressions=lsan.supp:print_suppressions=1 ./build/bun-asan
+```
+
+> **Warning**: Enabling leak detection will generate excessive noise due to deliberately uncollected memory in WebKit and other components. It's recommended to keep leak detection disabled and focus on other memory errors like use-after-free, buffer overflows, etc.
+
+## Other Memory Error Types
+
+ASAN can detect several types of memory errors:
+
+1. **Use-after-free**: When a program continues to use memory after it's been freed
+2. **Buffer overflow**: When a program writes beyond the bounds of allocated memory
+3. **Stack overflow**: When a function's stack usage exceeds available space
+4. **Memory corruption**: Often caused by writing to invalid memory locations
+5. **Use-after-return**: When a function returns a pointer to stack memory that's no longer valid
+
+When an error is detected, ASAN will print a helpful report showing:
+
+- The type of error
+- The memory address where the error occurred
+- A stack trace showing the code path that led to the error
+- Information about the memory allocation/deallocation (if relevant)
+
+Example error output:
+
+```
+==1234==ERROR: AddressSanitizer: heap-use-after-free on address 0x614000000044 at pc 0x55d8e2ac1f14...
+READ of size 4 at 0x614000000044 thread T0
+ #0 0x55d8e2ac1f14 in main example.c:10
+ #1 0x7f91e6f5e0b2 in __libc_start_main...
+```
+
+## Understanding ASAN Reports
+
+ASAN reports contain detailed information about memory errors:
+
+```
+==12345==ERROR: AddressSanitizer: heap-use-after-free on address 0x7f7ddab8c084
+READ of size 4 at 0x7f7ddab8c084 thread T0
+ #0 0x43b45a in Function source/file.cpp:123:45
+ #1 0x44af90 in AnotherFunction source/file.cpp:234:10
+ ...
+```
+
+Key components of the report:
+
+- Error type (heap-use-after-free, heap-buffer-overflow, etc.)
+- Operation (READ/WRITE) and size
+- Stack trace showing where the error occurred
+- Information about the allocated/freed memory
+
+## Best Practices
+
+1.
Run tests with ASAN builds regularly +2. Add suppressions only for well-understood false positives +3. Fix real issues promptly - ASAN errors indicate real problems +4. Consider using ASAN in debug builds during development diff --git a/package.json b/package.json index e7a56cac1c..72eb88a3db 100644 --- a/package.json +++ b/package.json @@ -38,6 +38,7 @@ "build:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -B build/release", "build:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh", "build:assert": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=RelWithDebInfo -DENABLE_ASSERTIONS=ON -DENABLE_LOGS=ON -B build/release-assert", + "build:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DENABLE_ASSERTIONS=ON -DENABLE_LOGS=OFF -DENABLE_ASAN=ON -DENABLE_LTO=OFF -B build/release-asan", "build:logs": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=ON -B build/release-logs", "build:safe": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=ReleaseSafe -B build/release-safe", "build:smol": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=MinSizeRel -B build/release-smol", diff --git a/packages/bun-usockets/src/crypto/openssl.c b/packages/bun-usockets/src/crypto/openssl.c index a044df1cac..86a8ee7b27 100644 --- a/packages/bun-usockets/src/crypto/openssl.c +++ b/packages/bun-usockets/src/crypto/openssl.c @@ -1364,7 +1364,7 @@ void us_internal_ssl_socket_context_add_server_name( if (ssl_context) { /* Attach the user data to this context */ if (1 != SSL_CTX_set_ex_data(ssl_context, 0, user)) { -#if BUN_DEBUG +#if ASSERT_ENABLED printf("CANNOT SET EX DATA!\n"); abort(); #endif @@ -1392,7 +1392,7 @@ int us_bun_internal_ssl_socket_context_add_server_name( /* Attach the user data to this context */ if (1 != SSL_CTX_set_ex_data(ssl_context, 0, user)) { -#if BUN_DEBUG +#if ASSERT_ENABLED printf("CANNOT SET EX DATA!\n"); abort(); #endif diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index f6fb37dac5..0e706800bf 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -910,6 +910,9 @@ install_llvm() { bash="$(require bash)" llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")" execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all + + # Install llvm-symbolizer explicitly to ensure it's available for ASAN + install_packages "llvm-$(llvm_version)-tools" ;; brew) install_packages "llvm@$(llvm_version)" @@ -920,7 +923,8 @@ install_llvm() { "llvm$(llvm_version)" \ "clang$(llvm_version)" \ "scudo-malloc" \ - "lld" + "lld" \ + "llvm$(llvm_version)-dev" # Ensures llvm-symbolizer is installed ;; esac } @@ -994,6 +998,8 @@ install_gcc() { execute_sudo ln -sf $(which ld.lld-$llvm_v) /usr/bin/ld execute_sudo ln -sf $(which clang) /usr/bin/cc execute_sudo ln -sf $(which clang++) /usr/bin/c++ + # Make sure llvm-symbolizer is available for ASAN + execute_sudo ln -sf $(which llvm-symbolizer-$llvm_v) /usr/bin/llvm-symbolizer } install_ccache() { diff --git a/scripts/machine.mjs b/scripts/machine.mjs index 8d0ae49ec0..fe7dd0cbbb 100755 --- a/scripts/machine.mjs +++ b/scripts/machine.mjs @@ -800,7 +800,7 @@ export function getDiskSize(options) { return 60; } - return 30; + return 40; } /** @@ -1192,7 +1192,7 @@ async function main() { tags, cpuCount: parseInt(args["cpu-count"]) || undefined, memoryGb: parseInt(args["memory-gb"]) || undefined, - diskSizeGb: parseInt(args["disk-size-gb"]) || undefined, + diskSizeGb: 
parseInt(args["disk-size-gb"]) || void 0, preemptible: !!args["preemptible"] || !!args["spot"], detached: !!args["detached"], bootstrap: args["no-bootstrap"] !== true, diff --git a/scripts/runner.node.mjs b/scripts/runner.node.mjs index a2ede9d72d..d52bf8a1c9 100755 --- a/scripts/runner.node.mjs +++ b/scripts/runner.node.mjs @@ -25,17 +25,23 @@ import { } from "node:fs"; import { readFile } from "node:fs/promises"; import { userInfo } from "node:os"; -import { basename, dirname, join, relative, sep } from "node:path"; +import { basename, dirname, join, relative, sep, extname } from "node:path"; import { parseArgs } from "node:util"; import { + getAbi, + getAbiVersion, + getArch, getBranch, getBuildLabel, getBuildUrl, getCommit, + getDistro, + getDistroVersion, getEnv, getFileUrl, getHostname, getLoggedInUserCountOrDetails, + getOs, getSecret, getShell, getWindowsExitReason, @@ -156,7 +162,116 @@ if (options["quiet"]) { } /** - * + * @typedef {Object} TestExpectation + * @property {string} filename + * @property {string[]} expectations + * @property {string[] | undefined} bugs + * @property {string[] | undefined} modifiers + * @property {string | undefined} comment + */ + +/** + * @returns {TestExpectation[]} + */ +function getTestExpectations() { + const expectationsPath = join(cwd, "test", "expectations.txt"); + if (!existsSync(expectationsPath)) { + return []; + } + const lines = readFileSync(expectationsPath, "utf-8").split(/\r?\n/); + + /** @type {TestExpectation[]} */ + const expectations = []; + + for (const line of lines) { + const content = line.trim(); + if (!content || content.startsWith("#")) { + continue; + } + + let comment; + const commentIndex = content.indexOf("#"); + let cleanLine = content; + if (commentIndex !== -1) { + comment = content.substring(commentIndex + 1).trim(); + cleanLine = content.substring(0, commentIndex).trim(); + } + + let modifiers = []; + let remaining = cleanLine; + let modifierMatch = remaining.match(/^\[(.*?)\]/); + if (modifierMatch) { + modifiers = modifierMatch[1].trim().split(/\s+/); + remaining = remaining.substring(modifierMatch[0].length).trim(); + } + + let expectationValues = ["Skip"]; + const expectationMatch = remaining.match(/\[(.*?)\]$/); + if (expectationMatch) { + expectationValues = expectationMatch[1].trim().split(/\s+/); + remaining = remaining.substring(0, remaining.length - expectationMatch[0].length).trim(); + } + + const filename = remaining.trim(); + if (filename) { + expectations.push({ + filename, + expectations: expectationValues, + bugs: undefined, + modifiers: modifiers.length ? 
modifiers : undefined, + comment, + }); + } + } + + return expectations; +} + +/** + * @param {string} testPath + * @returns {string[]} + */ +function getTestModifiers(testPath) { + const ext = extname(testPath); + const filename = basename(testPath, ext); + const modifiers = filename.split("-").filter(value => value !== "bun"); + + const os = getOs(); + const arch = getArch(); + modifiers.push(os, arch, `${os}-${arch}`); + + const distro = getDistro(); + if (distro) { + modifiers.push(distro, `${os}-${distro}`, `${os}-${arch}-${distro}`); + const distroVersion = getDistroVersion(); + if (distroVersion) { + modifiers.push( + distroVersion, + `${distro}-${distroVersion}`, + `${os}-${distro}-${distroVersion}`, + `${os}-${arch}-${distro}-${distroVersion}`, + ); + } + } + + const abi = getAbi(); + if (abi) { + modifiers.push(abi, `${os}-${abi}`, `${os}-${arch}-${abi}`); + const abiVersion = getAbiVersion(); + if (abiVersion) { + modifiers.push( + abiVersion, + `${abi}-${abiVersion}`, + `${os}-${abi}-${abiVersion}`, + `${os}-${arch}-${abi}-${abiVersion}`, + ); + } + } + + return modifiers.map(value => value.toUpperCase()); +} + +/** * @returns {Promise} */ async function runTests() { @@ -168,10 +283,14 @@ async function runTests() { } !isQuiet && console.log("Bun:", execPath); + const expectations = getTestExpectations(); + const modifiers = getTestModifiers(execPath); + !isQuiet && console.log("Modifiers:", modifiers); + const revision = getRevision(execPath); !isQuiet && console.log("Revision:", revision); - const tests = getRelevantTests(testsPath); + const tests = getRelevantTests(testsPath, modifiers, expectations); !isQuiet && console.log("Running tests:", tests.length); /** @type {VendorTest[] | undefined} */ @@ -693,8 +812,8 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) { BUN_RUNTIME_TRANSPILER_CACHE_PATH: "0", BUN_INSTALL_CACHE_DIR: tmpdirPath, SHELLOPTS: isWindows ? "igncr" : undefined, // ignore "\r" on Windows - // Used in Node.js tests. - TEST_TMPDIR: tmpdirPath, + ASAN_OPTIONS: "allow_user_segv_handler=1", + TEST_TMPDIR: tmpdirPath, // Used in Node.js tests. 
}; if (env) { @@ -1072,9 +1191,6 @@ function isHidden(path) { return /node_modules|node.js/.test(dirname(path)) || /^\./.test(basename(path)); } -/** Files with these extensions are not treated as test cases */ -const IGNORED_EXTENSIONS = new Set([".md"]); - /** * @param {string} cwd * @returns {string[]} @@ -1084,13 +1200,14 @@ function getTests(cwd) { const dirname = join(cwd, path); for (const entry of readdirSync(dirname, { encoding: "utf-8", withFileTypes: true })) { const { name } = entry; - const ext = name.slice(name.lastIndexOf(".")); const filename = join(path, name); - if (isHidden(filename) || IGNORED_EXTENSIONS.has(ext)) { + if (isHidden(filename)) { continue; } - if (entry.isFile() && isTest(filename)) { - yield filename; + if (entry.isFile()) { + if (isTest(filename)) { + yield filename; + } } else if (entry.isDirectory()) { yield* getFiles(cwd, filename); } @@ -1226,9 +1343,11 @@ async function getVendorTests(cwd) { /** * @param {string} cwd + * @param {string[]} testModifiers + * @param {TestExpectation[]} testExpectations * @returns {string[]} */ -function getRelevantTests(cwd) { +function getRelevantTests(cwd, testModifiers, testExpectations) { let tests = getTests(cwd); const availableTests = []; const filteredTests = []; @@ -1272,6 +1391,25 @@ function getRelevantTests(cwd) { } } + const skipExpectations = testExpectations + .filter( + ({ modifiers, expectations }) => + !modifiers?.length || testModifiers.some(modifier => modifiers?.includes(modifier)), + ) + .map(({ filename }) => filename.replace("test/", "")); + if (skipExpectations.length) { + const skippedTests = availableTests.filter(testPath => skipExpectations.some(filter => isMatch(testPath, filter))); + if (skippedTests.length) { + for (const testPath of skippedTests) { + const index = availableTests.indexOf(testPath); + if (index !== -1) { + availableTests.splice(index, 1); + } + } + !isQuiet && console.log("Skipping tests:", skipExpectations, skippedTests.length, "/", availableTests.length); + } + } + const shardId = parseInt(options["shard"]); const maxShards = parseInt(options["max-shards"]); if (filters?.length) { @@ -1368,13 +1506,17 @@ async function getExecPathFromBuildKite(target, buildId) { await spawnSafe({ command: "buildkite-agent", args, + timeout: 60000, }); - for (const entry of readdirSync(releasePath, { recursive: true, encoding: "utf-8" })) { - if (/^bun.*\.zip$/i.test(entry) && !entry.includes("-profile.zip")) { - zipPath = join(releasePath, entry); - break downloadLoop; - } + zipPath = readdirSync(releasePath, { recursive: true, encoding: "utf-8" }) + .filter(filename => /^bun.*\.zip$/i.test(filename)) + .map(filename => join(releasePath, filename)) + .sort((a, b) => b.includes("profile") - a.includes("profile")) + .at(0); + + if (zipPath) { + break downloadLoop; } console.warn(`Waiting for ${target}.zip to be available...`); @@ -1390,12 +1532,12 @@ async function getExecPathFromBuildKite(target, buildId) { const releaseFiles = readdirSync(releasePath, { recursive: true, encoding: "utf-8" }); for (const entry of releaseFiles) { const execPath = join(releasePath, entry); - if (/bun(?:\.exe)?$/i.test(entry) && statSync(execPath).isFile()) { + if (/bun(?:-[a-z]+)?(?:\.exe)?$/i.test(entry) && statSync(execPath).isFile()) { return execPath; } } - console.warn(`Found ${releaseFiles.length} files in ${releasePath}:`); + console.warn(`Found ${releaseFiles.length} files in ${releasePath}:`, releaseFiles); throw new Error(`Could not find executable from BuildKite: ${releasePath}`); } diff --git 
a/scripts/utils.mjs b/scripts/utils.mjs index a4ad1875fe..4440da7bc6 100755 --- a/scripts/utils.mjs +++ b/scripts/utils.mjs @@ -2332,10 +2332,11 @@ function parseLevel(level) { * @returns {Annotation} */ export function parseAnnotation(options, context) { + const cwd = (context?.["cwd"] || process.cwd()).replace(/\\/g, "/"); const source = options["source"]; const level = parseLevel(options["level"]); const title = options["title"] || (source ? `${source} ${level}` : level); - const filename = options["filename"]; + const path = options["filename"]?.replace(/\\/g, "/"); const line = parseInt(options["line"]) || undefined; const column = parseInt(options["column"]) || undefined; const content = options["content"]; @@ -2354,6 +2355,13 @@ export function parseAnnotation(options, context) { relevantLines.push(line); } + let filename; + if (path?.startsWith(cwd)) { + filename = path.slice(cwd.length + 1); + } else { + filename = path; + } + return { source, title, @@ -2894,6 +2902,7 @@ const emojiMap = { true: ["✅", "white_check_mark"], false: ["❌", "x"], debug: ["🐞", "bug"], + asan: ["🐛", "bug"], assert: ["🔍", "mag"], release: ["🏆", "trophy"], gear: ["⚙️", "gear"], diff --git a/src/bun.js/bindings/BunProcess.cpp b/src/bun.js/bindings/BunProcess.cpp index b6f7f70179..c9afb6f6f1 100644 --- a/src/bun.js/bindings/BunProcess.cpp +++ b/src/bun.js/bindings/BunProcess.cpp @@ -2177,7 +2177,7 @@ static JSValue constructStdioWriteStream(JSC::JSGlobalObject* globalObject, int RETURN_IF_EXCEPTION(scope, {}); if (auto* exception = returnedException.get()) { -#if BUN_DEBUG +#if ASSERT_ENABLED Zig::GlobalObject::reportUncaughtExceptionAtEventLoop(globalObject, exception); #endif scope.throwException(globalObject, exception->value()); @@ -2243,7 +2243,7 @@ static JSValue constructStdin(VM& vm, JSObject* processObject) RETURN_IF_EXCEPTION(scope, {}); if (auto* exception = returnedException.get()) { -#if BUN_DEBUG +#if ASSERT_ENABLED Zig::GlobalObject::reportUncaughtExceptionAtEventLoop(globalObject, exception); #endif scope.throwException(globalObject, exception->value()); @@ -2323,7 +2323,7 @@ static JSValue constructProcessChannel(VM& vm, JSObject* processObject) RETURN_IF_EXCEPTION(scope, {}); if (auto* exception = returnedException.get()) { -#if BUN_DEBUG +#if ASSERT_ENABLED Zig::GlobalObject::reportUncaughtExceptionAtEventLoop(globalObject, exception); #endif scope.throwException(globalObject, exception->value()); diff --git a/src/bun.js/bindings/ErrorCode.cpp b/src/bun.js/bindings/ErrorCode.cpp index 03b3ae1760..981a97b8e0 100644 --- a/src/bun.js/bindings/ErrorCode.cpp +++ b/src/bun.js/bindings/ErrorCode.cpp @@ -1584,7 +1584,7 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject JSC::JSValue codeValue = callFrame->argument(0); RETURN_IF_EXCEPTION(scope, {}); -#if BUN_DEBUG +#if ASSERT_ENABLED if (!codeValue.isNumber()) { JSC::throwTypeError(globalObject, scope, "First argument to $ERR_ must be a number"_s); return {}; @@ -1593,7 +1593,7 @@ JSC_DEFINE_HOST_FUNCTION(Bun::jsFunctionMakeErrorWithCode, (JSC::JSGlobalObject int code = codeValue.toInt32(globalObject); -#if BUN_DEBUG +#if ASSERT_ENABLED if (code > Bun::NODE_ERROR_COUNT - 1 || code < 0) { JSC::throwTypeError(globalObject, scope, "Invalid error code. 
Use $ERR_* constants"_s); return {}; diff --git a/src/bun.js/bindings/JS2Native.cpp b/src/bun.js/bindings/JS2Native.cpp index 0d93d22aff..6126a05d08 100644 --- a/src/bun.js/bindings/JS2Native.cpp +++ b/src/bun.js/bindings/JS2Native.cpp @@ -22,7 +22,7 @@ JSC_DEFINE_HOST_FUNCTION(jsDollarLazy, (JSC::JSGlobalObject * lexicalGlobalObjec { JSC::JSValue target = callFrame->uncheckedArgument(0); -#if BUN_DEBUG +#if ASSERT_ENABLED ASSERT_WITH_MESSAGE(target.isInt32(), "In call to $lazy: expected Int32, got %s", target.toWTFString(lexicalGlobalObject).utf8().data()); #endif diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp index b00d3dfd27..d0af026ca4 100644 --- a/src/bun.js/bindings/ZigGlobalObject.cpp +++ b/src/bun.js/bindings/ZigGlobalObject.cpp @@ -266,6 +266,17 @@ extern "C" void JSCInitialize(const char* envp[], size_t envc, void (*onCrash)(c std::set_terminate([]() { Zig__GlobalObject__onCrash(); }); WTF::initializeMainThread(); + +#if ASAN_ENABLED && OS(LINUX) + { + JSC::Options::AllowUnfinalizedAccessScope scope; + + // ASAN interferes with JSC's signal handlers + JSC::Options::useWasmFaultSignalHandler() = false; + JSC::Options::useWasmFastMemory() = false; + } +#endif + JSC::initialize(); { @@ -1997,7 +2008,7 @@ static inline std::optional invokeReadableStreamFunction(JSC::JSGl auto scope = DECLARE_CATCH_SCOPE(vm); auto callData = JSC::getCallData(function); auto result = call(&lexicalGlobalObject, function, callData, thisValue, arguments); -#if BUN_DEBUG +#if ASSERT_ENABLED if (scope.exception()) { Bun__reportError(&lexicalGlobalObject, JSValue::encode(scope.exception())); } @@ -4053,7 +4064,7 @@ JSC::JSInternalPromise* GlobalObject::moduleLoaderImportModule(JSGlobalObject* j BunString moduleNameZ; String moduleName = moduleNameValue->value(globalObject); -#if BUN_DEBUG +#if ASSERT_ENABLED auto startRefCount = moduleName.impl()->refCount(); #endif if (moduleName.startsWith("file://"_s)) { diff --git a/src/bun.js/bindings/bindings.cpp b/src/bun.js/bindings/bindings.cpp index 2d53c70739..33bcf0a3e5 100644 --- a/src/bun.js/bindings/bindings.cpp +++ b/src/bun.js/bindings/bindings.cpp @@ -133,7 +133,7 @@ #include "wtf-bindings.h" #if OS(DARWIN) -#if BUN_DEBUG +#if ASSERT_ENABLED #if !__has_feature(address_sanitizer) #include #define IS_MALLOC_DEBUGGING_ENABLED 1 @@ -2654,7 +2654,7 @@ JSC::EncodedJSValue JSObjectCallAsFunctionReturnValueHoldingAPILock(JSContextRef JSC::JSLockHolder lock(vm); -#if BUN_DEBUG +#if ASSERT_ENABLED // This is a redundant check, but we add it to make the error message clearer. 
ASSERT_WITH_MESSAGE(!vm.isCollectorBusyOnCurrentThread(), "Cannot call function inside a finalizer or while GC is running on same thread."); #endif @@ -6431,7 +6431,7 @@ extern "C" EncodedJSValue Bun__JSObject__getCodePropertyVMInquiry(JSC::JSGlobalO return JSValue::encode(slot.getPureResult()); } -#if BUN_DEBUG +#if ASSERT_ENABLED CPP_DECL const char* Bun__CallFrame__describeFrame(JSC::CallFrame* callFrame) { return callFrame->describeFrame(); diff --git a/src/bun.js/bindings/napi_external.h b/src/bun.js/bindings/napi_external.h index 26b61f9d5f..c34a4272a7 100644 --- a/src/bun.js/bindings/napi_external.h +++ b/src/bun.js/bindings/napi_external.h @@ -59,7 +59,7 @@ public: accessor->finishCreation(vm, value, finalizer_hint, env, callback); -#if BUN_DEBUG +#if ASSERT_ENABLED if (auto* callFrame = vm.topCallFrame) { auto origin = callFrame->callerSourceOrigin(vm); accessor->sourceOriginURL = origin.string(); @@ -97,7 +97,7 @@ public: NapiFinalizer m_finalizer; napi_env m_env; -#if BUN_DEBUG +#if ASSERT_ENABLED String sourceOriginURL = String(); unsigned sourceOriginLine = 0; unsigned sourceOriginColumn = 0; diff --git a/src/bun.js/bindings/wtf-bindings.cpp b/src/bun.js/bindings/wtf-bindings.cpp index f549709e66..c9369cf736 100644 --- a/src/bun.js/bindings/wtf-bindings.cpp +++ b/src/bun.js/bindings/wtf-bindings.cpp @@ -245,4 +245,8 @@ extern "C" void* Bun__StackCheck__getMaxStack() return stackBoundsForCurrentThread.end(); } +extern "C" void WTF__DumpStackTrace(void** stack, size_t stack_count) +{ + WTFPrintBacktrace({ stack, stack_count }); +} } diff --git a/src/bun.js/modules/BunJSCModule.h b/src/bun.js/modules/BunJSCModule.h index 96474c4496..dd8b050951 100644 --- a/src/bun.js/modules/BunJSCModule.h +++ b/src/bun.js/modules/BunJSCModule.h @@ -50,7 +50,7 @@ #include #if OS(DARWIN) -#if BUN_DEBUG +#if ASSERT_ENABLED #if !__has_feature(address_sanitizer) #include #define IS_MALLOC_DEBUGGING_ENABLED 1 diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index 86e03d868e..76ce3a763a 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -5906,9 +5906,6 @@ pub const NodeFS = struct { Maybe(Return.Utimes).success; } - bun.assert(args.mtime.nsec <= 1e9); - bun.assert(args.atime.nsec <= 1e9); - return switch (Syscall.utimens( args.path.sliceZ(&this.sync_error_buf), args.atime, diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index e4ee58c078..eba547f486 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -10784,11 +10784,11 @@ pub const LinkerContext = struct { var worker = ThreadPool.Worker.get(@fieldParentPtr("linker", ctx.c)); defer worker.unget(); - const prev_action = if (Environment.isDebug) bun.crash_handler.current_action; - defer if (Environment.isDebug) { + const prev_action = if (Environment.show_crash_trace) bun.crash_handler.current_action; + defer if (Environment.show_crash_trace) { bun.crash_handler.current_action = prev_action; }; - if (Environment.isDebug) bun.crash_handler.current_action = .{ .bundle_generate_chunk = .{ + if (Environment.show_crash_trace) bun.crash_handler.current_action = .{ .bundle_generate_chunk = .{ .chunk = ctx.chunk, .context = ctx.c, .part_range = &part_range.part_range, @@ -10940,17 +10940,17 @@ pub const LinkerContext = struct { var worker = ThreadPool.Worker.get(@fieldParentPtr("linker", ctx.c)); defer worker.unget(); - const prev_action = if (Environment.isDebug) bun.crash_handler.current_action; - defer if (Environment.isDebug) { + const prev_action = if 
(Environment.show_crash_trace) bun.crash_handler.current_action; + defer if (Environment.show_crash_trace) { bun.crash_handler.current_action = prev_action; }; - if (Environment.isDebug) bun.crash_handler.current_action = .{ .bundle_generate_chunk = .{ + if (Environment.show_crash_trace) bun.crash_handler.current_action = .{ .bundle_generate_chunk = .{ .chunk = ctx.chunk, .context = ctx.c, .part_range = &part_range.part_range, } }; - if (Environment.isDebug) { + if (Environment.show_crash_trace) { const path = ctx.c.parse_graph.input_files.items(.source)[part_range.part_range.source_index.get()].path; if (bun.CLI.debug_flags.hasPrintBreakpoint(path)) { @breakpoint(); diff --git a/src/cli.zig b/src/cli.zig index c3480e2e7b..1b56a46b3b 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -77,7 +77,7 @@ pub const Cli = struct { pub threadlocal var is_main_thread: bool = false; }; -pub const debug_flags = if (Environment.isDebug) struct { +pub const debug_flags = if (Environment.show_crash_trace) struct { var resolve_breakpoints: []const []const u8 = &.{}; var print_breakpoints: []const []const u8 = &.{}; @@ -183,7 +183,7 @@ pub const Arguments = struct { pub const ParamType = clap.Param(clap.Help); - const base_params_ = (if (Environment.isDebug) debug_params else [_]ParamType{}) ++ [_]ParamType{ + const base_params_ = (if (Environment.show_crash_trace) debug_params else [_]ParamType{}) ++ [_]ParamType{ clap.parseParam("--env-file ... Load environment variables from the specified file(s)") catch unreachable, clap.parseParam("--cwd Absolute path to resolve files & entry points from. This just changes the process' cwd.") catch unreachable, clap.parseParam("-c, --config ? Specify path to Bun config file. Default $cwd/bunfig.toml") catch unreachable, @@ -1280,7 +1280,7 @@ pub const Arguments = struct { } } - if (Environment.isDebug) { + if (Environment.show_crash_trace) { debug_flags.resolve_breakpoints = args.options("--breakpoint-resolve"); debug_flags.print_breakpoints = args.options("--breakpoint-print"); } diff --git a/src/crash_handler.zig b/src/crash_handler.zig index 186786bd4e..3746d94eb9 100644 --- a/src/crash_handler.zig +++ b/src/crash_handler.zig @@ -117,7 +117,7 @@ pub const Action = union(enum) { print: []const u8, /// bun.bundle_v2.LinkerContext.generateCompileResultForJSChunk - bundle_generate_chunk: if (bun.Environment.isDebug) struct { + bundle_generate_chunk: if (bun.Environment.show_crash_trace) struct { context: *const anyopaque, // unfortunate dependency loop workaround chunk: *const bun.bundle_v2.Chunk, part_range: *const bun.bundle_v2.PartRange, @@ -127,7 +127,7 @@ pub const Action = union(enum) { } } else void, - resolver: if (bun.Environment.isDebug) struct { + resolver: if (bun.Environment.show_crash_trace) struct { source_dir: []const u8, import_path: []const u8, kind: bun.ImportKind, @@ -140,7 +140,7 @@ pub const Action = union(enum) { .parse => |path| try writer.print("parsing {s}", .{path}), .visit => |path| try writer.print("visiting {s}", .{path}), .print => |path| try writer.print("printing {s}", .{path}), - .bundle_generate_chunk => |data| if (bun.Environment.isDebug) { + .bundle_generate_chunk => |data| if (bun.Environment.show_crash_trace) { try writer.print( \\generating bundler chunk \\ chunk entry point: {?s} @@ -159,7 +159,7 @@ pub const Action = union(enum) { }, ); }, - .resolver => |res| if (bun.Environment.isDebug) { + .resolver => |res| if (bun.Environment.show_crash_trace) { try writer.print("resolving {s} from {s} ({s})", .{ res.import_path, 
res.source_dir, @@ -217,7 +217,7 @@ pub fn crashHandler( // // To make the release-mode behavior easier to demo, debug mode // checks for this CLI flag. - const debug_trace = bun.Environment.isDebug and check_flag: { + const debug_trace = bun.Environment.show_crash_trace and check_flag: { for (bun.argv) |arg| { if (bun.strings.eqlComptime(arg, "--debug-crash-handler-use-trace-string")) { break :check_flag false; @@ -482,7 +482,7 @@ pub fn crashHandler( /// This is called when `main` returns a Zig error. /// We don't want to treat it as a crash under certain error codes. pub fn handleRootError(err: anyerror, error_return_trace: ?*std.builtin.StackTrace) noreturn { - var show_trace = bun.Environment.isDebug; + var show_trace = bun.Environment.show_crash_trace; switch (err) { error.OutOfMemory => bun.outOfMemory(), @@ -722,7 +722,7 @@ pub fn handleRootError(err: anyerror, error_return_trace: ?*std.builtin.StackTra else => { Output.errGeneric( - if (bun.Environment.isDebug) + if (bun.Environment.show_crash_trace) "'main' returned error.{s}" else "An internal error occurred ({s})", @@ -815,7 +815,7 @@ fn handleSegfaultPosix(sig: i32, info: *const std.posix.siginfo_t, _: ?*const an var did_register_sigaltstack = false; var sigaltstack: [512 * 1024]u8 = undefined; -pub fn updatePosixSegfaultHandler(act: ?*std.posix.Sigaction) !void { +fn updatePosixSegfaultHandler(act: ?*std.posix.Sigaction) !void { if (act) |act_| { if (!did_register_sigaltstack) { var stack: std.c.stack_t = .{ @@ -840,6 +840,7 @@ pub fn updatePosixSegfaultHandler(act: ?*std.posix.Sigaction) !void { var windows_segfault_handle: ?windows.HANDLE = null; pub fn resetOnPosix() void { + if (bun.Environment.enable_asan) return; var act = std.posix.Sigaction{ .handler = .{ .sigaction = handleSegfaultPosix }, .mask = std.posix.empty_sigset, @@ -863,6 +864,7 @@ pub fn init() void { pub fn resetSegfaultHandler() void { if (!enable) return; + if (bun.Environment.enable_asan) return; if (bun.Environment.os == .windows) { if (windows_segfault_handle) |handle| { @@ -1370,6 +1372,9 @@ fn isReportingEnabled() bool { if (bun.Environment.isDebug) return false; + if (bun.Environment.enable_asan) + return false; + // Honor DO_NOT_TRACK if (!bun.analytics.isEnabled()) return false; @@ -1589,13 +1594,14 @@ inline fn handleErrorReturnTraceExtra(err: anyerror, maybe_trace: ?*std.builtin. pub inline fn handleErrorReturnTrace(err: anyerror, maybe_trace: ?*std.builtin.StackTrace) void { handleErrorReturnTraceExtra(err, maybe_trace, false); } +extern "c" fn WTF__DumpStackTrace(ptr: [*]usize, count: usize) void; /// Version of the standard library dumpStackTrace that has some fallbacks for /// cases where such logic fails to run. pub fn dumpStackTrace(trace: std.builtin.StackTrace, limits: WriteStackTraceLimits) void { Output.flush(); const stderr = std.io.getStdErr().writer(); - if (!bun.Environment.isDebug) { + if (!bun.Environment.show_crash_trace) { // debug symbols aren't available, lets print a tracestring stderr.print("View Debug Trace: {}\n", .{TraceString{ .action = .view_trace, @@ -1621,6 +1627,8 @@ pub fn dumpStackTrace(trace: std.builtin.StackTrace, limits: WriteStackTraceLimi .linux => { // Linux doesnt seem to be able to decode it's own debug info. // TODO(@paperclover): see if zig 0.14 fixes this + WTF__DumpStackTrace(trace.instruction_addresses.ptr, trace.instruction_addresses.len); + return; }, else => { // Assume debug symbol tooling is reliable. 
diff --git a/src/env.zig b/src/env.zig index 6387a76ec1..645cb4fff0 100644 --- a/src/env.zig +++ b/src/env.zig @@ -26,7 +26,7 @@ pub const isX86 = @import("builtin").target.cpu.arch.isX86(); pub const isX64 = @import("builtin").target.cpu.arch == .x86_64; pub const isMusl = builtin.target.abi.isMusl(); pub const allow_assert = isDebug or isTest or std.builtin.Mode.ReleaseSafe == @import("builtin").mode; - +pub const show_crash_trace = isDebug or isTest or enable_asan; /// All calls to `@export` should be gated behind this check, so that code /// generators that compile Zig code know not to reference and compile a ton of /// unused code. @@ -49,6 +49,7 @@ pub const canary_revision = if (is_canary) build_options.canary_revision else "" pub const dump_source = isDebug and !isTest; pub const base_path = build_options.base_path; pub const enable_logs = build_options.enable_logs; +pub const enable_asan = build_options.enable_asan; pub const codegen_path = build_options.codegen_path; pub const codegen_embed = build_options.codegen_embed; diff --git a/src/fs.zig b/src/fs.zig index fbd4c8ef87..28ae78ad5a 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -800,8 +800,6 @@ pub const FileSystem = struct { pub const Limit = struct { pub var handles: usize = 0; pub var handles_before = std.mem.zeroes(if (Environment.isPosix) std.posix.rlimit else struct {}); - pub var stack: usize = 0; - pub var stack_before = std.mem.zeroes(if (Environment.isPosix) std.posix.rlimit else struct {}); }; // Always try to max out how many files we can keep open diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index 9a7574b07a..523d109a9e 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -672,17 +672,17 @@ pub const Resolver = struct { // Only setting 'current_action' in debug mode because module resolution // is done very often, and has a very low crash rate. 
- const prev_action = if (Environment.isDebug) bun.crash_handler.current_action; - if (Environment.isDebug) bun.crash_handler.current_action = .{ .resolver = .{ + const prev_action = if (Environment.show_crash_trace) bun.crash_handler.current_action; + if (Environment.show_crash_trace) bun.crash_handler.current_action = .{ .resolver = .{ .source_dir = source_dir, .import_path = import_path, .kind = kind, } }; - defer if (Environment.isDebug) { + defer if (Environment.show_crash_trace) { bun.crash_handler.current_action = prev_action; }; - if (Environment.isDebug and bun.CLI.debug_flags.hasResolveBreakpoint(import_path)) { + if (Environment.show_crash_trace and bun.CLI.debug_flags.hasResolveBreakpoint(import_path)) { bun.Output.debug("Resolving {s} from {s}", .{ import_path, source_dir, diff --git a/test/expectations.txt b/test/expectations.txt new file mode 100644 index 0000000000..6b96a637b9 --- /dev/null +++ b/test/expectations.txt @@ -0,0 +1,120 @@ +# Documentation: http://trac.webkit.org/wiki/TestExpectations +# Format: [modifier] test-name [[ expectations ]] [# comment] + +# Tests that are broken +test/cli/create/create-jsx.test.ts [ FAIL ] # false > react spa (no tailwind) > build +test/integration/bun-types/bun-types.test.ts [ FAIL ] # @types/bun integration test > checks without lib.dom.d.ts +test/bundler/native-plugin.test.ts [ FAIL ] # prints name when plugin crashes +test/cli/install/bun-run.test.ts [ FAIL ] # should pass arguments correctly in scripts +test/cli/run/run-crash-handler.test.ts [ FAIL ] # automatic crash reporter > segfault should report +test/regression/issue/17454/destructure_string.test.ts [ FAIL ] # destructure string does not become string + +# Tests that are flaky +test/js/bun/spawn/spawn-maxbuf.test.ts [ FLAKY ] + +# Tests skipped due to different log/line outputs +[ ASAN ] test/js/web/console/console-log.test.ts [ SKIP ] # log line mismatch +[ ASAN ] test/js/bun/util/reportError.test.ts [ SKIP ] # log line mismatch +[ ASAN ] test/js/node/child_process/child_process.test.ts [ SKIP ] # Unexpected identifier "WARNING" +[ ASAN ] test/js/bun/shell/bunshell.test.ts [ SKIP ] # bunshell > quiet > basic +[ ASAN ] test/bundler/cli.test.ts [ SKIP ] # debug logs +[ ASAN ] test/cli/install/bun-install.test.ts [ FLAKY ] # destroy(Closer) logs + +# Tests failed due to ASAN +[ ASAN ] test/js/node/test/parallel/test-common-gc.js [ FAIL ] +[ ASAN ] test/js/bun/spawn/spawn-streaming-stdin.test.ts [ FAIL ] +[ ASAN ] test/regression/issue/17454/destructure_string.test.ts [ FAIL ] +[ ASAN ] test/js/node/test/parallel/test-http-server-connections-checking-leak.js [ FAIL ] +[ ASAN ] test/js/node/test/parallel/test-zlib-invalid-input-memory.js [ FAIL ] +[ ASAN ] test/js/node/test/parallel/test-https-server-connections-checking-leak.js [ FAIL ] +[ ASAN ] test/js/bun/sqlite/sqlite.test.js [ FAIL ] # can continue to use existing statements after database has been GC'd +[ ASAN ] test/bake/dev/stress.test.ts [ FLAKY ] # DEV:stress-1: crash #18910 + +# Tests failed due to ASAN: attempting free on address which was not malloc()-ed +[ ASAN ] test/js/node/test/parallel/test-http2-removed-header-stays-removed.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-invalidheaderfields-client.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-writehead-array.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-headers-after-destroy.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-writehead.js [ 
CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-trailers.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-headers.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-options-server-request.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-write-empty-string.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-invalidheaderfield.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-options-server-response.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-server-set-header.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-connect-options.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-statusmessage.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-http2-compat-serverresponse-end.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-fs-utimes.js [ CRASH ] +[ ASAN ] test/js/node/worker_threads/worker_threads.test.ts [ CRASH ] # After: threadId module and worker property is consistent +[ ASAN ] test/js/node/worker_threads/worker_destruction.test.ts [ CRASH ] # After: bun closes cleanly when Bun.connect is used in a Worker that is terminating +[ ASAN ] test/integration/vite-build/vite-build.test.ts [ CRASH ] +[ ASAN ] test/integration/next-pages/test/dev-server-ssr-100.test.ts [ CRASH ] +[ ASAN ] test/integration/next-pages/test/next-build.test.ts [ CRASH ] +[ ASAN ] test/js/third_party/next-auth/next-auth.test.ts [ CRASH ] +[ ASAN ] test/js/third_party/astro/astro-post.test.js [ CRASH ] +[ ASAN ] test/js/bun/wasm/wasi.test.js [ CRASH ] +[ ASAN ] test/regression/issue/ctrl-c.test.ts [ CRASH ] +[ ASAN ] test/cli/install/bun-repl.test.ts [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-intl.js [ CRASH ] +[ ASAN ] test/js/node/v8/v8-date-parser.test.js [ CRASH ] +[ ASAN ] test/cli/hot/hot.test.ts [ CRASH ] +[ ASAN ] test/js/node/watch/fs.watch.test.ts [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-fs-watch.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-fs-watch-recursive-watch-file.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-fs-watch-recursive-update-file.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-fs-watch-recursive-linux-parallel-remove.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-fs-watch-recursive-update-file.js [ CRASH ] +[ ASAN ] test/js/node/test/parallel/test-fs-promises-watch.js [ CRASH ] +[ ASAN ] test/cli/hot/watch.test.ts [ CRASH ] +[ ASAN ] test/js/bun/resolve/load-same-js-file-a-lot.test.ts [ CRASH ] +[ ASAN ] test/js/third_party/es-module-lexer/es-module-lexer.test.ts [ CRASH ] +[ ASAN ] test/bundler/esbuild/default.test.ts [ CRASH ] +[ ASAN ] test/bundler/bundler_edgecase.test.ts [ CRASH ] # After: edgecase/UsingWithSixImports +[ ASAN ] test/bundler/bundler_loader.test.ts [ CRASH ] # bun/wasm-is-copied-to-outdir +[ ASAN ] test/bundler/bundler_npm.test.ts [ CRASH ] +[ ASAN ] test/bake/dev/sourcemap.test.ts [ CRASH ] +[ ASAN ] test/bake/dev/hot.test.ts [ CRASH ] +[ ASAN ] test/bake/dev/bundle.test.ts [ CRASH ] +[ ASAN ] test/bake/dev/esm.test.ts [ CRASH ] +[ ASAN ] test/bake/dev/css.test.ts [ CRASH ] +[ ASAN ] test/bake/dev/html.test.ts [ CRASH ] +[ ASAN ] test/bake/dev/react-spa.test.ts [ CRASH ] +[ ASAN ] test/bake/dev/ecosystem.test.ts [ CRASH ] + +# Tests failed due to ASAN: SEGV on unknown address +[ ASAN ] test/integration/next-pages/test/dev-server.test.ts [ CRASH ] + +# Tests failed due to ASAN: heap-use-after-free +[ ASAN ] test/js/first_party/ws/ws.test.ts [ CRASH ] + 
+# Tests failed due to ASAN: use-after-poison +[ ASAN ] test/js/node/test/parallel/test-worker-unref-from-message-during-exit.js [ CRASH ] +[ ASAN ] test/napi/napi.test.ts [ CRASH ] # can throw an exception from an async_complete_callback +[ ASAN ] test/js/node/http/node-http-uaf.test.ts [ CRASH ] # should not crash on abort (node-http-uaf-fixture.ts) + +# Tests failed due to ASAN: unknown-crash +[ ASAN ] test/js/sql/tls-sql.test.ts [ CRASH ] # After: Throws on illegal transactions + +# Tests failed due to ASAN: assertion failed +[ ASAN ] test/js/node/test/parallel/test-string-decoder-fuzz.js [ CRASH ] # ASSERTION FAILED: joinedLength + +# Tests timed out due to ASAN +[ ASAN ] test/js/node/util/test-aborted.test.ts [ TIMEOUT ] # aborted with gc cleanup +[ ASAN ] test/js/node/test/parallel/test-primitive-timer-leak.js [ TIMEOUT ] +[ ASAN ] test/js/bun/spawn/spawn.test.ts [ TIMEOUT ] +[ ASAN ] test/cli/inspect/inspect.test.ts [ TIMEOUT ] +[ ASAN ] test/js/node/test/parallel/test-gc-http-client-connaborted.js [ TIMEOUT ] +[ ASAN ] test/cli/inspect/BunFrontendDevServer.test.ts [ TIMEOUT ] + +# Tests failed due to memory leaks +[ ASAN ] test/js/node/url/pathToFileURL.test.ts [ LEAK ] # pathToFileURL doesn't leak memory +[ ASAN ] test/js/node/fs/abort-signal-leak-read-write-file.test.ts [ LEAK ] # should not leak memory with already aborted signals +[ ASAN ] test/js/web/streams/streams-leak.test.ts [ LEAK ] # Absolute memory usage remains relatively constant when reading and writing to a pipe +[ ASAN ] test/js/web/fetch/fetch-leak.test.ts [ LEAK ] +[ ASAN ] test/cli/run/require-cache.test.ts [ LEAK ] # files transpiled and loaded don't leak file paths > via require() +[ ASAN ] test/js/bun/spawn/spawn-pipe-leak.test.ts [ LEAK ] +[ ASAN ] test/js/node/http2/node-http2.test.js [ LEAK ] # should not leak memory +[ ASAN ] test/js/bun/http/req-url-leak.test.ts [ LEAK ] # req.url doesn't leak memory +[ ASAN ] test/js/bun/io/bun-write-leak.test.ts [ LEAK ] # Bun.write should not leak the output data diff --git a/test/harness.ts b/test/harness.ts index 1ef9fedda8..c49d7a0d42 100644 --- a/test/harness.ts +++ b/test/harness.ts @@ -31,6 +31,7 @@ export const isVerbose = process.env.DEBUG === "1"; // test.todoIf(isFlaky && isMacOS)("this test is flaky"); export const isFlaky = isCI; export const isBroken = isCI; +export const isASAN = basename(process.execPath).includes("bun-asan"); export const bunEnv: NodeJS.ProcessEnv = { ...process.env, @@ -49,6 +50,10 @@ export const bunEnv: NodeJS.ProcessEnv = { const ciEnv = { ...bunEnv }; +if (isASAN) { + bunEnv.ASAN_OPTIONS ??= "allow_user_segv_handler=1"; +} + if (isWindows) { bunEnv.SHELLOPTS = "igncr"; // Ignore carriage return } @@ -875,6 +880,7 @@ export function osSlashes(path: string) { } import * as child_process from "node:child_process"; +import { basename } from "node:path"; class WriteBlockedError extends Error { constructor(time) {