diff --git a/packages/bun-internal-test/src/runner.node.mjs b/packages/bun-internal-test/src/runner.node.mjs index 161924ef5a..6fe978f53a 100644 --- a/packages/bun-internal-test/src/runner.node.mjs +++ b/packages/bun-internal-test/src/runner.node.mjs @@ -124,7 +124,7 @@ const argv0 = argv0_stdout.toString().trim(); console.log(`Testing ${argv0} v${revision}`); -const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.22621.0\\shared\\ntstatus.h"; +const ntStatusPath = "C:\\Program Files (x86)\\Windows Kits\\10\\Include\\10.0.26100.0\\shared\\ntstatus.h"; let ntstatus_header_cache = null; function lookupWindowsError(code) { if (ntstatus_header_cache === null) { diff --git a/scripts/bootstrap.sh b/scripts/bootstrap.sh index 2ac531ef5f..99cd1c9ea7 100755 --- a/scripts/bootstrap.sh +++ b/scripts/bootstrap.sh @@ -1,5 +1,5 @@ #!/bin/sh -# Version: 13 +# Version: 14 # A script that installs the dependencies needed to build and test Bun. # This should work on macOS and Linux with a POSIX shell. @@ -195,6 +195,17 @@ download_file() { print "$file_tmp_path" } +# path=$(download_and_verify_file URL sha256) +download_and_verify_file() { + file_url="$1" + hash="$2" + + path=$(download_file "$file_url") + execute sh -c 'echo "'"$hash $path"'" | sha256sum -c' >/dev/null 2>&1 + + print "$path" +} + append_to_profile() { content="$1" profiles=".profile .zprofile .bash_profile .bashrc .zshrc" @@ -400,7 +411,7 @@ check_package_manager() { pm="brew" ;; linux) - if [ -f "$(which apt)" ]; then + if [ -f "$(which apt-get)" ]; then pm="apt" elif [ -f "$(which dnf)" ]; then pm="dnf" @@ -470,10 +481,8 @@ check_ulimit() { print "Checking ulimits..." systemd_conf="/etc/systemd/system.conf" - if [ -f "$systemd_conf" ]; then - limits_conf="/etc/security/limits.d/99-unlimited.conf" - create_file "$limits_conf" - fi + limits_conf="/etc/security/limits.d/99-unlimited.conf" + create_file "$limits_conf" limits="core data fsize memlock nofile rss stack cpu nproc as locks sigpending msgqueue" for limit in $limits; do @@ -495,6 +504,10 @@ check_ulimit() { fi if [ -f "$systemd_conf" ]; then + # in systemd's configuration you need to say "infinity" when you mean "unlimited" + if [ "$limit_value" = "unlimited" ]; then + limit_value="infinity" + fi append_file "$systemd_conf" "DefaultLimit$limit_upper=$limit_value" fi done @@ -549,7 +562,7 @@ check_ulimit() { package_manager() { case "$pm" in apt) - execute_sudo apt "$@" + execute_sudo apt-get "$@" ;; dnf) case "$distro" in @@ -598,6 +611,7 @@ install_packages() { package_manager install \ --yes \ --no-install-recommends \ + --fix-missing \ "$@" ;; dnf) @@ -673,7 +687,7 @@ install_common_software() { esac case "$distro" in - amzn) + amzn | alpine) install_packages \ tar ;; @@ -1362,6 +1376,58 @@ install_chromium() { esac } +install_age() { + # we only use this to encrypt core dumps, which we only have on Linux + case "$os" in + linux) + age_tarball="" + case "$arch" in + x64) + age_tarball="$(download_and_verify_file https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-linux-amd64.tar.gz 7df45a6cc87d4da11cc03a539a7470c15b1041ab2b396af088fe9990f7c79d50)" + ;; + aarch64) + age_tarball="$(download_and_verify_file https://github.com/FiloSottile/age/releases/download/v1.2.1/age-v1.2.1-linux-arm64.tar.gz 57fd79a7ece5fe501f351b9dd51a82fbee1ea8db65a8839db17f5c080245e99f)" + ;; + esac + + age_extract_dir="$(create_tmp_directory)" + execute tar -C "$age_extract_dir" -zxf "$age_tarball" age/age + move_to_bin "$age_extract_dir/age/age" + ;; + esac +} + 
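+# Note (added for clarity): the function below points kernel.core_pattern at a
+# per-platform directory so the test runner (scripts/runner.node.mjs) can find
+# core dumps after a crash, encrypt them with age, and upload them as a build
+# artifact.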
+configure_core_dumps() {
+  # we only have core dumps on Linux
+  case "$os" in
+  linux)
+    # set up a directory that the test runner will look in after running tests
+    cores_dir="/var/bun-cores-$distro-$release-$arch"
+    sysctl_file="/etc/sysctl.d/local.conf"
+    create_directory "$cores_dir"
+    # ensure core_pattern will point there
+    # %e = executable filename
+    # %p = pid
+    append_file "$sysctl_file" "kernel.core_pattern = $cores_dir/%e-%p.core"
+
+    # disable apport.service if it exists since it will override the core_pattern
+    if which systemctl >/dev/null; then
+      if systemctl list-unit-files apport.service >/dev/null; then
+        execute_sudo "$systemctl" disable --now apport.service
+      fi
+    fi
+
+    # load the new configuration
+    execute_sudo sysctl -p "$sysctl_file"
+
+    # ensure that a regular user will be able to run sysctl
+    if [ -d /sbin ]; then
+      append_to_path /sbin
+    fi
+    ;;
+  esac
+}
+
 clean_system() {
   if ! [ "$ci" = "1" ]; then
     return
@@ -1387,6 +1453,8 @@ main() {
   install_build_essentials
   install_chromium
   install_fuse_python
+  install_age
+  configure_core_dumps
   clean_system
 }
 
diff --git a/scripts/debug-coredump.ts b/scripts/debug-coredump.ts
new file mode 100644
index 0000000000..625afb727a
--- /dev/null
+++ b/scripts/debug-coredump.ts
@@ -0,0 +1,63 @@
+import fs from "node:fs";
+import { tmpdir } from "node:os";
+import { basename, join } from "node:path";
+import { parseArgs } from "node:util";
+
+// usage: bun debug-coredump.ts
+// -p <pid> (buildkite should show this)
+// -b <build-url>
+// -c <cores-url>
+// -d <debugger> (default: lldb)
+const {
+  values: { pid: stringPid, ["build-url"]: buildUrl, ["cores-url"]: coresUrl, debugger: debuggerPath },
+} = parseArgs({
+  options: {
+    pid: { type: "string", short: "p" },
+    ["build-url"]: { type: "string", short: "b" },
+    ["cores-url"]: { type: "string", short: "c" },
+    debugger: { type: "string", short: "d", default: "lldb" },
+  },
+});
+
+if (stringPid === undefined) throw new Error("no PID given");
+const pid = parseInt(stringPid);
+if (buildUrl === undefined) throw new Error("no build-url given");
+if (coresUrl === undefined) throw new Error("no cores-url given");
+if (!process.env.AGE_CORES_IDENTITY?.startsWith("AGE-SECRET-KEY-"))
+  throw new Error("no identity given in $AGE_CORES_IDENTITY");
+
+const id = Bun.hash(buildUrl + coresUrl).toString(36);
+const dir = join(tmpdir(), `debug-coredump-${id}.tmp`);
+fs.mkdirSync(dir, { recursive: true });
+
+if (!fs.existsSync(join(dir, "bun-profile")) || !fs.existsSync(join(dir, `bun-${pid}.core`))) {
+  console.log("downloading bun-profile.zip");
+  const zip = await (await fetch(buildUrl)).arrayBuffer();
+  await Bun.write(join(dir, "bun-profile.zip"), zip);
+  // -j: junk paths (don't create directories when extracting)
+  // -o: overwrite without prompting
+  // -d: extract to this directory instead of cwd
+  await Bun.$`unzip -j -o ${join(dir, "bun-profile.zip")} -d ${dir}`;
+
+  console.log("downloading cores");
+  const cores = await (await fetch(coresUrl)).arrayBuffer();
+  await Bun.$`bash -c ${`age -d -i <(echo "$AGE_CORES_IDENTITY")`} < ${cores} | tar -zxvC ${dir}`;
+
+  console.log("moving cores out of nested directory");
+  for await (const file of new Bun.Glob("bun-cores-*/bun-*.core").scan(dir)) {
+    fs.renameSync(join(dir, file), join(dir, basename(file)));
+  }
+} else {
+  console.log(`already downloaded in ${dir}`);
+}
+
+console.log("launching debugger:");
+console.log(`${debuggerPath} --core ${join(dir, `bun-${pid}.core`)} ${join(dir, "bun-profile")}`);
+
+const proc = await Bun.spawn([debuggerPath, "--core", join(dir, 
`bun-${pid}.core`), join(dir, "bun-profile")], { + stdin: "inherit", + stdout: "inherit", + stderr: "inherit", +}); +await proc.exited; +process.exit(proc.exitCode); diff --git a/scripts/runner.node.mjs b/scripts/runner.node.mjs index bd24218397..8a320d5612 100755 --- a/scripts/runner.node.mjs +++ b/scripts/runner.node.mjs @@ -51,6 +51,7 @@ import { isBuildkite, isCI, isGithubAction, + isLinux, isMacOS, isWindows, isX64, @@ -59,6 +60,7 @@ import { startGroup, tmpdir, unzip, + uploadArtifact, } from "./utils.mjs"; let isQuiet = false; const cwd = import.meta.dirname ? dirname(import.meta.dirname) : process.cwd(); @@ -146,6 +148,10 @@ const { values: options, positionals: filters } = parseArgs({ type: "boolean", default: isBuildkite, }, + ["coredump-upload"]: { + type: "boolean", + default: isBuildkite && isLinux, + }, }, }); @@ -605,6 +611,78 @@ async function runTests() { } } + if (options["coredump-upload"]) { + try { + // this sysctl is set in bootstrap.sh to /var/bun-cores-$distro-$release-$arch + const sysctl = await spawnSafe({ command: "sysctl", args: ["-n", "kernel.core_pattern"] }); + let coresDir = sysctl.stdout; + if (sysctl.ok) { + if (coresDir.startsWith("|")) { + throw new Error("cores are being piped not saved"); + } + // change /foo/bar/%e-%p.core to /foo/bar + coresDir = dirname(sysctl.stdout); + } else { + throw new Error(`Failed to check core_pattern: ${sysctl.error}`); + } + + const coresDirBase = dirname(coresDir); + const coresDirName = basename(coresDir); + const coreFileNames = readdirSync(coresDir); + + if (coreFileNames.length > 0) { + console.log(`found ${coreFileNames.length} cores in ${coresDir}`); + let totalBytes = 0; + let totalBlocks = 0; + for (const f of coreFileNames) { + const stat = statSync(join(coresDir, f)); + totalBytes += stat.size; + totalBlocks += stat.blocks; + } + console.log(`total apparent size = ${totalBytes} bytes`); + console.log(`total size on disk = ${512 * totalBlocks} bytes`); + const outdir = mkdtempSync(join(tmpdir(), "cores-upload")); + const outfileName = `${coresDirName}.tar.gz.age`; + const outfileAbs = join(outdir, outfileName); + + // This matches an age identity known by Bun employees. Core dumps from CI have to be kept + // secret since they will contain API keys. + const ageRecipient = "age1eunsrgxwjjpzr48hm0y98cw2vn5zefjagt4r0qj4503jg2nxedqqkmz6fu"; // reject external PRs changing this, see above + + // Run tar in the parent directory of coresDir so that it creates archive entries with + // coresDirName in them. 
This way, when you extract the tarball, you get a folder named
+        // bun-cores-XYZ containing the core files, instead of a bunch of core files strewn
+        // across your current directory.
+        const before = Date.now();
+        const zipAndEncrypt = await spawnSafe({
+          command: "bash",
+          args: [
+            "-c",
+            // tar -S: handle sparse files efficiently
+            `set -euo pipefail && tar -Sc "$0" | gzip -1 | age -e -r ${ageRecipient} -o "$1"`,
+            // $0
+            coresDirName,
+            // $1
+            outfileAbs,
+          ],
+          cwd: coresDirBase,
+          stdout: () => {},
+          timeout: 60_000,
+        });
+        const elapsed = Date.now() - before;
+        if (!zipAndEncrypt.ok) {
+          throw new Error(zipAndEncrypt.error);
+        }
+        console.log(`saved core dumps to ${outfileAbs} (${statSync(outfileAbs).size} bytes) in ${elapsed} ms`);
+        await uploadArtifact(outfileAbs);
+      } else {
+        console.log(`no cores found in ${coresDir}`);
+      }
+    } catch (err) {
+      console.error("Error collecting and uploading core dumps:", err);
+    }
+  }
+
   if (!isCI && !isQuiet) {
     console.table({
       "Total Tests": okResults.length + failedResults.length + flakyResults.length,
@@ -780,6 +858,7 @@ async function spawnSafe(options) {
       const [, message] = error || [];
       error = message ? message.split("\n")[0].toLowerCase() : "crash";
       error = error.indexOf("\\n") !== -1 ? error.substring(0, error.indexOf("\\n")) : error;
+      error = `pid ${subprocess.pid} ${error}`;
     } else if (signalCode) {
       if (signalCode === "SIGTERM" && duration >= timeout) {
         error = "timeout";
@@ -871,7 +950,7 @@ async function spawnBun(execPath, { args, cwd, timeout, env, stdout, stderr }) {
   };
 
   if (basename(execPath).includes("asan")) {
-    bunEnv.ASAN_OPTIONS = "allow_user_segv_handler=1";
+    bunEnv.ASAN_OPTIONS = "allow_user_segv_handler=1:disable_coredump=0";
   }
 
   if (isWindows && bunEnv.Path) {
@@ -1023,7 +1102,7 @@ function getTestTimeout(testPath) {
   if (/integration|3rd_party|docker|bun-install-registry|v8/i.test(testPath)) {
     return integrationTimeout;
   }
-  if (/napi/i.test(testPath)) {
+  if (/napi/i.test(testPath) || /v8/i.test(testPath)) {
     return napiTimeout;
   }
   return testTimeout;
diff --git a/scripts/utils.mjs b/scripts/utils.mjs
index 0f5a166644..83bcdc6dbc 100755
--- a/scripts/utils.mjs
+++ b/scripts/utils.mjs
@@ -16,7 +16,7 @@ import {
 } from "node:fs";
 import { connect } from "node:net";
 import { hostname, homedir as nodeHomedir, tmpdir as nodeTmpdir, release, userInfo } from "node:os";
-import { dirname, join, relative, resolve } from "node:path";
+import { basename, dirname, join, relative, resolve } from "node:path";
 import { normalize as normalizeWindows } from "node:path/win32";
 
 export const isWindows = process.platform === "win32";
@@ -1370,13 +1370,16 @@ export async function getLastSuccessfulBuild() {
 }
 
 /**
- * @param {string} filename
- * @param {string} [cwd]
+ * @param {string} filename Absolute path to file to upload
  */
-export async function uploadArtifact(filename, cwd) {
+export async function uploadArtifact(filename) {
   if (isBuildkite) {
-    const relativePath = relative(cwd ?? process.cwd(), filename);
-    await spawnSafe(["buildkite-agent", "artifact", "upload", relativePath], { cwd, stdio: "inherit" });
+    await spawnSafe(["buildkite-agent", "artifact", "upload", basename(filename)], {
+      cwd: dirname(filename),
+      stdio: "inherit",
+    });
+  } else {
+    console.warn(`not in buildkite. 
artifact ${filename} not uploaded.`); } } diff --git a/test/harness.ts b/test/harness.ts index b6a0ad0a83..be7dc5b950 100644 --- a/test/harness.ts +++ b/test/harness.ts @@ -64,7 +64,7 @@ export const bunEnv: NodeJS.Dict = { const ciEnv = { ...bunEnv }; if (isASAN) { - bunEnv.ASAN_OPTIONS ??= "allow_user_segv_handler=1"; + bunEnv.ASAN_OPTIONS ??= "allow_user_segv_handler=1:disable_coredump=0"; } if (isWindows) {
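
Example workflow (hypothetical values, for illustration only): to debug an
uploaded core dump locally, take the build artifact URL and the encrypted cores
artifact URL from the Buildkite build page, plus the private age identity that
matches the hard-coded recipient in runner.node.mjs, and run the new
scripts/debug-coredump.ts. The PID, URLs, and key below are placeholders, not
real values:

  # AGE_CORES_IDENTITY must hold the AGE-SECRET-KEY-... identity matching
  # the ageRecipient public key above (kept secret by Bun employees).
  export AGE_CORES_IDENTITY='AGE-SECRET-KEY-...'
  bun scripts/debug-coredump.ts \
    -p 12345 \
    -b 'https://buildkite.com/.../artifacts/bun-profile.zip' \
    -c 'https://buildkite.com/.../artifacts/bun-cores-debian-12-x64.tar.gz.age' \
    -d lldb

The script caches its downloads in a per-build temporary directory, so
re-running it with the same URLs skips straight to launching the debugger.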