Merge branch 'main' of github.com:oven-sh/bun into ali/piscina-message-port-webkit

Alistair Smith
2025-07-02 09:26:43 -07:00
981 changed files with 64236 additions and 45877 deletions

View File

@@ -1,4 +1,4 @@
# Version: 8
# Version: 9
# A script that installs the dependencies needed to build and test Bun.
# This should work on Windows 10 or newer with PowerShell.
@@ -240,11 +240,11 @@ function Install-Git {
}
function Install-NodeJs {
Install-Package nodejs -Command node -Version "22.9.0"
Install-Package nodejs -Command node -Version "24.3.0"
}
function Install-Bun {
Install-Package bun -Version "1.1.30"
Install-Package bun -Version "1.2.17"
}
function Install-Cygwin {

View File

@@ -1,5 +1,5 @@
#!/bin/sh
# Version: 11
# Version: 12
# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.
@@ -130,7 +130,7 @@ create_directory() {
create_tmp_directory() {
mktemp="$(require mktemp)"
path="$(execute "$mktemp" -d)"
grant_to_user "$path"
print "$path"
}
@@ -191,7 +191,7 @@ download_file() {
fetch "$file_url" >"$file_tmp_path"
grant_to_user "$file_tmp_path"
print "$file_tmp_path"
}
@@ -317,7 +317,7 @@ check_operating_system() {
distro="$("$sw_vers" -productName)"
release="$("$sw_vers" -productVersion)"
fi
case "$arch" in
x64)
sysctl="$(which sysctl)"
@@ -534,7 +534,7 @@ check_ulimit() {
append_file "$dpkg_conf" "force-unsafe-io"
append_file "$dpkg_conf" "no-debsig"
apt_conf="/etc/apt/apt.conf.d/99-ci-options"
execute_sudo create_directory "$(dirname "$apt_conf")"
append_file "$apt_conf" 'Acquire::Languages "none";'
append_file "$apt_conf" 'Acquire::GzipIndexes "true";'
@@ -711,12 +711,7 @@ install_common_software() {
}
nodejs_version_exact() {
# https://unofficial-builds.nodejs.org/download/release/
if ! [ "$abi" = "musl" ] && [ -n "$abi_version" ] && ! [ "$(compare_version "$abi_version" "2.27")" = "1" ]; then
print "16.9.1"
else
print "22.9.0"
fi
print "24.3.0"
}
nodejs_version() {
@@ -756,16 +751,68 @@ install_nodejs() {
}
install_nodejs_headers() {
nodejs_headers_tar="$(download_file "https://nodejs.org/download/release/v$(nodejs_version_exact)/node-v$(nodejs_version_exact)-headers.tar.gz")"
nodejs_version="$(nodejs_version_exact)"
nodejs_headers_tar="$(download_file "https://nodejs.org/download/release/v$nodejs_version/node-v$nodejs_version-headers.tar.gz")"
nodejs_headers_dir="$(dirname "$nodejs_headers_tar")"
execute tar -xzf "$nodejs_headers_tar" -C "$nodejs_headers_dir"
nodejs_headers_include="$nodejs_headers_dir/node-v$(nodejs_version_exact)/include"
nodejs_headers_include="$nodejs_headers_dir/node-v$nodejs_version/include"
execute_sudo cp -R "$nodejs_headers_include/" "/usr"
# Also install to node-gyp cache locations for different node-gyp versions
# This ensures node-gyp finds headers without downloading them
setup_node_gyp_cache "$nodejs_version" "$nodejs_headers_dir/node-v$nodejs_version"
}
setup_node_gyp_cache() {
nodejs_version="$1"
headers_source="$2"
# Common node-gyp cache locations
cache_locations="
$HOME/.node-gyp/$nodejs_version
$HOME/.cache/node-gyp/$nodejs_version
$HOME/.npm/_cacache/node-gyp/$nodejs_version
$current_home/.node-gyp/$nodejs_version
$current_home/.cache/node-gyp/$nodejs_version
"
for cache_dir in $cache_locations; do
if ! [ -z "$cache_dir" ]; then
create_directory "$cache_dir"
# Copy headers
if [ -d "$headers_source/include" ]; then
cp -R "$headers_source/include" "$cache_dir/" 2>/dev/null || true
fi
# Create installVersion file (node-gyp expects this)
echo "11" > "$cache_dir/installVersion" 2>/dev/null || true
# Unlike Windows, Linux doesn't need .lib files,
# but create the directory structure node-gyp expects
case "$arch" in
x86_64|amd64)
create_directory "$cache_dir/lib/x64" 2>/dev/null || true
;;
aarch64|arm64)
create_directory "$cache_dir/lib/arm64" 2>/dev/null || true
;;
*)
create_directory "$cache_dir/lib" 2>/dev/null || true
;;
esac
# Set proper ownership for buildkite user
if [ "$ci" = "1" ] && [ "$user" = "buildkite-agent" ]; then
execute_sudo chown -R "$user:$user" "$cache_dir" 2>/dev/null || true
fi
fi
done
}
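# Illustrative check, not part of this diff: with the cache seeded as above,
# node-gyp can be pointed straight at the headers and should not hit the
# network. The addon directory and version below are hypothetical.
#   ls "$HOME/.cache/node-gyp/24.3.0"   # expect: include/ installVersion lib/
#   cd my-addon && node-gyp configure --nodedir="$HOME/.cache/node-gyp/24.3.0"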
bun_version_exact() {
print "1.2.0"
print "1.2.17"
}
install_bun() {
@@ -910,7 +957,7 @@ install_llvm() {
bash="$(require bash)"
llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")"
execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all
# Install llvm-symbolizer explicitly to ensure it's available for ASAN
install_packages "llvm-$(llvm_version)-tools"
;;
@@ -930,7 +977,8 @@ install_llvm() {
}
install_gcc() {
if ! [ "$os" = "linux" ] || ! [ "$distro" = "ubuntu" ] || [ -z "$gcc_version" ]; then
if ! [ "$os" = "linux" ] || ! [ "$distro" = "ubuntu" ] || [ -z "$gcc_version" ]
then
return
fi

scripts/buildkite-failures.ts Executable file
View File

@@ -0,0 +1,740 @@
#!/usr/bin/env bun
import { $ } from "bun";
import { existsSync } from "fs";
import { resolve } from "path";
// Check if we're in a TTY for color support
const isTTY = process.stdout.isTTY || process.env.FORCE_COLOR === "1";
// Get git root directory
let gitRoot = process.cwd();
try {
gitRoot = (await $`git rev-parse --show-toplevel`.quiet().text()).trim();
} catch {
// Fall back to current directory if not in a git repo
}
// Helper to convert file path to file:// URL if it exists
function fileToUrl(filePath: string): string {
try {
// Extract just the file path without line numbers or other info
const match = filePath.match(/^([^\s:]+\.(ts|js|tsx|jsx|zig))/);
if (!match) return filePath;
const cleanPath = match[1];
const fullPath = resolve(gitRoot, cleanPath);
if (existsSync(fullPath)) {
return `file://${fullPath}`;
}
} catch (error) {
// If anything fails, just return the original path
}
return filePath;
}
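// For example (illustrative paths): fileToUrl("test/js/node/foo.test.ts - 2 failing")
// resolves to "file:///<gitRoot>/test/js/node/foo.test.ts" when the file exists
// locally, dropping the trailing failure info; otherwise the input is returned as-is.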
// Color codes - simpler color scheme
const colors = {
reset: isTTY ? "\x1b[0m" : "",
bold: isTTY ? "\x1b[1m" : "",
dim: isTTY ? "\x1b[2m" : "",
red: isTTY ? "\x1b[31m" : "",
green: isTTY ? "\x1b[32m" : "",
bgBlue: isTTY ? "\x1b[44m" : "",
bgRed: isTTY ? "\x1b[41m" : "",
white: isTTY ? "\x1b[97m" : "",
};
// Parse command line arguments
const args = process.argv.slice(2);
const showWarnings = args.includes("--warnings") || args.includes("-w");
const showFlaky = args.includes("--flaky") || args.includes("-f");
const inputArg = args.find(arg => !arg.startsWith("-"));
// Determine what type of input we have
let buildNumber = null;
let branch = null;
if (inputArg) {
// BuildKite URL
if (inputArg.includes("buildkite.com")) {
const buildMatch = inputArg.match(/builds\/(\d+)/);
if (buildMatch) {
buildNumber = buildMatch[1];
}
}
// GitHub PR URL
else if (inputArg.includes("github.com") && inputArg.includes("/pull/")) {
const prMatch = inputArg.match(/pull\/(\d+)/);
if (prMatch) {
// Fetch PR info from GitHub API
const prNumber = prMatch[1];
const prResponse = await fetch(`https://api.github.com/repos/oven-sh/bun/pulls/${prNumber}`);
if (prResponse.ok) {
const pr = await prResponse.json();
branch = pr.head.ref;
}
}
}
// Plain number or #number - assume it's a GitHub PR
else if (/^#?\d+$/.test(inputArg)) {
const prNumber = inputArg.replace("#", "");
const prResponse = await fetch(`https://api.github.com/repos/oven-sh/bun/pulls/${prNumber}`);
if (prResponse.ok) {
const pr = await prResponse.json();
branch = pr.head.ref;
} else {
// If not a valid PR, maybe it's a BuildKite build number
buildNumber = prNumber;
}
}
// Otherwise assume it's a branch name
else {
branch = inputArg;
}
} else {
// No input, use current branch
branch = (await $`git rev-parse --abbrev-ref HEAD`.text()).trim();
}
// If branch specified, find latest build
if (!buildNumber) {
const buildsUrl = `https://buildkite.com/bun/bun/builds?branch=${encodeURIComponent(branch)}`;
const response = await fetch(buildsUrl);
const html = await response.text();
const match = html.match(/\/bun\/bun\/builds\/(\d+)/);
if (!match) {
console.log(`No builds found for branch: ${branch}`);
process.exit(0);
}
buildNumber = match[1];
}
// Fetch build JSON
const buildResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}.json`);
const build = await buildResponse.json();
// Calculate time ago
const buildTime = new Date(build.started_at);
const now = new Date();
const diffMs = now.getTime() - buildTime.getTime();
const diffSecs = Math.floor(diffMs / 1000);
const diffMins = Math.floor(diffSecs / 60);
const diffHours = Math.floor(diffMins / 60);
const diffDays = Math.floor(diffHours / 24);
let timeAgo;
if (diffDays > 0) {
timeAgo = `${diffDays} day${diffDays !== 1 ? "s" : ""} ago`;
} else if (diffHours > 0) {
timeAgo = `${diffHours} hour${diffHours !== 1 ? "s" : ""} ago`;
} else if (diffMins > 0) {
timeAgo = `${diffMins} minute${diffMins !== 1 ? "s" : ""} ago`;
} else {
timeAgo = `${diffSecs} second${diffSecs !== 1 ? "s" : ""} ago`;
}
console.log(`${timeAgo} - build #${buildNumber} https://buildkite.com/bun/bun/builds/${buildNumber}\n`);
// Check if build passed
if (build.state === "passed") {
console.log(`${colors.green}✅ Passed!${colors.reset}`);
process.exit(0);
}
// Get failed jobs
const failedJobs =
build.jobs?.filter(job => job.exit_status && job.exit_status > 0 && !job.soft_failed && job.type === "script") || [];
// Platform emoji mapping
const platformMap = {
"darwin": "🍎",
"macos": "🍎",
"ubuntu": "🐧",
"debian": "🐧",
"alpine": "🐧",
"linux": "🐧",
"windows": "🪟",
"win": "🪟",
};
// Fetch annotations by scraping the build page
const pageResponse = await fetch(`https://buildkite.com/bun/bun/builds/${buildNumber}`);
const pageHtml = await pageResponse.text();
// Extract script tags using HTMLRewriter
let annotationsData = null;
const scriptContents: string[] = [];
const scriptRewriter = new HTMLRewriter().on("script", {
text(text) {
scriptContents.push(text.text);
},
});
await new Response(scriptRewriter.transform(new Response(pageHtml))).text();
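// HTMLRewriter handlers fire while the body streams, so the transformed
// Response is read to completion above to guarantee scriptContents holds
// every <script> chunk before we search it.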
// Find the registerRequest call in script contents
const fullScript = scriptContents.join("");
let registerRequestIndex = fullScript.indexOf("registerRequest");
// Find the AnnotationsListRendererQuery after registerRequest
if (registerRequestIndex !== -1) {
const afterRegisterRequest = fullScript.substring(registerRequestIndex);
const annotationsIndex = afterRegisterRequest.indexOf('"AnnotationsListRendererQuery"');
if (annotationsIndex === -1 || annotationsIndex > 100) {
// Not the right registerRequest call
registerRequestIndex = -1;
}
}
if (registerRequestIndex !== -1) {
try {
// Find the start of the JSON object (after the comma and any whitespace)
let jsonStart = registerRequestIndex;
// Skip to the opening brace, accounting for the function name and first parameter
let commaFound = false;
for (let i = registerRequestIndex; i < fullScript.length; i++) {
if (fullScript[i] === "," && !commaFound) {
commaFound = true;
} else if (commaFound && fullScript[i] === "{") {
jsonStart = i;
break;
}
}
// Find the matching closing brace, considering strings
let braceCount = 0;
let jsonEnd = jsonStart;
let inString = false;
let escapeNext = false;
for (let i = jsonStart; i < fullScript.length; i++) {
const char = fullScript[i];
if (escapeNext) {
escapeNext = false;
continue;
}
if (char === "\\") {
escapeNext = true;
continue;
}
if (char === '"' && !inString) {
inString = true;
} else if (char === '"' && inString) {
inString = false;
}
if (!inString) {
if (char === "{") braceCount++;
else if (char === "}") {
braceCount--;
if (braceCount === 0) {
jsonEnd = i + 1;
break;
}
}
}
}
const jsonString = fullScript.substring(jsonStart, jsonEnd);
annotationsData = JSON.parse(jsonString);
const edges = annotationsData?.build?.annotations?.edges || [];
// Just collect all unique annotations by context
const annotationsByContext = new Map();
for (const edge of edges) {
const node = edge.node;
if (!node || !node.context) continue;
// Skip if we already have this context
if (annotationsByContext.has(node.context)) {
continue;
}
annotationsByContext.set(node.context, {
context: node.context,
html: node.body?.html || "",
});
}
// Collect annotations
const annotations = Array.from(annotationsByContext.values());
// Group annotations by test file to detect duplicates
const annotationsByFile = new Map();
const nonFileAnnotations = [];
for (const annotation of annotations) {
// Check if this is a file-based annotation
const isFileAnnotation = annotation.context.match(/\.(ts|js|tsx|jsx|zig)$/);
if (isFileAnnotation) {
// Parse the HTML to extract all platform sections
const html = annotation.html || "";
// Check if this annotation contains multiple <details> sections (one per platform)
const detailsSections = html.match(/<details>[\s\S]*?<\/details>/g);
if (detailsSections && detailsSections.length > 1) {
// Multiple platform failures in one annotation
for (const section of detailsSections) {
const summaryMatch = section.match(
/<summary>[\s\S]*?<a[^>]+><code>([^<]+)<\/code><\/a>\s*-\s*(\d+\s+\w+)\s+on\s+<a[^>]+>([\s\S]+?)<\/a>/,
);
if (summaryMatch) {
const filePath = summaryMatch[1];
const failureInfo = summaryMatch[2];
const platformHtml = summaryMatch[3];
const platform = platformHtml.replace(/<img[^>]+>/g, "").trim();
const fileKey = `${filePath}|${failureInfo}`;
if (!annotationsByFile.has(fileKey)) {
annotationsByFile.set(fileKey, {
filePath,
failureInfo,
platforms: [],
htmlParts: [],
originalAnnotations: [],
});
}
const entry = annotationsByFile.get(fileKey);
entry.platforms.push(platform);
entry.htmlParts.push(section);
entry.originalAnnotations.push({
...annotation,
html: section,
originalHtml: html,
});
}
}
} else {
// Single platform failure
const summaryMatch = html.match(
/<summary>[\s\S]*?<a[^>]+><code>([^<]+)<\/code><\/a>\s*-\s*(\d+\s+\w+)\s+on\s+<a[^>]+>([\s\S]+?)<\/a>/,
);
if (summaryMatch) {
const filePath = summaryMatch[1];
const failureInfo = summaryMatch[2];
const platformHtml = summaryMatch[3];
const platform = platformHtml.replace(/<img[^>]+>/g, "").trim();
const fileKey = `${filePath}|${failureInfo}`;
if (!annotationsByFile.has(fileKey)) {
annotationsByFile.set(fileKey, {
filePath,
failureInfo,
platforms: [],
htmlParts: [],
originalAnnotations: [],
});
}
const entry = annotationsByFile.get(fileKey);
entry.platforms.push(platform);
entry.htmlParts.push(html);
entry.originalAnnotations.push(annotation);
} else {
// Couldn't parse, treat as non-file annotation
nonFileAnnotations.push(annotation);
}
}
} else {
// Non-file annotations (like "zig error")
nonFileAnnotations.push(annotation);
}
}
// Create merged annotations
const mergedAnnotations = [];
// Add file-based annotations
for (const [key, entry] of annotationsByFile) {
const { filePath, failureInfo, platforms, htmlParts, originalAnnotations } = entry;
// If we have multiple platforms with the same content, merge them
if (platforms.length > 1) {
// Create context string with all platforms
const uniquePlatforms = [...new Set(platforms)];
const context = `${filePath} - ${failureInfo} on ${uniquePlatforms.join(", ")}`;
// Check if all HTML parts are identical
const firstHtml = htmlParts[0];
const allSame = htmlParts.every(html => html === firstHtml);
let mergedHtml = "";
if (allSame) {
// If all the same, just use the first one
mergedHtml = firstHtml;
} else {
// If different, try to find one with the most color spans
let bestHtml = firstHtml;
let maxColorCount = (firstHtml.match(/term-fg/g) || []).length;
for (const html of htmlParts) {
const colorCount = (html.match(/term-fg/g) || []).length;
if (colorCount > maxColorCount) {
maxColorCount = colorCount;
bestHtml = html;
}
}
mergedHtml = bestHtml;
}
mergedAnnotations.push({
context,
html: mergedHtml,
merged: true,
platformCount: uniquePlatforms.length,
});
} else {
// Single platform, use original
mergedAnnotations.push(originalAnnotations[0]);
}
}
// Add non-file annotations
mergedAnnotations.push(...nonFileAnnotations);
// Sort annotations: ones with colors at the bottom
const annotationsWithColorInfo = mergedAnnotations.map(annotation => {
const html = annotation.html || "";
const hasColors = html.includes("term-fg") || html.includes("\\x1b[");
return { annotation, hasColors };
});
// Sort: no colors first, then colors
annotationsWithColorInfo.sort((a, b) => {
if (a.hasColors === b.hasColors) return 0;
return a.hasColors ? 1 : -1;
});
const sortedAnnotations = annotationsWithColorInfo.map(item => item.annotation);
// Count failures - look for actual test counts in the content
let totalFailures = 0;
let totalFlaky = 0;
// First try to count from annotations
for (const annotation of sortedAnnotations) {
const isFlaky = annotation.context.toLowerCase().includes("flaky");
const html = annotation.html || "";
// Look for patterns like "X tests failed" or "X failing"
const failureMatches = html.match(/(\d+)\s+(tests?\s+failed|failing)/gi);
if (failureMatches) {
for (const match of failureMatches) {
const count = parseInt(match.match(/\d+/)[0]);
if (isFlaky) {
totalFlaky += count;
} else {
totalFailures += count;
}
break; // Only count first match to avoid duplicates
}
} else if (!isFlaky) {
// If no count found, count the annotation itself
totalFailures++;
}
}
// If no annotations, use job count
if (totalFailures === 0 && failedJobs.length > 0) {
totalFailures = failedJobs.length;
}
// Display failure count
if (totalFailures > 0 || totalFlaky > 0) {
if (totalFailures > 0) {
console.log(`\n${colors.red}${colors.bold}${totalFailures} test failures${colors.reset}`);
}
if (showFlaky && totalFlaky > 0) {
console.log(`${colors.dim}${totalFlaky} flaky tests${colors.reset}`);
}
console.log();
} else if (failedJobs.length > 0) {
console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`);
}
// Display all annotations
console.log();
for (const annotation of sortedAnnotations) {
// Skip flaky tests unless --flaky flag is set
if (!showFlaky && annotation.context.toLowerCase().includes("flaky")) {
continue;
}
// Display context header with background color
// For merged annotations, show platform info
if (annotation.merged && annotation.platformCount) {
// Extract filename and failure info from context
const contextParts = annotation.context.match(/^(.+?)\s+-\s+(.+?)\s+on\s+(.+)$/);
if (contextParts) {
const [, filename, failureInfo, platformsStr] = contextParts;
const fileUrl = fileToUrl(filename);
console.log(
`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} - ${failureInfo} ${colors.reset} ${colors.dim}on ${platformsStr}${colors.reset}`,
);
} else {
const fileUrl = fileToUrl(annotation.context);
console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} ${colors.reset}`);
}
} else {
// Single annotation - need to extract platform info from HTML
const fileUrl = fileToUrl(annotation.context);
// Try to extract platform info from the HTML for single platform tests
const html = annotation.html || "";
const singlePlatformMatch = html.match(
/<summary>[\s\S]*?<a[^>]+><code>([^<]+)<\/code><\/a>\s*-\s*(\d+\s+\w+)\s+on\s+<a[^>]+>([\s\S]+?)<\/a>/,
);
if (singlePlatformMatch) {
const failureInfo = singlePlatformMatch[2];
const platformHtml = singlePlatformMatch[3];
const platform = platformHtml.replace(/<img[^>]+>/g, "").trim();
console.log(
`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} - ${failureInfo} ${colors.reset} ${colors.dim}on ${platform}${colors.reset}`,
);
} else {
console.log(`${colors.bgBlue}${colors.white}${colors.bold} ${fileUrl} ${colors.reset}`);
}
}
console.log();
// Process the annotation HTML to preserve colors
const html = annotation.html || "";
// First unescape unicode sequences
let unescapedHtml = html
.replace(/\\u003c/g, "<")
.replace(/\\u003e/g, ">")
.replace(/\\u0026/g, "&")
.replace(/\\"/g, '"')
.replace(/\\'/g, "'")
.replace(/\\u001b/g, "\x1b"); // Unescape ANSI escape sequences
// Handle newlines more carefully - BuildKite sometimes has actual newlines that shouldn't be there
// Only replace \n if it's actually an escaped newline, not part of the content
unescapedHtml = unescapedHtml.replace(/\\n/g, "\n");
// Also handle escaped ANSI sequences that might appear as \\x1b or \033
unescapedHtml = unescapedHtml.replace(/\\\\x1b/g, "\x1b").replace(/\\033/g, "\x1b");
// Convert HTML with ANSI color classes to actual ANSI codes
const termColors = {
// Standard colors (0-7)
"term-fg0": "\x1b[30m", // black
"term-fg1": "\x1b[31m", // red
"term-fg2": "\x1b[32m", // green
"term-fg3": "\x1b[33m", // yellow
"term-fg4": "\x1b[34m", // blue
"term-fg5": "\x1b[35m", // magenta
"term-fg6": "\x1b[36m", // cyan
"term-fg7": "\x1b[37m", // white
// Also support 30-37 format
"term-fg30": "\x1b[30m", // black
"term-fg31": "\x1b[31m", // red
"term-fg32": "\x1b[32m", // green
"term-fg33": "\x1b[33m", // yellow
"term-fg34": "\x1b[34m", // blue
"term-fg35": "\x1b[35m", // magenta
"term-fg36": "\x1b[36m", // cyan
"term-fg37": "\x1b[37m", // white
// Bright colors with 'i' prefix
"term-fgi90": "\x1b[90m", // bright black
"term-fgi91": "\x1b[91m", // bright red
"term-fgi92": "\x1b[92m", // bright green
"term-fgi93": "\x1b[93m", // bright yellow
"term-fgi94": "\x1b[94m", // bright blue
"term-fgi95": "\x1b[95m", // bright magenta
"term-fgi96": "\x1b[96m", // bright cyan
"term-fgi97": "\x1b[97m", // bright white
// Also support without 'i'
"term-fg90": "\x1b[90m", // bright black
"term-fg91": "\x1b[91m", // bright red
"term-fg92": "\x1b[92m", // bright green
"term-fg93": "\x1b[93m", // bright yellow
"term-fg94": "\x1b[94m", // bright blue
"term-fg95": "\x1b[95m", // bright magenta
"term-fg96": "\x1b[96m", // bright cyan
"term-fg97": "\x1b[97m", // bright white
// Background colors
"term-bg40": "\x1b[40m", // black
"term-bg41": "\x1b[41m", // red
"term-bg42": "\x1b[42m", // green
"term-bg43": "\x1b[43m", // yellow
"term-bg44": "\x1b[44m", // blue
"term-bg45": "\x1b[45m", // magenta
"term-bg46": "\x1b[46m", // cyan
"term-bg47": "\x1b[47m", // white
// Text styles
"term-bold": "\x1b[1m",
"term-dim": "\x1b[2m",
"term-italic": "\x1b[3m",
"term-underline": "\x1b[4m",
};
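// For example, with isTTY set, the loop below turns
// '<span class="term-fg31">fail</span>' into "\x1b[31mfail\x1b[0m".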
let text = unescapedHtml;
// Convert color spans to ANSI codes if TTY
if (isTTY) {
// Convert spans with color classes to ANSI codes
for (const [className, ansiCode] of Object.entries(termColors)) {
// Match spans that contain the class name (might have multiple classes)
// Need to handle both formats: <span class="..."> and <span ... class="...">
const regex = new RegExp(`<span[^>]*class="[^"]*\\b${className}\\b[^"]*"[^>]*>([\\s\\S]*?)</span>`, "g");
text = text.replace(regex, (match, content) => {
// Don't add reset if the content already has ANSI codes
if (content.includes("\x1b[")) {
return `${ansiCode}${content}`;
}
return `${ansiCode}${content}${colors.reset}`;
});
}
}
// Check if we already have ANSI codes in the text after processing
const hasExistingAnsi = text.includes("\x1b[");
// Check for broken color patterns (single characters wrapped in colors)
// If we see patterns like green[, red text, green], it's likely broken
// Also check for patterns like: green[, then reset, then text, then red text, then reset, then green]
const hasBrokenColors =
text.includes("\x1b[32m[") ||
text.includes("\x1b[32m]") ||
(text.includes("\x1b[32m✓") && text.includes("\x1b[31m") && text.includes("ms]"));
if (hasBrokenColors) {
// Remove all ANSI codes if the coloring looks broken
text = text.replace(/\x1b\[[0-9;]*m/g, "");
}
// Remove all HTML tags, but be careful with existing ANSI codes
text = text
.replace(/<pre[^>]*><code[^>]*>([\s\S]*?)<\/code><\/pre>/g, "$1")
.replace(/<br\s*\/?>/g, "\n")
.replace(/<\/p>/g, "\n")
.replace(/<p>/g, "")
.replace(/<[^>]+>/g, "")
.replace(/&lt;/g, "<")
.replace(/&gt;/g, ">")
.replace(/&amp;/g, "&")
.replace(/&quot;/g, '"')
.replace(/&#39;/g, "'")
.replace(/&nbsp;/g, " ")
.replace(/\u00A0/g, " ") // Non-breaking space
.trim();
// Remove excessive blank lines - be more aggressive
text = text.replace(/\n\s*\n\s*\n+/g, "\n\n"); // Replace 3+ newlines with 2
text = text.replace(/\n\s*\n/g, "\n"); // Replace 2 newlines with 1
// For zig error annotations, check if there are multiple platform sections
let handled = false;
if (annotation.context.includes("zig error")) {
// Split by platform headers within the content
const platformSections = text.split(/(?=^\s*[^\s\/]+\.zig\s*-\s*zig error\s+on\s+)/m);
if (platformSections.length > 1) {
// Skip the first empty section if it exists
const sections = platformSections.filter(s => s.trim());
if (sections.length > 1) {
// We have multiple platform errors in one annotation
// Extract unique platform names
const platforms = [];
for (const section of sections) {
const platformMatch = section.match(/on\s+(\S+)/);
if (platformMatch) {
platforms.push(platformMatch[1]);
}
}
// Show combined header with background color
const filename = annotation.context;
const fileUrl = fileToUrl(filename);
const platformText = platforms.join(", ");
console.log(
`${colors.bgRed}${colors.white}${colors.bold} ${fileUrl} ${colors.reset} ${colors.dim}on ${platformText}${colors.reset}`,
);
console.log();
// Show only the first error detail (they're the same)
const firstError = sections[0];
const errorLines = firstError.split("\n");
// Skip the platform-specific header line and remove excessive blank lines
let previousWasBlank = false;
for (let i = 0; i < errorLines.length; i++) {
const line = errorLines[i];
if (i === 0 && line.match(/\.zig\s*-\s*zig error\s+on\s+/)) {
continue; // Skip platform header
}
// Skip multiple consecutive blank lines
const isBlank = line.trim() === "";
if (isBlank && previousWasBlank) {
continue;
}
previousWasBlank = isBlank;
console.log(line); // No indentation
}
console.log();
handled = true;
}
}
}
// Normal processing for other annotations
if (!handled) {
// For merged annotations, skip the duplicate headers within the content
const isMerged = annotation.merged || (annotation.platformCount && annotation.platformCount > 1);
// Process lines, removing excessive blank lines
let previousWasBlank = false;
text.split("\n").forEach((line, index) => {
// For merged annotations, skip duplicate platform headers
if (
isMerged &&
index > 0 &&
line.match(/^[^\s\/]+\.(ts|js|tsx|jsx|zig)\s*-\s*\d+\s+(failing|errors?|warnings?)\s+on\s+/)
) {
return; // Skip duplicate headers in merged content
}
// Skip multiple consecutive blank lines
const isBlank = line.trim() === "";
if (isBlank && previousWasBlank) {
return;
}
previousWasBlank = isBlank;
console.log(line); // No indentation
});
console.log();
}
}
} catch (e) {
console.error("Failed to parse annotations:", e);
console.log("\nView detailed results at:");
console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
}
} else {
console.log(`\n${colors.red}${colors.bold}${failedJobs.length} job failures${colors.reset}\n`);
console.log("View detailed results at:");
console.log(` https://buildkite.com/bun/bun/builds/${buildNumber}#annotations`);
}

scripts/longest.js Normal file
View File

@@ -0,0 +1,125 @@
const fs = require("fs");
const path = require("path");
// Regex patterns for different types of top-level declarations
const DECLARATION_PATTERN =
// pub? (export|extern)? (const|fn|var) name
/^(pub\s+)?(export\s+|extern\s+)?(const|fn|var)\s+([a-zA-Z_][a-zA-Z0-9_]*)/;
function findDeclarations(filePath) {
const content = fs.readFileSync(filePath, "utf8");
const lines = content.split("\n");
const declarations = [];
// First pass: collect all declarations with their line numbers
for (let lineNum = 0; lineNum < lines.length; lineNum++) {
const line = lines[lineNum];
// Skip empty lines and comments
if (!line || line.trim().startsWith("//") || line.trim().startsWith("///")) {
continue;
}
// Only process top-level declarations (no indentation)
if (line.startsWith(" ") || line.startsWith("\t")) {
continue;
}
const trimmedLine = line.trim();
// Check each pattern
const match = trimmedLine.match(DECLARATION_PATTERN);
if (match) {
// Extract the name from the match
const name = match[match.length - 1]; // Last capture group is the name
declarations.push({
name,
match: match[0],
line: lineNum + 1,
type: getDeclarationType(match[0]),
fullLine: trimmedLine,
startLine: lineNum,
});
}
}
// Second pass: calculate sizes based on next declaration's start line
for (let i = 0; i < declarations.length; i++) {
const currentDecl = declarations[i];
const nextDecl = declarations[i + 1];
if (nextDecl) {
// Size is from current declaration start to next declaration start
currentDecl.size = nextDecl.startLine - currentDecl.startLine;
} else {
// Last declaration: size is from current declaration start to end of file
currentDecl.size = lines.length - currentDecl.startLine;
}
}
return declarations;
}
function getDeclarationType(matchText) {
if (matchText.includes("const")) return "const";
if (matchText.includes("fn")) return "fn";
if (matchText.includes("var")) return "var";
return "unknown";
}
function main() {
const args = process.argv.slice(2);
if (args.length === 0) {
console.error("Usage: bun longest.js <zig-file>");
console.error("Example: bun longest.js src/walker_skippable.zig");
process.exit(1);
}
const filePath = args[0];
if (!fs.existsSync(filePath)) {
console.error(`File not found: ${filePath}`);
process.exit(1);
}
if (!filePath.endsWith(".zig")) {
console.error("Please provide a .zig file");
process.exit(1);
}
try {
const declarations = findDeclarations(filePath);
if (declarations.length === 0) {
console.log("No top-level declarations found.");
return;
}
console.log(`Found ${declarations.length} top-level declarations in ${filePath}:\n`);
// Sort by declaration size (smallest first)
declarations.sort((a, b) => a.size - b.size);
// Find the longest match text and type for column widths
const maxNameLength = Math.max(...declarations.map(d => d.match.length));
const maxTypeLength = Math.max(...declarations.map(d => d.type.length));
console.log(`${"Name".padEnd(maxNameLength + 2)} ${"Type".padEnd(maxTypeLength + 2)} ${"Num Lines".padEnd(6)}`);
console.log("-".repeat(maxNameLength + maxTypeLength + 15));
declarations.forEach(decl => {
console.log(
`${decl.match.padEnd(maxNameLength + 2)} ${decl.type.padEnd(maxTypeLength + 2)} ${decl.size.toString().padEnd(6)}`,
);
});
} catch (error) {
console.error("Error reading file:", error.message);
process.exit(1);
}
}
if (require.main === module) {
main();
}
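For illustration, the report is a three-column table sorted smallest-first; the file name and line counts below are hypothetical:

$ bun scripts/longest.js src/walker_skippable.zig
Found 3 top-level declarations in src/walker_skippable.zig:

Name              Type   Num Lines
----------------------------------
const std         const  1
pub const Entry   const  14
pub fn walk       fn     62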

View File

@@ -230,6 +230,27 @@ function getTestExpectations() {
return expectations;
}
/**
* Returns whether we should validate exception checks when running the given test
* @param {string} test
* @returns {boolean}
*/
const shouldValidateExceptions = (() => {
let skipArray;
return test => {
if (!skipArray) {
const path = join(cwd, "test/no-validate-exceptions.txt");
if (!existsSync(path)) {
skipArray = [];
} else {
skipArray = readFileSync(path, "utf-8")
.split("\n")
.filter(line => !line.startsWith("#") && line.length > 0);
}
}
return !(skipArray.includes(test) || skipArray.includes("test/" + test));
};
})();
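// Illustrative, not part of the diff: test/no-validate-exceptions.txt is a plain
// list, one test per line, '#' lines ignored, entries accepted with or without
// the "test/" prefix. A hypothetical example:
//   # Too noisy under BUN_JSC_validateExceptionChecks
//   parallel/test-fs-watch.js
//   test/parallel/test-http-server.js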
/**
* @param {string} testPath
* @returns {string[]}
@@ -432,16 +453,20 @@ async function runTests() {
const runWithBunTest =
title.includes("needs-test") || testContent.includes("bun:test") || testContent.includes("node:test");
const subcommand = runWithBunTest ? "test" : "run";
const env = {
FORCE_COLOR: "0",
NO_COLOR: "1",
BUN_DEBUG_QUIET_LOGS: "1",
};
if (basename(execPath).includes("asan") && shouldValidateExceptions(testPath)) {
env.BUN_JSC_validateExceptionChecks = "1";
}
await runTest(title, async () => {
const { ok, error, stdout } = await spawnBun(execPath, {
cwd: cwd,
args: [subcommand, "--config=" + join(import.meta.dirname, "../bunfig.node-test.toml"), absoluteTestPath],
timeout: getNodeParallelTestTimeout(title),
env: {
FORCE_COLOR: "0",
NO_COLOR: "1",
BUN_DEBUG_QUIET_LOGS: "1",
},
env,
stdout: chunk => pipeTestStdout(process.stdout, chunk),
stderr: chunk => pipeTestStdout(process.stderr, chunk),
});
@@ -969,13 +994,18 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {
testArgs.push(absPath);
const env = {
GITHUB_ACTIONS: "true", // always true so annotations are parsed
};
if (basename(execPath).includes("asan") && shouldValidateExceptions(relative(cwd, absPath))) {
env.BUN_JSC_validateExceptionChecks = "1";
}
const { ok, error, stdout } = await spawnBun(execPath, {
args: isReallyTest ? testArgs : [...args, absPath],
cwd: options["cwd"],
timeout: isReallyTest ? timeout : 30_000,
env: {
GITHUB_ACTIONS: "true", // always true so annotations are parsed
},
env,
stdout: chunk => pipeTestStdout(process.stdout, chunk),
stderr: chunk => pipeTestStdout(process.stderr, chunk),
});

scripts/sortImports.ts Normal file
View File

@@ -0,0 +1,394 @@
import { readdirSync } from "fs";
import path from "path";
// Parse command line arguments
const args = process.argv.slice(2);
const filePaths = args.filter(arg => !arg.startsWith("-"));
const usage = String.raw`
__ .__ __
____________________/ |_ _______|__| _____ ______ ____________/ |_ ______
\___ / _ \_ __ \ __\ \___ / |/ \\____ \ / _ \_ __ \ __\/ ___/
/ ( <_> ) | \/| | / /| | Y Y \ |_> > <_> ) | \/| | \___ \
/_____ \____/|__| |__| /_____ \__|__|_| / __/ \____/|__| |__| /____ >
\/ \/ \/|__| \/
Usage: bun scripts/sortImports [options] <files...>
Options:
--help Show this help message
--no-include-pub Exclude pub imports from sorting
--no-remove-unused Don't remove unused imports
--include-unsorted Process files even if they don't have @sortImports marker
Examples:
bun scripts/sortImports src
`.slice(1);
if (args.includes("--help")) {
console.log(usage);
process.exit(0);
}
if (filePaths.length === 0) {
console.error(usage);
process.exit(1);
}
const config = {
includePub: !args.includes("--no-include-pub"),
removeUnused: !args.includes("--no-remove-unused"),
includeUnsorted: args.includes("--include-unsorted"),
};
// Type definitions
type Declaration = {
index: number;
key: string;
value: string;
segments: string[] | null;
whole: string;
last?: string;
wholepath?: string[];
};
// Parse declarations from the file
function parseDeclarations(
lines: string[],
fileContents: string,
): {
declarations: Map<string, Declaration>;
unusedLineIndices: number[];
} {
const declarations = new Map<string, Declaration>();
const unusedLineIndices: number[] = [];
// Iterate line indices in content-sorted order so output is stable across runs
const sortedLineKeys = [...lines.keys()].sort((a, b) => (lines[a] < lines[b] ? -1 : lines[a] > lines[b] ? 1 : 0));
for (const i of sortedLineKeys) {
const line = lines[i];
if (line === "// @sortImports") {
lines[i] = "";
continue;
}
const inlineDeclPattern = /^(?:pub )?const ([a-zA-Z0-9_]+) = (.+);$/;
const match = line.match(inlineDeclPattern);
if (!match) continue;
const name = match[1];
const value = match[2];
// Skip if the previous line has a doc comment
const prevLine = lines[i - 1] ?? "";
if (prevLine.startsWith("///")) {
continue;
}
// Skip unused declarations (non-public declarations that appear only once)
if (config.removeUnused && !line.includes("pub ")) {
const escapedName = name.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
const expectedCount = (line.match(new RegExp(`\\b${escapedName}\\b`, "g")) || []).length;
const actualCount = (fileContents.match(new RegExp(`\\b${escapedName}\\b`, "g")) || []).length;
if (expectedCount === actualCount) {
// unused decl
unusedLineIndices.push(i);
continue;
}
}
if (!config.includePub && line.includes("pub ")) {
continue;
}
declarations.set(name, {
whole: line,
index: i,
key: name,
value,
segments: parseSegments(value),
});
}
return { declarations, unusedLineIndices };
}
// Validate if a segment is a valid identifier
function isValidSegment(segment: string): boolean {
if (segment.startsWith("@import(") || segment === "@This()") {
return true;
}
return segment.match(/^[a-zA-Z0-9_]+$/) != null;
}
// Parse import path segments from a value
function parseSegments(value: string): null | string[] {
if (value.startsWith("@import(")) {
const rightBracketIndex = value.indexOf(")");
if (rightBracketIndex === -1) return null;
const importPart = value.slice(0, rightBracketIndex + 1);
const remainingPart = value.slice(rightBracketIndex + 1);
if (remainingPart.startsWith(".")) {
const segments = remainingPart.slice(1).split(".");
if (!segments.every(segment => isValidSegment(segment))) return null;
return [importPart, ...segments];
} else if (remainingPart === "") {
return [importPart];
} else {
return null;
}
} else {
const segments = value.split(".");
if (!segments.every(segment => isValidSegment(segment))) return null;
return segments;
}
}
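// For example (illustrative values):
//   parseSegments('@import("std").mem.Allocator')  -> ['@import("std")', "mem", "Allocator"]
//   parseSegments("bun.JSC.JSValue")               -> ["bun", "JSC", "JSValue"]
//   parseSegments('@import("std").ArrayList(u8)')  -> null (segment fails isValidSegment)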
// Resolve the first segment of an import path
function resolveFirstSegment(firstSegment: string, declarations: Map<string, Declaration>): null | string[] {
if (firstSegment.startsWith("@import(") || firstSegment.startsWith("@This()")) {
return [firstSegment];
} else {
const declaration = declarations.get(firstSegment);
if (!declaration) {
return null; // Unknown declaration
}
const subFirstSegment = declaration.segments?.[0];
if (!subFirstSegment) {
return null; // Invalid declaration
}
const resolvedSubFirst = resolveFirstSegment(subFirstSegment, declarations);
if (!resolvedSubFirst) {
return null; // Unable to resolve
}
return [...resolvedSubFirst, ...(declaration.segments?.slice(1) ?? [])];
}
}
type Group = {
keySegments: string[];
declarations: Declaration[];
};
// Group declarations by their import paths
function groupDeclarationsByImportPath(declarations: Map<string, Declaration>): Map<string, Group> {
const groups = new Map<string, Group>();
for (const declaration of declarations.values()) {
if (!declaration.segments || declaration.segments.length < 1) {
continue;
}
const firstSegment = declaration.segments[0];
const resolvedFirst = resolveFirstSegment(firstSegment, declarations);
if (!resolvedFirst) {
continue;
}
const remainingSegments = declaration.segments.slice(1);
const fullPath = [...resolvedFirst, ...remainingSegments];
const lastSegment = fullPath.pop();
if (!lastSegment) {
continue;
}
const groupKey = fullPath.join(".");
if (!groups.has(groupKey)) {
groups.set(groupKey, { keySegments: fullPath, declarations: [] });
}
groups.get(groupKey)!.declarations.push(declaration);
declaration.last = lastSegment;
declaration.wholepath = [...fullPath, lastSegment];
}
return groups;
}
// Merge single-item groups into their parent groups
function mergeSingleItemGroups(groups: Map<string, Group>): void {
while (true) {
let hasChanges = false;
for (const [groupKey, group] of groups.entries()) {
if (group.declarations.length === 1) {
const gcsplit = [...group.keySegments];
while (gcsplit.pop()) {
const parentKey = gcsplit.join(".");
if (groups.has(parentKey)) {
groups.get(parentKey)!.declarations.push(group.declarations[0]);
groups.delete(groupKey);
hasChanges = true;
break;
}
}
}
}
if (!hasChanges) break;
}
}
// Move items with child groups to the top of those child groups
function promoteItemsWithChildGroups(groups: Map<string, Group>): void {
for (const [groupKey, group] of groups.entries()) {
for (let i = 0; i < group.declarations.length; ) {
const item = group.declarations[i];
const childGroupKey = (groupKey ? groupKey + "." : "") + item.last;
if (groups.has(childGroupKey)) {
groups.get(childGroupKey)!.declarations.unshift(item);
group.declarations.splice(i, 1);
} else {
i++;
}
}
}
}
// Sort groups and their declarations
function sortGroupsAndDeclarations(groups: Map<string, Group>): string[] {
// Sort declarations within each group
for (const group of groups.values()) {
group.declarations.sort((a, b) => {
if (a.wholepath?.length !== b.wholepath?.length) {
return (a.wholepath?.length ?? 0) - (b.wholepath?.length ?? 0);
}
return a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
});
}
// Sort group keys alphabetically
return Array.from(groups.keys()).sort((a, b) => {
return a < b ? -1 : a > b ? 1 : 0;
});
}
// Generate the sorted output
function generateSortedOutput(lines: string[], groups: Map<string, Group>, sortedGroupKeys: string[]): string[] {
const outputLines = [...lines];
outputLines.push("");
outputLines.push("// @sortImports");
for (const groupKey of sortedGroupKeys) {
const groupDeclarations = groups.get(groupKey)!;
if (!groupDeclarations?.declarations.length) continue;
// Add spacing between groups
outputLines.push("");
// Add declarations to output and mark original lines for removal
for (const declaration of groupDeclarations.declarations) {
outputLines.push(declaration.whole);
outputLines[declaration.index] = "";
}
}
return outputLines;
}
// Main execution function for a single file
async function processFile(filePath: string): Promise<void> {
const originalFileContents = await Bun.file(filePath).text();
let fileContents = originalFileContents;
if (!config.includeUnsorted && !originalFileContents.includes("// @sortImports")) {
return;
}
console.log(`Processing: ${filePath}`);
let needsRecurse = true;
while (needsRecurse) {
needsRecurse = false;
const lines = fileContents.split("\n");
const { declarations, unusedLineIndices } = parseDeclarations(lines, fileContents);
const groups = groupDeclarationsByImportPath(declarations);
promoteItemsWithChildGroups(groups);
mergeSingleItemGroups(groups);
const sortedGroupKeys = sortGroupsAndDeclarations(groups);
const sortedLines = generateSortedOutput(lines, groups, sortedGroupKeys);
// Remove unused declarations
if (config.removeUnused) {
for (const line of unusedLineIndices) {
sortedLines[line] = "";
needsRecurse = true;
}
}
fileContents = sortedLines.join("\n");
}
// Remove any leading newlines
fileContents = fileContents.replace(/^\n+/, "");
// Maximum of one empty line
fileContents = fileContents.replace(/\n\n+/g, "\n\n");
// Ensure exactly one trailing newline
fileContents = fileContents.replace(/\s*$/, "\n");
// If the file is empty, remove the trailing newline
if (fileContents === "\n") fileContents = "";
if (fileContents === originalFileContents) {
console.log(`✓ No changes: ${filePath}`);
return;
}
// Write the sorted file
await Bun.write(filePath, fileContents);
console.log(`✓ Done: ${filePath}`);
}
// Process all files
async function main() {
let successCount = 0;
let errorCount = 0;
for (const filePath of filePaths) {
const stat = await Bun.file(filePath).stat();
if (stat.isDirectory()) {
const files = readdirSync(filePath, { recursive: true });
for (const file of files) {
if (typeof file !== "string" || !file.endsWith(".zig")) continue;
try {
await processFile(path.join(filePath, file));
successCount++;
} catch (error) {
errorCount++;
console.error(`Failed to process ${path.join(filePath, file)}:`, error);
}
}
continue;
}
try {
await processFile(filePath);
successCount++;
} catch (error) {
errorCount++;
console.error(`Failed to process ${filePath}:`, error);
}
}
console.log(`\nSummary: ${successCount} files processed successfully, ${errorCount} errors`);
if (errorCount > 0) {
process.exit(1);
}
}
main();
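Sketch of the end-to-end effect on a marked Zig file (declarations are hypothetical): matching top-level consts move below a regenerated // @sortImports marker, grouped by resolved import path and ordered shortest-path-first.

Before:
// @sortImports
const Allocator = std.mem.Allocator;
const std = @import("std");

pub fn main() void {
    std.debug.print("{s}\n", .{@typeName(Allocator)});
}

After:
pub fn main() void {
    std.debug.print("{s}\n", .{@typeName(Allocator)});
}

// @sortImports

const std = @import("std");
const Allocator = std.mem.Allocator;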