Mirror of https://github.com/oven-sh/bun, synced 2026-02-03 07:28:53 +00:00
Compare commits: dylan/test...claude/fix (1 commit)
| Author | SHA1 | Date |
|---|---|---|
| | 1b390a11b0 | |
78  .agent/agent.mjs  Normal file
@@ -0,0 +1,78 @@
import { spawnSync } from "node:child_process";
import { readFileSync, existsSync } from "node:fs";
import { parseArgs } from "node:util";

const { positionals, values } = parseArgs({
  allowPositionals: true,
  options: {
    help: {
      type: "boolean",
      short: "h",
      default: false,
    },
    interactive: {
      type: "boolean",
      short: "i",
      default: false,
    },
  },
});

if (values.help || positionals.length === 0) {
  console.log("Usage: node agent.mjs <prompt_name> [extra_args...]");
  console.log("Example: node agent.mjs triage fix bug in authentication");
  console.log("Options:");
  console.log("  -h, --help         Show this help message");
  console.log("  -i, --interactive  Run in interactive mode");
  process.exit(0);
}

const promptName = positionals[0].toUpperCase();
const promptFile = `.agent/${promptName}.md`;
const extraArgs = positionals.slice(1);

if (!existsSync(promptFile)) {
  console.error(`Error: Prompt file "${promptFile}" not found`);
  console.error(`Available prompts should be named like: .agent/triage.md, .agent/debug.md, etc.`);
  process.exit(1);
}

try {
  let prompt = readFileSync(promptFile, "utf-8");

  const githubEnvs = Object.entries(process.env)
    .filter(([key]) => key.startsWith("GITHUB_"))
    .sort(([a], [b]) => a.localeCompare(b));

  if (githubEnvs.length > 0) {
    const githubContext = `## GitHub Environment\n\n${githubEnvs
      .map(([key, value]) => `**${key}**: \`${value}\``)
      .join("\n")}\n\n---\n\n`;
    prompt = githubContext + prompt;
  }

  if (extraArgs.length > 0) {
    const extraArgsContext = `\n\n## Additional Arguments\n\n${extraArgs.join(" ")}\n\n---\n\n`;
    prompt = prompt + extraArgsContext;
  }

  const claudeArgs = [prompt, "--allowedTools=Edit,Write,Replace,Search", "--output-format=json"];
  if (!values.interactive) {
    claudeArgs.unshift("--print");
  }

  const { status, error } = spawnSync("claude", claudeArgs, {
    stdio: "inherit",
    encoding: "utf-8",
  });

  if (error) {
    console.error("Error running claude:", error);
    process.exit(1);
  }

  process.exit(status || 0);
} catch (error) {
  console.error(`Error reading prompt file "${promptFile}":`, error);
  process.exit(1);
}
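For reference, a minimal invocation sketch based on the wrapper's own help text and lookup logic (paths are relative to the repository root; the prompt name is illustrative):

```bash
# Non-interactive run: the script uppercases the prompt name, so this reads
# .agent/TRIAGE.md, prepends any GITHUB_* environment variables, appends the
# extra arguments, and spawns the `claude` CLI with --print.
node .agent/agent.mjs triage fix bug in authentication

# Interactive run (-i): same prompt assembly, but --print is not passed.
node .agent/agent.mjs -i triage
```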
@@ -303,34 +303,9 @@ function getCppAgent(platform, options) {
  }

  return getEc2Agent(platform, options, {
    instanceType: arch === "aarch64" ? "c8g.4xlarge" : "c7i.4xlarge",
  });
}

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
 * @returns {string}
 */
function getLinkBunAgent(platform, options) {
  const { os, arch, distro } = platform;

  if (os === "darwin") {
    return {
      queue: `build-${os}`,
      os,
      arch,
    };
  }

  if (os === "windows") {
    return getEc2Agent(platform, options, {
      instanceType: arch === "aarch64" ? "r8g.large" : "r7i.large",
    });
  }

  return getEc2Agent(platform, options, {
    instanceType: arch === "aarch64" ? "r8g.xlarge" : "r7i.xlarge",
    instanceType: arch === "aarch64" ? "c8g.16xlarge" : "c7i.16xlarge",
    cpuCount: 32,
    threadsPerCore: 1,
  });
}

@@ -371,7 +346,7 @@ function getZigAgent(platform, options) {
 * @returns {Agent}
 */
function getTestAgent(platform, options) {
  const { os, arch, profile } = platform;
  const { os, arch } = platform;

  if (os === "darwin") {
    return {
@@ -381,7 +356,7 @@ function getTestAgent(platform, options) {
    };
  }

  // TODO: delete this block when we upgrade to mimalloc v3
  // TODO: `dev-server-ssr-110.test.ts` and `next-build.test.ts` run out of memory at 8GB of memory, so use 16GB instead.
  if (os === "windows") {
    return getEc2Agent(platform, options, {
      instanceType: "c7i.2xlarge",
@@ -391,13 +366,6 @@ function getTestAgent(platform, options) {
  }

  if (arch === "aarch64") {
    if (profile === "asan") {
      return getEc2Agent(platform, options, {
        instanceType: "c8g.2xlarge",
        cpuCount: 2,
        threadsPerCore: 1,
      });
    }
    return getEc2Agent(platform, options, {
      instanceType: "c8g.xlarge",
      cpuCount: 2,
@@ -405,13 +373,6 @@ function getTestAgent(platform, options) {
    });
  }

  if (profile === "asan") {
    return getEc2Agent(platform, options, {
      instanceType: "c7i.2xlarge",
      cpuCount: 2,
      threadsPerCore: 1,
    });
  }
  return getEc2Agent(platform, options, {
    instanceType: "c7i.xlarge",
    cpuCount: 2,
@@ -448,17 +409,11 @@ function getBuildEnv(target, options) {
 * @param {PipelineOptions} options
 * @returns {string}
 */
function getBuildCommand(target, options, label) {
function getBuildCommand(target, options) {
  const { profile } = target;
  const buildProfile = profile || "release";

  if (target.os === "windows" && label === "build-bun") {
    // Only sign release builds, not canary builds (DigiCert charges per signature)
    const enableSigning = !options.canary ? " -DENABLE_WINDOWS_CODESIGNING=ON" : "";
    return `bun run build:${buildProfile}${enableSigning}`;
  }

  return `bun run build:${buildProfile}`;
  const label = profile || "release";
  return `bun run build:${label}`;
}

/**
@@ -547,15 +502,14 @@ function getLinkBunStep(platform, options) {
    key: `${getTargetKey(platform)}-build-bun`,
    label: `${getTargetLabel(platform)} - build-bun`,
    depends_on: [`${getTargetKey(platform)}-build-cpp`, `${getTargetKey(platform)}-build-zig`],
    agents: getLinkBunAgent(platform, options),
    agents: getCppAgent(platform, options),
    retry: getRetry(),
    cancel_on_build_failing: isMergeQueue(),
    env: {
      BUN_LINK_ONLY: "ON",
      ASAN_OPTIONS: "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=0",
      ...getBuildEnv(platform, options),
    },
    command: `${getBuildCommand(platform, options, "build-bun")} --target bun`,
    command: `${getBuildCommand(platform, options)} --target bun`,
  };
}

@@ -616,9 +570,6 @@ function getTestBunStep(platform, options, testOptions = {}) {
    cancel_on_build_failing: isMergeQueue(),
    parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
    timeout_in_minutes: profile === "asan" || os === "windows" ? 45 : 30,
    env: {
      ASAN_OPTIONS: "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=0",
    },
    command:
      os === "windows"
        ? `node .\\scripts\\runner.node.mjs ${args.join(" ")}`
@@ -1,464 +0,0 @@
# Windows Code Signing Script for Bun
# Uses DigiCert KeyLocker for Authenticode signing
# Native PowerShell implementation - no path translation issues

param(
  [Parameter(Mandatory=$true)]
  [string]$BunProfileExe,

  [Parameter(Mandatory=$true)]
  [string]$BunExe
)

$ErrorActionPreference = "Stop"
$ProgressPreference = "SilentlyContinue"

# Logging functions
function Log-Info {
  param([string]$Message)
  Write-Host "[INFO] $Message" -ForegroundColor Cyan
}

function Log-Success {
  param([string]$Message)
  Write-Host "[SUCCESS] $Message" -ForegroundColor Green
}

function Log-Error {
  param([string]$Message)
  Write-Host "[ERROR] $Message" -ForegroundColor Red
}

function Log-Debug {
  param([string]$Message)
  if ($env:DEBUG -eq "true" -or $env:DEBUG -eq "1") {
    Write-Host "[DEBUG] $Message" -ForegroundColor Gray
  }
}

# Load Visual Studio environment if not already loaded
function Ensure-VSEnvironment {
  if ($null -eq $env:VSINSTALLDIR) {
    Log-Info "Loading Visual Studio environment..."

    $vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe"
    if (!(Test-Path $vswhere)) {
      throw "Command not found: vswhere (did you install Visual Studio?)"
    }

    $vsDir = & $vswhere -prerelease -latest -property installationPath
    if ($null -eq $vsDir) {
      $vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory -ErrorAction SilentlyContinue
      if ($null -eq $vsDir) {
        throw "Visual Studio directory not found."
      }
      $vsDir = $vsDir.FullName
    }

    Push-Location $vsDir
    try {
      $vsShell = Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1"
      . $vsShell -Arch amd64 -HostArch amd64
    } finally {
      Pop-Location
    }

    Log-Success "Visual Studio environment loaded"
  }

  if ($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
    throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported."
  }
}

# Check for required environment variables
function Check-Environment {
  Log-Info "Checking environment variables..."

  $required = @{
    "SM_API_KEY" = $env:SM_API_KEY
    "SM_CLIENT_CERT_PASSWORD" = $env:SM_CLIENT_CERT_PASSWORD
    "SM_KEYPAIR_ALIAS" = $env:SM_KEYPAIR_ALIAS
    "SM_HOST" = $env:SM_HOST
    "SM_CLIENT_CERT_FILE" = $env:SM_CLIENT_CERT_FILE
  }

  $missing = @()
  foreach ($key in $required.Keys) {
    if ([string]::IsNullOrEmpty($required[$key])) {
      $missing += $key
    } else {
      Log-Debug "$key is set (length: $($required[$key].Length))"
    }
  }

  if ($missing.Count -gt 0) {
    throw "Missing required environment variables: $($missing -join ', ')"
  }

  Log-Success "All required environment variables are present"
}

# Setup certificate file
function Setup-Certificate {
  Log-Info "Setting up certificate..."

  # Always try to decode as base64 first
  # If it fails, then treat as file path
  try {
    Log-Info "Attempting to decode certificate as base64..."
    Log-Debug "Input string length: $($env:SM_CLIENT_CERT_FILE.Length) characters"

    $tempCertPath = Join-Path $env:TEMP "digicert_cert_$(Get-Random).p12"

    # Try to decode as base64
    $certBytes = [System.Convert]::FromBase64String($env:SM_CLIENT_CERT_FILE)
    [System.IO.File]::WriteAllBytes($tempCertPath, $certBytes)

    # Validate the decoded certificate size
    $fileSize = (Get-Item $tempCertPath).Length
    if ($fileSize -lt 100) {
      throw "Decoded certificate too small: $fileSize bytes (expected >100 bytes)"
    }

    # Update environment to point to file
    $env:SM_CLIENT_CERT_FILE = $tempCertPath

    Log-Success "Certificate decoded and written to: $tempCertPath"
    Log-Debug "Decoded certificate file size: $fileSize bytes"

    # Register cleanup
    $global:TEMP_CERT_PATH = $tempCertPath

  } catch {
    # If base64 decode fails, check if it's a file path
    Log-Info "Base64 decode failed, checking if it's a file path..."
    Log-Debug "Decode error: $_"

    if (Test-Path $env:SM_CLIENT_CERT_FILE) {
      $fileSize = (Get-Item $env:SM_CLIENT_CERT_FILE).Length

      # Validate file size
      if ($fileSize -lt 100) {
        throw "Certificate file too small: $fileSize bytes at $env:SM_CLIENT_CERT_FILE (possibly corrupted)"
      }

      Log-Info "Using certificate file: $env:SM_CLIENT_CERT_FILE"
      Log-Debug "Certificate file size: $fileSize bytes"
    } else {
      throw "SM_CLIENT_CERT_FILE is neither valid base64 nor an existing file: $env:SM_CLIENT_CERT_FILE"
    }
  }
}

# Install DigiCert KeyLocker tools
function Install-KeyLocker {
  Log-Info "Setting up DigiCert KeyLocker tools..."

  # Define our controlled installation directory
  $installDir = "C:\BuildTools\DigiCert"
  $smctlPath = Join-Path $installDir "smctl.exe"

  # Check if already installed in our controlled location
  if (Test-Path $smctlPath) {
    Log-Success "KeyLocker tools already installed at: $smctlPath"

    # Add to PATH if not already there
    if ($env:PATH -notlike "*$installDir*") {
      $env:PATH = "$installDir;$env:PATH"
      Log-Info "Added to PATH: $installDir"
    }

    return $smctlPath
  }

  Log-Info "Installing KeyLocker tools to: $installDir"

  # Create the installation directory if it doesn't exist
  if (!(Test-Path $installDir)) {
    Log-Info "Creating installation directory: $installDir"
    try {
      New-Item -ItemType Directory -Path $installDir -Force | Out-Null
      Log-Success "Created directory: $installDir"
    } catch {
      throw "Failed to create directory $installDir : $_"
    }
  }

  # Download MSI installer
  $msiUrl = "https://bun-ci-assets.bun.sh/Keylockertools-windows-x64.msi"
  $msiPath = Join-Path $env:TEMP "Keylockertools-windows-x64.msi"

  Log-Info "Downloading MSI from: $msiUrl"
  Log-Info "Downloading to: $msiPath"

  try {
    # Remove existing MSI if present
    if (Test-Path $msiPath) {
      Remove-Item $msiPath -Force
      Log-Debug "Removed existing MSI file"
    }

    # Download with progress tracking
    $webClient = New-Object System.Net.WebClient
    $webClient.DownloadFile($msiUrl, $msiPath)

    if (!(Test-Path $msiPath)) {
      throw "MSI download failed - file not found"
    }

    $fileSize = (Get-Item $msiPath).Length
    Log-Success "MSI downloaded successfully (size: $fileSize bytes)"

  } catch {
    throw "Failed to download MSI: $_"
  }

  # Install MSI
  Log-Info "Installing MSI..."
  Log-Debug "MSI path: $msiPath"
  Log-Debug "File exists: $(Test-Path $msiPath)"
  Log-Debug "File size: $((Get-Item $msiPath).Length) bytes"

  # Check if running as administrator
  $isAdmin = ([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole]::Administrator)
  Log-Info "Running as administrator: $isAdmin"

  # Install MSI silently to our controlled directory
  $arguments = @(
    "/i", "`"$msiPath`"",
    "/quiet",
    "/norestart",
    "TARGETDIR=`"$installDir`"",
    "INSTALLDIR=`"$installDir`"",
    "ACCEPT_EULA=1",
    "ADDLOCAL=ALL"
  )

  Log-Debug "Running: msiexec.exe $($arguments -join ' ')"
  Log-Info "Installing to: $installDir"

  $process = Start-Process -FilePath "msiexec.exe" -ArgumentList $arguments -Wait -PassThru -NoNewWindow

  if ($process.ExitCode -ne 0) {
    Log-Error "MSI installation failed with exit code: $($process.ExitCode)"

    # Try to get error details from event log
    try {
      $events = Get-WinEvent -LogName "Application" -MaxEvents 10 |
        Where-Object { $_.ProviderName -eq "MsiInstaller" -and $_.TimeCreated -gt (Get-Date).AddMinutes(-1) }

      foreach ($event in $events) {
        Log-Debug "MSI Event: $($event.Message)"
      }
    } catch {
      Log-Debug "Could not retrieve MSI installation events"
    }

    throw "MSI installation failed with exit code: $($process.ExitCode)"
  }

  Log-Success "MSI installation completed"

  # Wait for installation to complete
  Start-Sleep -Seconds 2

  # Verify smctl.exe exists in our controlled location
  if (Test-Path $smctlPath) {
    Log-Success "KeyLocker tools installed successfully at: $smctlPath"

    # Add to PATH
    $env:PATH = "$installDir;$env:PATH"
    Log-Info "Added to PATH: $installDir"

    return $smctlPath
  }

  # If not in our expected location, check if it installed somewhere in the directory
  $found = Get-ChildItem -Path $installDir -Filter "smctl.exe" -Recurse -ErrorAction SilentlyContinue |
    Select-Object -First 1

  if ($found) {
    Log-Success "Found smctl.exe at: $($found.FullName)"
    $smctlDir = $found.DirectoryName
    $env:PATH = "$smctlDir;$env:PATH"
    return $found.FullName
  }

  throw "KeyLocker tools installation succeeded but smctl.exe not found in $installDir"
}

# Configure KeyLocker
function Configure-KeyLocker {
  param([string]$SmctlPath)

  Log-Info "Configuring KeyLocker..."

  # Verify smctl is accessible
  try {
    $version = & $SmctlPath --version 2>&1
    Log-Debug "smctl version: $version"
  } catch {
    throw "Failed to run smctl: $_"
  }

  # Configure KeyLocker credentials and environment
  Log-Info "Configuring KeyLocker credentials..."

  try {
    # Save credentials (API key and password)
    Log-Info "Saving credentials to OS store..."
    $saveOutput = & $SmctlPath credentials save $env:SM_API_KEY $env:SM_CLIENT_CERT_PASSWORD 2>&1 | Out-String
    Log-Debug "Credentials save output: $saveOutput"

    if ($saveOutput -like "*Credentials saved*") {
      Log-Success "Credentials saved successfully"
    }

    # Set environment variables for smctl
    Log-Info "Setting KeyLocker environment variables..."
    $env:SM_HOST = $env:SM_HOST # Already set, but ensure it's available
    $env:SM_API_KEY = $env:SM_API_KEY # Already set
    $env:SM_CLIENT_CERT_FILE = $env:SM_CLIENT_CERT_FILE # Path to decoded cert file
    Log-Debug "SM_HOST: $env:SM_HOST"
    Log-Debug "SM_CLIENT_CERT_FILE: $env:SM_CLIENT_CERT_FILE"

    # Run health check
    Log-Info "Running KeyLocker health check..."
    $healthOutput = & $SmctlPath healthcheck 2>&1 | Out-String
    Log-Debug "Health check output: $healthOutput"

    if ($healthOutput -like "*Healthy*" -or $healthOutput -like "*SUCCESS*" -or $LASTEXITCODE -eq 0) {
      Log-Success "KeyLocker health check passed"
    } else {
      Log-Error "Health check failed: $healthOutput"
      # Don't throw here, sometimes healthcheck is flaky but signing still works
    }

    # Sync certificates to Windows certificate store
    Log-Info "Syncing certificates to Windows store..."
    $syncOutput = & $SmctlPath windows certsync 2>&1 | Out-String
    Log-Debug "Certificate sync output: $syncOutput"

    if ($syncOutput -like "*success*" -or $syncOutput -like "*synced*" -or $LASTEXITCODE -eq 0) {
      Log-Success "Certificates synced to Windows store"
    } else {
      Log-Info "Certificate sync output: $syncOutput"
    }

  } catch {
    throw "Failed to configure KeyLocker: $_"
  }
}

# Sign an executable
function Sign-Executable {
  param(
    [string]$ExePath,
    [string]$SmctlPath
  )

  if (!(Test-Path $ExePath)) {
    throw "Executable not found: $ExePath"
  }

  $fileName = Split-Path $ExePath -Leaf
  Log-Info "Signing $fileName..."
  Log-Debug "Full path: $ExePath"
  Log-Debug "File size: $((Get-Item $ExePath).Length) bytes"

  # Check if already signed
  $existingSig = Get-AuthenticodeSignature $ExePath
  if ($existingSig.Status -eq "Valid") {
    Log-Info "$fileName is already signed by: $($existingSig.SignerCertificate.Subject)"
    Log-Info "Skipping re-signing"
    return
  }

  # Sign the executable using smctl
  try {
    # smctl sign command with keypair-alias
    $signArgs = @(
      "sign",
      "--keypair-alias", $env:SM_KEYPAIR_ALIAS,
      "--input", $ExePath,
      "--verbose"
    )

    Log-Debug "Running: $SmctlPath $($signArgs -join ' ')"

    $signOutput = & $SmctlPath $signArgs 2>&1 | Out-String

    if ($LASTEXITCODE -ne 0) {
      Log-Error "Signing output: $signOutput"
      throw "Signing failed with exit code: $LASTEXITCODE"
    }

    Log-Debug "Signing output: $signOutput"
    Log-Success "Signing command completed"

  } catch {
    throw "Failed to sign $fileName : $_"
  }

  # Verify signature
  $newSig = Get-AuthenticodeSignature $ExePath

  if ($newSig.Status -eq "Valid") {
    Log-Success "$fileName signed successfully"
    Log-Info "Signed by: $($newSig.SignerCertificate.Subject)"
    Log-Info "Thumbprint: $($newSig.SignerCertificate.Thumbprint)"
    Log-Info "Valid from: $($newSig.SignerCertificate.NotBefore) to $($newSig.SignerCertificate.NotAfter)"
  } else {
    throw "$fileName signature verification failed: $($newSig.Status) - $($newSig.StatusMessage)"
  }
}

# Cleanup function
function Cleanup {
  if ($global:TEMP_CERT_PATH -and (Test-Path $global:TEMP_CERT_PATH)) {
    try {
      Remove-Item $global:TEMP_CERT_PATH -Force
      Log-Info "Cleaned up temporary certificate"
    } catch {
      Log-Error "Failed to cleanup temporary certificate: $_"
    }
  }
}

# Main execution
try {
  Write-Host "========================================" -ForegroundColor Cyan
  Write-Host " Windows Code Signing for Bun" -ForegroundColor Cyan
  Write-Host "========================================" -ForegroundColor Cyan

  # Ensure we're in a VS environment
  Ensure-VSEnvironment

  # Check environment variables
  Check-Environment

  # Setup certificate
  Setup-Certificate

  # Install and configure KeyLocker
  $smctlPath = Install-KeyLocker
  Configure-KeyLocker -SmctlPath $smctlPath

  # Sign both executables
  Sign-Executable -ExePath $BunProfileExe -SmctlPath $smctlPath
  Sign-Executable -ExePath $BunExe -SmctlPath $smctlPath

  Write-Host "========================================" -ForegroundColor Green
  Write-Host " Code signing completed successfully!" -ForegroundColor Green
  Write-Host "========================================" -ForegroundColor Green

  exit 0

} catch {
  Log-Error "Code signing failed: $_"
  exit 1

} finally {
  Cleanup
}
4  .github/CODEOWNERS  vendored
@@ -3,7 +3,3 @@
# Tests
/test/expectations.txt @Jarred-Sumner

# Types
*.d.ts @alii
/packages/bun-types/ @alii
2  .github/actions/bump/action.yml  vendored
@@ -25,7 +25,7 @@ runs:
        echo "version=$LATEST" >> $GITHUB_OUTPUT
        echo "message=$MESSAGE" >> $GITHUB_OUTPUT
    - name: Create Pull Request
      uses: peter-evans/create-pull-request@v7
      uses: peter-evans/create-pull-request@v4
      with:
        add-paths: |
          CMakeLists.txt
47  .github/pull_request_template.md  vendored
@@ -1,3 +1,50 @@
### What does this PR do?

<!-- **Please explain what your changes do**, example: -->

<!--

This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.

-->

- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
- [ ] Code changes

### How did you verify your code works?

<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->

<!-- I wrote automated tests -->

<!-- If JavaScript/TypeScript modules or builtins changed:

- [ ] I included a test for the new code, or existing tests cover it
- [ ] I ran my tests locally and they pass (`bun-debug test test-file-name.test`)

-->

<!-- If Zig files changed:

- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
- [ ] I included a test for the new code, or an existing test covers it
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
- [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
-->

<!-- If new methods, getters, or setters were added to a publicly exposed class:

- [ ] I added TypeScript types for the new methods, getters, or setters
-->

<!-- If dependencies in tests changed:

- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
-->

<!-- If a new builtin ESM/CJS module was added:

- [ ] I updated Aliases in `module_loader.zig` to include the new module
- [ ] I added a test that imports the module
- [ ] I added a test that require() the module
-->
103  .github/workflows/CLAUDE.md  vendored
@@ -1,103 +0,0 @@
# GitHub Actions Workflow Maintenance Guide

This document provides guidance for maintaining the GitHub Actions workflows in this repository.

## format.yml Workflow

### Overview
The `format.yml` workflow runs code formatters (Prettier, clang-format, and Zig fmt) on pull requests and pushes to main. It's optimized for speed by running all formatters in parallel.

### Key Components

#### 1. Clang-format Script (`scripts/run-clang-format.sh`)
- **Purpose**: Formats C++ source and header files
- **What it does**:
  - Reads C++ files from `cmake/sources/CxxSources.txt`
  - Finds all header files in `src/` and `packages/`
  - Excludes third-party directories (libuv, napi, deps, vendor, sqlite, etc.)
  - Requires specific clang-format version (no fallbacks)

**Important exclusions**:
- `src/napi/` - Node API headers (third-party)
- `src/bun.js/bindings/libuv/` - libuv headers (third-party)
- `src/bun.js/bindings/sqlite/` - SQLite headers (third-party)
- `src/bun.js/api/ffi-*.h` - FFI headers (generated/third-party)
- `src/deps/` - Dependencies (third-party)
- Files in `vendor/`, `third_party/`, `generated/` directories

#### 2. Parallel Execution
The workflow runs all three formatters simultaneously (a condensed sketch follows this list):
- Each formatter outputs with a prefix (`[prettier]`, `[clang-format]`, `[zig]`)
- Output is streamed in real-time without blocking
- Uses GitHub Actions groups (`::group::`) for collapsible sections
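The pattern, condensed to a minimal sketch (the real step, visible further down in this diff, also installs the tools and reports failures with `::error::`):

```bash
# Minimal sketch of the prefixed, parallel formatter pattern described above.
echo "::group::Prettier"
(bun run prettier 2>&1 | sed 's/^/[prettier] /') &
PRETTIER_PID=$!

echo "::group::Zig fmt"
(zig fmt src 2>&1 | sed 's/^/[zig] /') &
ZIG_PID=$!

FAILED=0
wait $PRETTIER_PID || FAILED=1   # surface each formatter's exit status
echo "::endgroup::"
wait $ZIG_PID || FAILED=1
echo "::endgroup::"
exit $FAILED
```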
#### 3. Tool Installation
##### Clang-format-19
- Installs ONLY `clang-format-19` package (not the entire LLVM toolchain)
- Uses `--no-install-recommends --no-install-suggests` to skip unnecessary packages
- Quiet installation with `-qq` and `-o=Dpkg::Use-Pty=0`

##### Zig
- Downloads from `oven-sh/zig` releases (musl build for static linking)
- URL: `https://github.com/oven-sh/zig/releases/download/autobuild-{COMMIT}/bootstrap-x86_64-linux-musl.zip`
- Extracts to temp directory to avoid polluting the repository
- Directory structure: `bootstrap-x86_64-linux-musl/zig`

### Updating the Workflow

#### To update Zig version:
1. Find the new commit hash from https://github.com/oven-sh/zig/releases
2. Replace the hash in the wget URL (line 65 of format.yml)
3. Test that the URL is valid and the binary works

#### To update clang-format version:
1. Update `LLVM_VERSION_MAJOR` environment variable at the top of format.yml
2. Update the version check in `scripts/run-clang-format.sh`

#### To add/remove file exclusions:
1. Edit the exclusion patterns in `scripts/run-clang-format.sh` (lines 34-39)
2. Test locally to ensure the right files are being formatted

### Performance Optimizations
1. **Parallel execution**: All formatters run simultaneously
2. **Minimal installations**: Only required packages, no extras
3. **Temp directories**: Tools downloaded to temp dirs, cleaned up after use
4. **Streaming output**: Real-time feedback without buffering
5. **Early start**: Formatting begins immediately after each tool is ready

### Troubleshooting

**If formatters appear to run sequentially:**
- Check if output is being buffered (should use `sed` for line prefixing)
- Ensure background processes use `&` and proper wait commands

**If third-party files are being formatted:**
- Review exclusion patterns in `scripts/run-clang-format.sh`
- Check if new third-party directories were added that need exclusion

**If clang-format installation is slow:**
- Ensure using minimal package installation flags
- Check if apt cache needs updating
- Consider caching the clang-format binary between runs

### Testing Changes Locally

```bash
# Test the clang-format script
export LLVM_VERSION_MAJOR=19
./scripts/run-clang-format.sh format

# Test with check mode (no modifications)
./scripts/run-clang-format.sh check

# Test specific file exclusions
./scripts/run-clang-format.sh format 2>&1 | grep -E "(libuv|napi|deps)"
# Should return nothing if exclusions work correctly
```

### Important Notes
- The script defaults to **format** mode (modifies files)
- Always test locally before pushing workflow changes
- The musl Zig build works on glibc systems due to static linking
- Keep the exclusion list updated as new third-party code is added
19  .github/workflows/auto-assign-types.yml  vendored
@@ -1,19 +0,0 @@
name: Auto Assign Types Issues

on:
  issues:
    types: [labeled]

jobs:
  auto-assign:
    runs-on: ubuntu-latest
    if: github.event.label.name == 'types'
    permissions:
      issues: write
    steps:
      - name: Assign to alii
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          GH_REPO: ${{ github.repository }}
        run: |
          gh issue edit ${{ github.event.issue.number }} --add-assignee alii
24  .github/workflows/auto-label-claude-prs.yml  vendored
@@ -1,24 +0,0 @@
name: Auto-label Claude PRs

on:
  pull_request:
    types: [opened]

jobs:
  auto-label:
    if: github.event.pull_request.user.login == 'robobun' || contains(github.event.pull_request.body, '🤖 Generated with')
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: write
    steps:
      - name: Add claude label to PRs from robobun
        uses: actions/github-script@v7
        with:
          script: |
            github.rest.issues.addLabels({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              labels: ['claude']
            });
48  .github/workflows/claude.yml  vendored
@@ -13,55 +13,23 @@ on:
jobs:
  claude:
    if: |
      github.repository == 'oven-sh/bun' &&
      (
        (github.event_name == 'issue_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) ||
        (github.event_name == 'pull_request_review_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) ||
        (github.event_name == 'pull_request_review' && (github.event.review.author_association == 'MEMBER' || github.event.review.author_association == 'OWNER' || github.event.review.author_association == 'COLLABORATOR')) ||
        (github.event_name == 'issues' && (github.event.issue.author_association == 'MEMBER' || github.event.issue.author_association == 'OWNER' || github.event.issue.author_association == 'COLLABORATOR'))
      ) &&
      (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
      (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
      (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
    runs-on: claude
    env:
      IS_SANDBOX: 1
    container:
      image: localhost:5000/claude-bun:latest
      options: --privileged --user 1000:1000
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write
    steps:
      - name: Checkout repository
        working-directory: /workspace/bun
        run: |
          git config --global user.email "claude-bot@bun.sh" && \
          git config --global user.name "Claude Bot" && \
          git config --global url."git@github.com:".insteadOf "https://github.com/" && \
          git config --global url."git@github.com:".insteadOf "http://github.com/" && \
          git config --global --add safe.directory /workspace/bun && \
          git config --global push.default current && \
          git config --global pull.rebase true && \
          git config --global init.defaultBranch main && \
          git config --global core.editor "vim" && \
          git config --global color.ui auto && \
          git config --global fetch.prune true && \
          git config --global diff.colorMoved zebra && \
          git config --global merge.conflictStyle diff3 && \
          git config --global rerere.enabled true && \
          git config --global core.autocrlf input
          git fetch origin ${{ github.event.pull_request.head.sha }}
          git checkout ${{ github.event.pull_request.head.ref }}
          git reset --hard origin/${{ github.event.pull_request.head.ref }}
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

      - name: Run Claude Code
        id: claude
        # TODO: switch this out once they merge their v1
        uses: km-anthropic/claude-code-action@v1-dev
        uses: anthropics/claude-code-action@beta
        with:
          timeout_minutes: "180"
          claude_args: |
            --dangerously-skip-permissions
            --system-prompt "You are working on the Bun codebase"
          claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
          anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
2  .github/workflows/docs.yml  vendored
@@ -6,8 +6,6 @@ on:
      - "docs/**"
      - "packages/bun-types/**.d.ts"
      - "CONTRIBUTING.md"
      - "src/cli/install.sh"
      - "src/cli/install.ps1"
    branches:
      - main
88  .github/workflows/format.yml  vendored
@@ -8,8 +8,10 @@ on:
  workflow_dispatch:
  pull_request:
  merge_group:
  push:
    branches: ["main"]
env:
  BUN_VERSION: "1.2.20"
  BUN_VERSION: "1.2.11"
  LLVM_VERSION: "19.1.7"
  LLVM_VERSION_MAJOR: "19"

@@ -35,75 +37,25 @@ jobs:
      - name: Setup Dependencies
        run: |
          bun install
          bun scripts/glob-sources.mjs
      - name: Format Code
      - name: Install LLVM
        run: |
          # Start prettier in background with prefixed output
          echo "::group::Prettier"
          (bun run prettier 2>&1 | sed 's/^/[prettier] /' || echo "[prettier] Failed with exit code $?") &
          PRETTIER_PID=$!

          # Start clang-format installation and formatting in background with prefixed output
          echo "::group::Clang-format"
          (
            echo "[clang-format] Installing clang-format-${{ env.LLVM_VERSION_MAJOR }}..."
            wget -qO- https://apt.llvm.org/llvm-snapshot.gpg.key | sudo tee /etc/apt/trusted.gpg.d/apt.llvm.org.asc > /dev/null
            echo "deb http://apt.llvm.org/$(lsb_release -cs)/ llvm-toolchain-$(lsb_release -cs)-${{ env.LLVM_VERSION_MAJOR }} main" | sudo tee /etc/apt/sources.list.d/llvm.list > /dev/null
            sudo apt-get update -qq
            sudo apt-get install -y -qq --no-install-recommends --no-install-suggests -o=Dpkg::Use-Pty=0 clang-format-${{ env.LLVM_VERSION_MAJOR }}
            echo "[clang-format] Running clang-format..."
            LLVM_VERSION_MAJOR=${{ env.LLVM_VERSION_MAJOR }} ./scripts/run-clang-format.sh format 2>&1 | sed 's/^/[clang-format] /'
          ) &
          CLANG_PID=$!

          # Setup Zig in temp directory and run zig fmt in background with prefixed output
          echo "::group::Zig fmt"
          (
            ZIG_TEMP=$(mktemp -d)
            echo "[zig] Downloading Zig (musl build)..."
            wget -q -O "$ZIG_TEMP/zig.zip" https://github.com/oven-sh/zig/releases/download/autobuild-e0b7c318f318196c5f81fdf3423816a7b5bb3112/bootstrap-x86_64-linux-musl.zip
            unzip -q -d "$ZIG_TEMP" "$ZIG_TEMP/zig.zip"
            export PATH="$ZIG_TEMP/bootstrap-x86_64-linux-musl:$PATH"
            echo "[zig] Running zig fmt..."
            zig fmt src 2>&1 | sed 's/^/[zig] /'
            ./scripts/sort-imports.ts src 2>&1 | sed 's/^/[zig] /'
            zig fmt src 2>&1 | sed 's/^/[zig] /'
            rm -rf "$ZIG_TEMP"
          ) &
          ZIG_PID=$!

          # Wait for all formatting tasks to complete
          echo ""
          echo "Running formatters in parallel..."
          FAILED=0

          if ! wait $PRETTIER_PID; then
            echo "::error::Prettier failed"
            FAILED=1
          fi
          echo "::endgroup::"

          if ! wait $CLANG_PID; then
            echo "::error::Clang-format failed"
            FAILED=1
          fi
          echo "::endgroup::"

          if ! wait $ZIG_PID; then
            echo "::error::Zig fmt failed"
            FAILED=1
          fi
          echo "::endgroup::"

          # Exit with error if any formatter failed
          if [ $FAILED -eq 1 ]; then
            echo "::error::One or more formatters failed"
            exit 1
          fi

          echo "✅ All formatters completed successfully"
          curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
      - name: Setup Zig
        uses: mlugg/setup-zig@v1
        with:
          version: 0.14.0
      - name: Zig Format
        run: |
          zig fmt src
          ./scripts/sort-imports.ts src
          zig fmt src
      - name: Prettier Format
        run: |
          bun run prettier
      - name: Clang Format
        run: |
          bun run clang-format
      - name: Ban Words
        run: |
          bun ./test/internal/ban-words.test.ts
          git rm -f cmake/sources/*.txt || true
      - uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
41  .github/workflows/glob-sources.yml  vendored  Normal file
@@ -0,0 +1,41 @@
name: Glob Sources

permissions:
  contents: write

on:
  workflow_call:
  workflow_dispatch:
  pull_request:

env:
  BUN_VERSION: "1.2.11"

jobs:
  glob-sources:
    name: Glob Sources
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Configure Git
        run: |
          git config --global core.autocrlf true
          git config --global core.ignorecase true
          git config --global core.precomposeUnicode true
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - name: Setup Dependencies
        run: |
          bun install
      - name: Glob sources
        run: bun scripts/glob-sources.mjs
      - name: Commit
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "`bun scripts/glob-sources.mjs`"
104  .github/workflows/labeled.yml  vendored
@@ -5,8 +5,6 @@ env:
on:
  issues:
    types: [labeled]
  pull_request_target:
    types: [labeled, opened, reopened, synchronize, unlabeled]

jobs:
  # on-bug:
@@ -45,46 +43,9 @@ jobs:
  #       token: ${{ secrets.GITHUB_TOKEN }}
  #       issue-number: ${{ github.event.issue.number }}
  #       labels: ${{ steps.add-labels.outputs.labels }}
  on-slop:
    runs-on: ubuntu-latest
    if: github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'slop')
    permissions:
      issues: write
      pull-requests: write
      contents: write
    steps:
      - name: Update PR title and body for slop and close
        uses: actions/github-script@v7
        with:
          script: |
            const pr = await github.rest.pulls.get({
              owner: context.repo.owner,
              repo: context.repo.repo,
              pull_number: context.issue.number
            });

            await github.rest.pulls.update({
              owner: context.repo.owner,
              repo: context.repo.repo,
              pull_number: context.issue.number,
              title: 'ai slop',
              body: 'This PR has been marked as AI slop and the description has been updated to avoid confusion or misleading reviewers.\n\nMany AI PRs are fine, but sometimes they submit a PR too early, fail to test if the problem is real, fail to reproduce the problem, or fail to test that the problem is fixed. If you think this PR is not AI slop, please leave a comment.',
              state: 'closed'
            });

            // Delete the branch if it's from a fork or if it's not a protected branch
            try {
              await github.rest.git.deleteRef({
                owner: context.repo.owner,
                repo: context.repo.repo,
                ref: `heads/${pr.data.head.ref}`
              });
            } catch (error) {
              console.log('Could not delete branch:', error.message);
            }
  on-labeled:
    runs-on: ubuntu-latest
    if: github.event_name == 'issues' && (github.event.label.name == 'crash' || github.event.label.name == 'needs repro')
    if: github.event.label.name == 'crash' || github.event.label.name == 'needs repro'
    permissions:
      issues: write
    steps:
@@ -105,16 +66,11 @@ jobs:
        env:
          GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
          GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
          GITHUB_ISSUE_NUMBER: ${{ github.event.issue.number }}
        shell: bash
        run: |
          LABELS=$(bun scripts/read-issue.ts)
          bun scripts/is-outdated.ts

          # Check for patterns that should close the issue
          CLOSE_ACTION=$(bun scripts/handle-crash-patterns.ts)
          echo "close-action=$CLOSE_ACTION" >> $GITHUB_OUTPUT

          if [[ -f "is-outdated.txt" ]]; then
            echo "is-outdated=true" >> $GITHUB_OUTPUT
          fi
@@ -123,10 +79,6 @@ jobs:
            echo "outdated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
          fi

          if [[ -f "is-standalone.txt" ]]; then
            echo "is-standalone=true" >> $GITHUB_OUTPUT
          fi

          if [[ -f "is-very-outdated.txt" ]]; then
            echo "is-very-outdated=true" >> $GITHUB_OUTPUT
            LABELS="$LABELS,old-version"
@@ -136,32 +88,9 @@ jobs:

          echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
          echo "labels=$LABELS" >> $GITHUB_OUTPUT
          rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt is-standalone.txt
      - name: Close issue if pattern detected
        if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close == true
        uses: actions/github-script@v7
        with:
          script: |
            const closeAction = JSON.parse('${{ steps.add-labels.outputs.close-action }}');

            // Comment with the reason
            await github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              body: closeAction.comment
            });

            // Close the issue
            await github.rest.issues.update({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              state: 'closed',
              state_reason: closeAction.reason
            });
          rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt
      - name: Generate comment text with Sentry Link
        if: github.event.label.name == 'crash' && fromJson(steps.add-labels.outputs.close-action).close != true
        if: github.event.label.name == 'crash'
        # ignore if fail
        continue-on-error: true
        id: generate-comment-text
@@ -195,17 +124,8 @@ jobs:
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.issue.number }}
          labels: ${{ steps.add-labels.outputs.labels }}
      - name: Comment outdated (standalone executable)
        if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
        uses: actions-cool/issues-helper@v3
        with:
          actions: "create-comment"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.issue.number }}
          body: |
            @${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.
      - name: Comment outdated
        if: steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
        if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
        uses: actions-cool/issues-helper@v3
        with:
          actions: "create-comment"
@@ -219,22 +139,8 @@ jobs:
            ```sh
            bun upgrade
            ```
      - name: Comment with Sentry Link and outdated version (standalone executable)
        if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone == 'true'
        uses: actions-cool/issues-helper@v3
        with:
          actions: "create-comment"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.issue.number }}
          body: |
            @${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but the standalone executable is running Bun v${{ steps.add-labels.outputs.outdated }}. When the CLI using Bun's single-file executable next updates it might be fixed.

            For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).

            <!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
            <!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
      - name: Comment with Sentry Link and outdated version
        if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true' && steps.add-labels.outputs.is-standalone != 'true'
        if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
        uses: actions-cool/issues-helper@v3
        with:
          actions: "create-comment"
89  .github/workflows/on-submodule-update.yml  vendored  Normal file
@@ -0,0 +1,89 @@
name: Comment on updated submodule

on:
  pull_request_target:
    paths:
      - "src/generated_versions_list.zig"
      - ".github/workflows/on-submodule-update.yml"

jobs:
  comment:
    name: Comment
    runs-on: ubuntu-latest
    if: ${{ github.repository_owner == 'oven-sh' }}
    permissions:
      contents: read
      pull-requests: write
      issues: write
    steps:
      - name: Checkout current
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            src
      - name: Hash generated versions list
        id: hash
        run: |
          echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
      - name: Checkout base
        uses: actions/checkout@v4
        with:
          ref: ${{ github.base_ref }}
          sparse-checkout: |
            src
      - name: Hash base
        id: base
        run: |
          echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
      - name: Compare
        id: compare
        run: |
          if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
            echo "changed=true" >> $GITHUB_OUTPUT
          else
            echo "changed=false" >> $GITHUB_OUTPUT
          fi
      - name: Find Comment
        id: comment
        uses: peter-evans/find-comment@v3
        with:
          issue-number: ${{ github.event.pull_request.number }}
          comment-author: github-actions[bot]
          body-includes: <!-- generated-comment submodule-updated -->
      - name: Write Warning Comment
        uses: peter-evans/create-or-update-comment@v4
        if: steps.compare.outputs.changed == 'true'
        with:
          comment-id: ${{ steps.comment.outputs.comment-id }}
          issue-number: ${{ github.event.pull_request.number }}
          edit-mode: replace
          body: |
            ⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.

            If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.

            <!-- generated-comment submodule-updated -->
      - name: Add labels
        uses: actions-cool/issues-helper@v3
        if: steps.compare.outputs.changed == 'true'
        with:
          actions: "add-labels"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.pull_request.number }}
          labels: "changed-submodules"
      - name: Remove labels
        uses: actions-cool/issues-helper@v3
        if: steps.compare.outputs.changed == 'false'
        with:
          actions: "remove-labels"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.pull_request.number }}
          labels: "changed-submodules"
      - name: Delete outdated comment
        uses: actions-cool/issues-helper@v3
        if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
        with:
          actions: "delete-comment"
          token: ${{ secrets.GITHUB_TOKEN }}
          issue-number: ${{ github.event.pull_request.number }}
          comment-id: ${{ steps.comment.outputs.comment-id }}
2  .github/workflows/update-cares.yml  vendored
@@ -80,7 +80,7 @@ jobs:
      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v7
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
17  .github/workflows/update-hdrhistogram.yml  vendored
@@ -55,13 +55,10 @@ jobs:
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi

          # Try to get commit SHA from tag object (for annotated tags)
          # If it fails, assume it's a lightweight tag pointing directly to commit
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" 2>/dev/null | jq -r '.object.sha // empty')
          if [ -z "$LATEST_SHA" ]; then
            # Lightweight tag - SHA points directly to commit
            LATEST_SHA="$LATEST_TAG_SHA"
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/HdrHistogram/HdrHistogram_c/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
            exit 1
          fi

          if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
@@ -83,7 +80,7 @@ jobs:

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v7
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
@@ -91,7 +88,7 @@ jobs:
          commit-message: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-hdrhistogram-${{ github.run_number }}
          branch: deps/update-cares-${{ github.run_number }}
          body: |
            ## What does this PR do?

@@ -99,4 +96,4 @@ jobs:

            Compare: https://github.com/HdrHistogram/HdrHistogram_c/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-hdrhistogram.yml)
            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
35  .github/workflows/update-highway.yml  vendored
@@ -50,33 +50,14 @@ jobs:
            exit 1
          fi

          TAG_REF=$(curl -sL "https://api.github.com/repos/google/highway/git/refs/tags/$LATEST_TAG")
          if [ -z "$TAG_REF" ]; then
            echo "Error: Could not fetch tag reference for $LATEST_TAG"
            exit 1
          fi

          TAG_OBJECT_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
          TAG_OBJECT_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')

          if [ -z "$TAG_OBJECT_SHA" ] || [ "$TAG_OBJECT_SHA" = "null" ]; then
          LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
          if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG"
            exit 1
          fi

          # Handle both lightweight tags (type: commit) and annotated tags (type: tag)
          if [ "$TAG_OBJECT_TYPE" = "commit" ]; then
            # Lightweight tag - object.sha is already the commit SHA
            LATEST_SHA="$TAG_OBJECT_SHA"
          elif [ "$TAG_OBJECT_TYPE" = "tag" ]; then
            # Annotated tag - need to fetch the tag object to get the commit SHA
            LATEST_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/tags/$TAG_OBJECT_SHA" | jq -r '.object.sha')
            if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
              echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $TAG_OBJECT_SHA"
              exit 1
            fi
          else
            echo "Error: Unexpected tag object type: $TAG_OBJECT_TYPE"
          LATEST_SHA=$(curl -sL "https://api.github.com/repos/google/highway/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
          if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
            echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
            exit 1
          fi

@@ -99,7 +80,7 @@ jobs:

      - name: Create Pull Request
        if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
        uses: peter-evans/create-pull-request@v7
        uses: peter-evans/create-pull-request@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          add-paths: |
@@ -107,7 +88,7 @@ jobs:
          commit-message: "deps: update highway to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
          title: "deps: update highway to ${{ steps.check-version.outputs.tag }}"
          delete-branch: true
          branch: deps/update-highway-${{ github.run_number }}
          branch: deps/update-cares-${{ github.run_number }}
          body: |
            ## What does this PR do?

@@ -115,4 +96,4 @@ jobs:

            Compare: https://github.com/google/highway/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-highway.yml)
            Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
2
.github/workflows/update-libarchive.yml
vendored
2
.github/workflows/update-libarchive.yml
vendored
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
2
.github/workflows/update-libdeflate.yml
vendored
2
.github/workflows/update-libdeflate.yml
vendored
@@ -80,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
||||
24
.github/workflows/update-lolhtml.yml
vendored
24
.github/workflows/update-lolhtml.yml
vendored
@@ -50,27 +50,15 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get the commit SHA that the tag points to
|
||||
# This handles both lightweight tags (direct commit refs) and annotated tags (tag objects)
|
||||
TAG_REF_RESPONSE=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG")
|
||||
LATEST_TAG_SHA=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.sha')
|
||||
TAG_OBJECT_TYPE=$(echo "$TAG_REF_RESPONSE" | jq -r '.object.type')
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$TAG_OBJECT_TYPE" = "tag" ]; then
|
||||
# This is an annotated tag, we need to get the commit it points to
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
# This is a lightweight tag pointing directly to a commit
|
||||
LATEST_SHA="$LATEST_TAG_SHA"
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
@@ -92,7 +80,7 @@ jobs:
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
|
29 .github/workflows/update-lshpack.yml vendored
@@ -50,32 +50,15 @@ jobs:
exit 1
fi

# Get the tag reference, which contains both SHA and type
TAG_REF=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG")
if [ -z "$TAG_REF" ]; then
echo "Error: Could not fetch tag reference for $LATEST_TAG"
exit 1
fi

LATEST_TAG_SHA=$(echo "$TAG_REF" | jq -r '.object.sha')
TAG_TYPE=$(echo "$TAG_REF" | jq -r '.object.type')

LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/refs/tags/$LATEST_TAG" | jq -r '.object.sha')
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
exit 1
fi

# If it's an annotated tag, we need to dereference it to get the commit SHA
# If it's a lightweight tag, the SHA already points to the commit
if [ "$TAG_TYPE" = "tag" ]; then
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch commit SHA for annotated tag $LATEST_TAG"
exit 1
fi
else
# For lightweight tags, the SHA is already the commit SHA
LATEST_SHA="$LATEST_TAG_SHA"
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
exit 1
fi

if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
@@ -97,7 +80,7 @@ jobs:

- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |

2 .github/workflows/update-sqlite3.yml vendored
@@ -91,7 +91,7 @@ jobs:

- name: Create Pull Request
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
79 .github/workflows/update-vendor.yml vendored
@@ -1,79 +0,0 @@
name: Update vendor

on:
schedule:
- cron: "0 4 * * 0"
workflow_dispatch:

jobs:
check-update:
runs-on: ubuntu-latest
permissions:
contents: write
pull-requests: write

strategy:
matrix:
package:
- elysia

steps:
- uses: actions/checkout@v4
- uses: oven-sh/setup-bun@v2

- name: Check version
id: check-version
run: |
set -euo pipefail

# Extract the commit hash from the line after COMMIT
current=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].tag' ${{ matrix.package }})
repository=$(bun -p '(await Bun.file("test/vendor.json").json()).filter(v=>v.package===process.argv[1])[0].repository' ${{ matrix.package }} | cut -d'/' -f4,5)

if [ -z "$current" ]; then
echo "Error: Could not find COMMIT line in test/vendor.json"
exit 1
fi

echo "current=$current" >> $GITHUB_OUTPUT
echo "repository=$repository" >> $GITHUB_OUTPUT

LATEST_RELEASE=$(curl -sL https://api.github.com/repos/${repository}/releases/latest)
if [ -z "$LATEST_RELEASE" ]; then
echo "Error: Failed to fetch latest release from GitHub API"
exit 1
fi

LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
echo "Error: Could not extract tag name from GitHub API response"
exit 1
fi

echo "latest=$LATEST_TAG" >> $GITHUB_OUTPUT

- name: Update version if needed
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
run: |
set -euo pipefail
bun -e 'await Bun.write("test/vendor.json", JSON.stringify((await Bun.file("test/vendor.json").json()).map(v=>{if(v.package===process.argv[1])v.tag=process.argv[2];return v;}), null, 2) + "\n")' ${{ matrix.package }} ${{ steps.check-version.outputs.latest }}

- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |
test/vendor.json
commit-message: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }}"
delete-branch: true
branch: deps/update-${{ matrix.package }}-${{ github.run_number }}
body: |
## What does this PR do?

Updates ${{ matrix.package }} to version ${{ steps.check-version.outputs.latest }}

Compare: https://github.com/${{ steps.check-version.outputs.repository }}/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}

Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-vendor.yml)
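For context on the deleted update-vendor.yml above: its core step reads test/vendor.json, finds the matrix package's entry, and rewrites that entry's `tag` field to the latest GitHub release. A hedged TypeScript sketch of that step, assuming the `[{ package, repository, tag }]` shape that the workflow's `bun -p`/`bun -e` one-liners imply:

```typescript
// Sketch only; mirrors the inline `bun -e` script from the deleted workflow above.
type VendorEntry = { package: string; repository: string; tag: string };

async function bumpVendorTag(pkg: string, latestTag: string): Promise<void> {
  const entries: VendorEntry[] = await Bun.file("test/vendor.json").json();
  const updated = entries.map(v => (v.package === pkg ? { ...v, tag: latestTag } : v));
  await Bun.write("test/vendor.json", JSON.stringify(updated, null, 2) + "\n");
}

// Example (version string is a placeholder):
// await bumpVendorTag("elysia", "1.2.0");
```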
2 .github/workflows/update-zstd.yml vendored
@@ -80,7 +80,7 @@ jobs:

- name: Create Pull Request
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
uses: peter-evans/create-pull-request@v7
uses: peter-evans/create-pull-request@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: |

5 .github/workflows/vscode-release.yml vendored
@@ -45,8 +45,3 @@ jobs:
env:
VSCE_PAT: ${{ secrets.VSCODE_EXTENSION }}
working-directory: packages/bun-vscode/extension

- uses: actions/upload-artifact@v4
with:
name: bun-vscode-${{ github.event.inputs.version }}.vsix
path: packages/bun-vscode/extension/bun-vscode-${{ github.event.inputs.version }}.vsix

7 .gitignore vendored
@@ -184,9 +184,4 @@ codegen-for-zig-team.tar.gz
*.sock
scratch*.{js,ts,tsx,cjs,mjs}

*.bun-build

scripts/lldb-inline

# We regenerate these in all the build scripts
cmake/sources/*.txt
*.bun-build

14 .vscode/launch.json generated vendored
@@ -22,12 +22,6 @@
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
// "BUN_JSC_validateExceptionChecks": "1",
// "BUN_JSC_dumpSimulatedThrows": "1",
// "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
// "BUN_DESTRUCT_VM_ON_EXIT": "1",
// "ASAN_OPTIONS": "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1:abort_on_error=1",
// "LSAN_OPTIONS": "malloc_context_size=100:print_suppressions=1:suppressions=${workspaceFolder}/test/leaksan.supp",
},
"console": "internalConsole",
"sourceMap": {
@@ -60,17 +54,11 @@
"name": "bun run [file]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["${file}"],
"cwd": "${workspaceFolder}",
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "0",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
// "BUN_JSC_validateExceptionChecks": "1",
// "BUN_JSC_dumpSimulatedThrows": "1",
// "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
// "BUN_DESTRUCT_VM_ON_EXIT": "1",
// "ASAN_OPTIONS": "allow_user_segv_handler=1:disable_coredump=0:detect_leaks=1:abort_on_error=1",
// "LSAN_OPTIONS": "malloc_context_size=100:print_suppressions=1:suppressions=${workspaceFolder}/test/leaksan.supp",
},
"console": "internalConsole",
"sourceMap": {

2 .vscode/settings.json vendored
@@ -168,5 +168,5 @@
"WebKit/WebInspectorUI": true,
},
"git.detectSubmodules": false,
"bun.test.customScript": "./build/debug/bun-debug test",
// "bun.test.customScript": "./build/debug/bun-debug test"
}
50 CLAUDE.md
@@ -4,14 +4,18 @@ This is the Bun repository - an all-in-one JavaScript runtime & toolkit designed

### Build Commands

- **Build Bun**: `bun bd`
- **Build debug version**: `bun bd`
- Creates a debug build at `./build/debug/bun-debug`
- **CRITICAL**: no need for a timeout, the build is really fast!
- Compilation takes ~5 minutes. Don't timeout, be patient.
- **Run tests with your debug build**: `bun bd test <test-file>`
- **CRITICAL**: Never use `bun test` directly - it won't include your changes
- **Run any command with debug build**: `bun bd <command>`

Tip: Bun is already installed and in $PATH. The `bd` subcommand is a package.json script.
### Other Build Variants

- `bun run build:release` - Release build

Address sanitizer is enabled by default in debug builds of Bun.

## Testing

@@ -39,11 +43,11 @@ Tests use Bun's Jest-compatible test runner with proper test fixtures:

```typescript
import { test, expect } from "bun:test";
import { bunEnv, bunExe, normalizeBunSnapshot, tempDir } from "harness";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";

test("my feature", async () => {
// Create temp directory with test files
using dir = tempDir("test-prefix", {
const dir = tempDirWithFiles("test-prefix", {
"index.js": `console.log("hello");`,
});

@@ -51,26 +55,20 @@ test("my feature", async () => {
await using proc = Bun.spawn({
cmd: [bunExe(), "index.js"],
env: bunEnv,
cwd: String(dir),
stderr: "pipe",
cwd: dir,
});

const [stdout, stderr, exitCode] = await Promise.all([
proc.stdout.text(),
proc.stderr.text(),
new Response(proc.stdout).text(),
new Response(proc.stderr).text(),
proc.exited,
]);

expect(exitCode).toBe(0);
// Prefer snapshot tests over expect(stdout).toBe("hello\n");
expect(normalizeBunSnapshot(stdout, dir)).toMatchInlineSnapshot(`"hello"`);
expect(stdout).toBe("hello\n");
});
```

- Always use `port: 0`. Do not hardcode ports. Do not use your own random port number function.
- Use `normalizeBunSnapshot` to normalize snapshot output of the test.
- NEVER write tests that check for no "panic" or "uncaught exception" or similar in the test output. That is NOT a valid test.

## Code Architecture

### Language Structure
@@ -135,6 +133,7 @@ test("my feature", async () => {
When implementing JavaScript classes in C++:

1. Create three classes if there's a public constructor:

- `class Foo : public JSC::JSDestructibleObject` (if has C++ fields)
- `class FooPrototype : public JSC::JSNonFinalObject`
- `class FooConstructor : public JSC::InternalFunction`
@@ -194,6 +193,7 @@ Built-in JavaScript modules use special syntax and are organized as:
```

3. **Debug helpers**:

- `$debug()` - Like console.log but stripped in release builds
- `$assert()` - Assertions stripped in release builds
- `if($debug) {}` - Check if debug env var is set
@@ -221,17 +221,15 @@ bun ci
## Important Development Notes

1. **Never use `bun test` or `bun <file>` directly** - always use `bun bd test` or `bun bd <command>`. `bun bd` compiles & runs the debug build.
2. **All changes must be tested** - if you're not testing your changes, you're not done.
3. **Get your tests to pass**. If you didn't run the tests, your code does not work.
4. **Follow existing code style** - check neighboring files for patterns
5. **Create tests in the right folder** in `test/` and the test must end in `.test.ts` or `.test.tsx`
6. **Use absolute paths** - Always use absolute paths in file operations
7. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
8. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
9. **Cross-platform** - Run `bun run zig:check-all` to compile the Zig code on all platforms when making platform-specific changes
10. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
11. **Be humble & honest** - NEVER overstate what you got done or what actually works in commits, PRs or in messages to the user.
12. **Branch names must start with `claude/`** - This is a requirement for the CI to work.
2. **Use `await using`** for proper resource cleanup with Bun APIs (Bun.spawn, Bun.serve, Bun.connect, etc.)
3. **Follow existing code style** - check neighboring files for patterns
4. **Create regression tests** in `test/regression/issue/` when fixing bugs
5. **Use absolute paths** - Always use absolute paths in file operations
6. **Avoid shell commands** - Don't use `find` or `grep` in tests; use Bun's Glob and built-in tools
7. **Memory management** - In Zig code, be careful with allocators and use defer for cleanup
8. **Cross-platform** - Test on macOS, Linux, and Windows when making platform-specific changes
9. **Debug builds** - Use `BUN_DEBUG_QUIET_LOGS=1` to disable debug logging, or `BUN_DEBUG_<scope>=1` to enable specific scopes
10. **Transpiled source** - Find transpiled files in `/tmp/bun-debug-src/` for debugging

## Key APIs and Features
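To make the CLAUDE.md testing hunks above concrete, here is a small self-contained sketch of the spawn-and-assert pattern both sides of that diff describe. It assumes the `harness` helpers exist exactly as named in the diff (`bunExe`, `bunEnv`, `tempDirWithFiles`); which assertion style to prefer (plain `toBe` versus `normalizeBunSnapshot` snapshots) is precisely what the two sides disagree on:

```typescript
import { expect, test } from "bun:test";
// Assumed from the diff above: these helpers come from the repo's test harness.
import { bunEnv, bunExe, tempDirWithFiles } from "harness";

test("prints hello", async () => {
  // Fixture directory containing a single script to run.
  const dir = tempDirWithFiles("hello-fixture", { "index.js": `console.log("hello");` });

  // `await using` disposes the subprocess when the test scope exits.
  await using proc = Bun.spawn({ cmd: [bunExe(), "index.js"], cwd: dir, env: bunEnv });

  const [stdout, exitCode] = await Promise.all([new Response(proc.stdout).text(), proc.exited]);
  expect(exitCode).toBe(0);
  expect(stdout).toBe("hello\n");
});
```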
@@ -31,11 +31,6 @@ include(SetupCcache)
parse_package_json(VERSION_VARIABLE DEFAULT_VERSION)
optionx(VERSION STRING "The version of Bun" DEFAULT ${DEFAULT_VERSION})
project(Bun VERSION ${VERSION})

# Bun uses C++23, which is compatible with BoringSSL's C++17 requirement
set(CMAKE_CXX_STANDARD 23)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

include(Options)
include(CompilerFlags)

@@ -48,9 +43,6 @@ include(SetupEsbuild)
include(SetupZig)
include(SetupRust)

# Generate dependency versions header
include(GenerateDependencyVersions)

# --- Targets ---

include(BuildBun)

@@ -21,7 +21,7 @@ $ sudo pacman -S base-devel ccache cmake git go libiconv libtool make ninja pkg-
```

```bash#Fedora
$ sudo dnf install cargo clang19 llvm19 lld19 ccache cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
$ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
```

```bash#openSUSE Tumbleweed
@@ -223,8 +223,8 @@ $ git clone https://github.com/oven-sh/WebKit vendor/WebKit
$ git -C vendor/WebKit checkout <commit_hash>

# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `bun run jsc:build` for a release build
$ bun run jsc:build:debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
# Optionally, you can use `make jsc` for a release build
$ make jsc-debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h

# After an initial run of `make jsc-debug`, you can rebuild JSC with:
$ cmake --build vendor/WebKit/WebKitBuild/Debug --target jsc && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
@@ -15,13 +15,11 @@
|
||||
"eventemitter3": "^5.0.0",
|
||||
"execa": "^8.0.1",
|
||||
"fast-glob": "3.3.1",
|
||||
"fastify": "^5.0.0",
|
||||
"fdir": "^6.1.0",
|
||||
"mitata": "^1.0.25",
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"string-width": "7.1.0",
|
||||
"strip-ansi": "^7.1.0",
|
||||
"tinycolor2": "^1.6.0",
|
||||
"zx": "^7.2.3",
|
||||
},
|
||||
@@ -95,18 +93,6 @@
|
||||
|
||||
"@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.14.54", "", { "os": "linux", "cpu": "none" }, "sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw=="],
|
||||
|
||||
"@fastify/ajv-compiler": ["@fastify/ajv-compiler@4.0.2", "", { "dependencies": { "ajv": "^8.12.0", "ajv-formats": "^3.0.1", "fast-uri": "^3.0.0" } }, "sha512-Rkiu/8wIjpsf46Rr+Fitd3HRP+VsxUFDDeag0hs9L0ksfnwx2g7SPQQTFL0E8Qv+rfXzQOxBJnjUB9ITUDjfWQ=="],
|
||||
|
||||
"@fastify/error": ["@fastify/error@4.2.0", "", {}, "sha512-RSo3sVDXfHskiBZKBPRgnQTtIqpi/7zhJOEmAxCiBcM7d0uwdGdxLlsCaLzGs8v8NnxIRlfG0N51p5yFaOentQ=="],
|
||||
|
||||
"@fastify/fast-json-stringify-compiler": ["@fastify/fast-json-stringify-compiler@5.0.3", "", { "dependencies": { "fast-json-stringify": "^6.0.0" } }, "sha512-uik7yYHkLr6fxd8hJSZ8c+xF4WafPK+XzneQDPU+D10r5X19GW8lJcom2YijX2+qtFF1ENJlHXKFM9ouXNJYgQ=="],
|
||||
|
||||
"@fastify/forwarded": ["@fastify/forwarded@3.0.0", "", {}, "sha512-kJExsp4JCms7ipzg7SJ3y8DwmePaELHxKYtg+tZow+k0znUTf3cb+npgyqm8+ATZOdmfgfydIebPDWM172wfyA=="],
|
||||
|
||||
"@fastify/merge-json-schemas": ["@fastify/merge-json-schemas@0.2.1", "", { "dependencies": { "dequal": "^2.0.3" } }, "sha512-OA3KGBCy6KtIvLf8DINC5880o5iBlDX4SxzLQS8HorJAbqluzLRn80UXU0bxZn7UOFhFgpRJDasfwn9nG4FG4A=="],
|
||||
|
||||
"@fastify/proxy-addr": ["@fastify/proxy-addr@5.0.0", "", { "dependencies": { "@fastify/forwarded": "^3.0.0", "ipaddr.js": "^2.1.0" } }, "sha512-37qVVA1qZ5sgH7KpHkkC4z9SK6StIsIcOmpjvMPXNb3vx2GQxhZocogVYbr2PbbeLCQxYIPDok307xEvRZOzGA=="],
|
||||
|
||||
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.1.1", "", { "dependencies": { "@jridgewell/set-array": "^1.0.0", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w=="],
|
||||
|
||||
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.0", "", {}, "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w=="],
|
||||
@@ -157,20 +143,10 @@
|
||||
|
||||
"@types/which": ["@types/which@3.0.3", "", {}, "sha512-2C1+XoY0huExTbs8MQv1DuS5FS86+SEjdM9F/+GS61gg5Hqbtj8ZiDSx8MfWcyei907fIPbfPGCOrNUTnVHY1g=="],
|
||||
|
||||
"abstract-logging": ["abstract-logging@2.0.1", "", {}, "sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA=="],
|
||||
|
||||
"ajv": ["ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g=="],
|
||||
|
||||
"ajv-formats": ["ajv-formats@3.0.1", "", { "dependencies": { "ajv": "^8.0.0" } }, "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ=="],
|
||||
|
||||
"ansi-regex": ["ansi-regex@6.0.1", "", {}, "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA=="],
|
||||
|
||||
"ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],
|
||||
|
||||
"atomic-sleep": ["atomic-sleep@1.0.0", "", {}, "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="],
|
||||
|
||||
"avvio": ["avvio@9.1.0", "", { "dependencies": { "@fastify/error": "^4.0.0", "fastq": "^1.17.1" } }, "sha512-fYASnYi600CsH/j9EQov7lECAniYiBFiiAtBNuZYLA2leLe9qOvZzqYHFjtIj6gD2VMoMLP14834LFWvr4IfDw=="],
|
||||
|
||||
"benchmark": ["benchmark@2.1.4", "", { "dependencies": { "lodash": "^4.17.4", "platform": "^1.3.3" } }, "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ=="],
|
||||
|
||||
"braces": ["braces@3.0.2", "", { "dependencies": { "fill-range": "^7.0.1" } }, "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A=="],
|
||||
@@ -191,16 +167,12 @@
|
||||
|
||||
"convert-source-map": ["convert-source-map@1.9.0", "", {}, "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A=="],
|
||||
|
||||
"cookie": ["cookie@1.0.2", "", {}, "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="],
|
||||
|
||||
"cross-spawn": ["cross-spawn@7.0.3", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w=="],
|
||||
|
||||
"data-uri-to-buffer": ["data-uri-to-buffer@4.0.1", "", {}, "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A=="],
|
||||
|
||||
"debug": ["debug@4.3.4", "", { "dependencies": { "ms": "2.1.2" } }, "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ=="],
|
||||
|
||||
"dequal": ["dequal@2.0.3", "", {}, "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="],
|
||||
|
||||
"dir-glob": ["dir-glob@3.0.1", "", { "dependencies": { "path-type": "^4.0.0" } }, "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA=="],
|
||||
|
||||
"duplexer": ["duplexer@0.1.2", "", {}, "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg=="],
|
||||
@@ -261,22 +233,10 @@
|
||||
|
||||
"execa": ["execa@8.0.1", "", { "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^8.0.1", "human-signals": "^5.0.0", "is-stream": "^3.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^5.1.0", "onetime": "^6.0.0", "signal-exit": "^4.1.0", "strip-final-newline": "^3.0.0" } }, "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg=="],
|
||||
|
||||
"fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="],
|
||||
|
||||
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
|
||||
|
||||
"fast-glob": ["fast-glob@3.3.1", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.4" } }, "sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg=="],
|
||||
|
||||
"fast-json-stringify": ["fast-json-stringify@6.0.1", "", { "dependencies": { "@fastify/merge-json-schemas": "^0.2.0", "ajv": "^8.12.0", "ajv-formats": "^3.0.1", "fast-uri": "^3.0.0", "json-schema-ref-resolver": "^2.0.0", "rfdc": "^1.2.0" } }, "sha512-s7SJE83QKBZwg54dIbD5rCtzOBVD43V1ReWXXYqBgwCwHLYAAT0RQc/FmrQglXqWPpz6omtryJQOau5jI4Nrvg=="],
|
||||
|
||||
"fast-querystring": ["fast-querystring@1.1.2", "", { "dependencies": { "fast-decode-uri-component": "^1.0.1" } }, "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg=="],
|
||||
|
||||
"fast-redact": ["fast-redact@3.5.0", "", {}, "sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A=="],
|
||||
|
||||
"fast-uri": ["fast-uri@3.0.6", "", {}, "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw=="],
|
||||
|
||||
"fastify": ["fastify@5.5.0", "", { "dependencies": { "@fastify/ajv-compiler": "^4.0.0", "@fastify/error": "^4.0.0", "@fastify/fast-json-stringify-compiler": "^5.0.0", "@fastify/proxy-addr": "^5.0.0", "abstract-logging": "^2.0.1", "avvio": "^9.0.0", "fast-json-stringify": "^6.0.0", "find-my-way": "^9.0.0", "light-my-request": "^6.0.0", "pino": "^9.0.0", "process-warning": "^5.0.0", "rfdc": "^1.3.1", "secure-json-parse": "^4.0.0", "semver": "^7.6.0", "toad-cache": "^3.7.0" } }, "sha512-ZWSWlzj3K/DcULCnCjEiC2zn2FBPdlZsSA/pnPa/dbUfLvxkD/Nqmb0XXMXLrWkeM4uQPUvjdJpwtXmTfriXqw=="],
|
||||
|
||||
"fastq": ["fastq@1.15.0", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw=="],
|
||||
|
||||
"fdir": ["fdir@6.1.0", "", { "peerDependencies": { "picomatch": "2.x" } }, "sha512-274qhz5PxNnA/fybOu6apTCUnM0GnO3QazB6VH+oag/7DQskdYq8lm07ZSm90kEQuWYH5GvjAxGruuHrEr0bcg=="],
|
||||
@@ -285,8 +245,6 @@
|
||||
|
||||
"fill-range": ["fill-range@7.0.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ=="],
|
||||
|
||||
"find-my-way": ["find-my-way@9.3.0", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-querystring": "^1.0.0", "safe-regex2": "^5.0.0" } }, "sha512-eRoFWQw+Yv2tuYlK2pjFS2jGXSxSppAs3hSQjfxVKxM5amECzIgYYc1FEI8ZmhSh/Ig+FrKEz43NLRKJjYCZVg=="],
|
||||
|
||||
"formdata-polyfill": ["formdata-polyfill@4.0.10", "", { "dependencies": { "fetch-blob": "^3.1.2" } }, "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g=="],
|
||||
|
||||
"from": ["from@0.1.7", "", {}, "sha512-twe20eF1OxVxp/ML/kq2p1uc6KvFK/+vs8WjEbeKmV2He22MKm7YF2ANIt+EOqhJ5L3K/SuuPhk0hWQDjOM23g=="],
|
||||
@@ -315,8 +273,6 @@
|
||||
|
||||
"ignore": ["ignore@5.3.0", "", {}, "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg=="],
|
||||
|
||||
"ipaddr.js": ["ipaddr.js@2.2.0", "", {}, "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA=="],
|
||||
|
||||
"is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="],
|
||||
|
||||
"is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
|
||||
@@ -333,16 +289,10 @@
|
||||
|
||||
"jsesc": ["jsesc@2.5.2", "", { "bin": { "jsesc": "bin/jsesc" } }, "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA=="],
|
||||
|
||||
"json-schema-ref-resolver": ["json-schema-ref-resolver@2.0.1", "", { "dependencies": { "dequal": "^2.0.3" } }, "sha512-HG0SIB9X4J8bwbxCbnd5FfPEbcXAJYTi1pBJeP/QPON+w8ovSME8iRG+ElHNxZNX2Qh6eYn1GdzJFS4cDFfx0Q=="],
|
||||
|
||||
"json-schema-traverse": ["json-schema-traverse@1.0.0", "", {}, "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="],
|
||||
|
||||
"json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
|
||||
|
||||
"jsonfile": ["jsonfile@6.1.0", "", { "dependencies": { "universalify": "^2.0.0" }, "optionalDependencies": { "graceful-fs": "^4.1.6" } }, "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ=="],
|
||||
|
||||
"light-my-request": ["light-my-request@6.6.0", "", { "dependencies": { "cookie": "^1.0.1", "process-warning": "^4.0.0", "set-cookie-parser": "^2.6.0" } }, "sha512-CHYbu8RtboSIoVsHZ6Ye4cj4Aw/yg2oAFimlF7mNvfDV192LR7nDiKtSIfCuLT7KokPSTn/9kfVLm5OGN0A28A=="],
|
||||
|
||||
"lodash": ["lodash@4.17.21", "", {}, "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="],
|
||||
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
@@ -373,8 +323,6 @@
|
||||
|
||||
"npm-run-path": ["npm-run-path@5.2.0", "", { "dependencies": { "path-key": "^4.0.0" } }, "sha512-W4/tgAXFqFA0iL7fk0+uQ3g7wkL8xJmx3XdK0VGb4cHW//eZTtKGvFBBoRKVTpY7n6ze4NL9ly7rgXcHufqXKg=="],
|
||||
|
||||
"on-exit-leak-free": ["on-exit-leak-free@2.1.2", "", {}, "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA=="],
|
||||
|
||||
"onetime": ["onetime@6.0.0", "", { "dependencies": { "mimic-fn": "^4.0.0" } }, "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ=="],
|
||||
|
||||
"path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],
|
||||
@@ -387,50 +335,24 @@
|
||||
|
||||
"picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
|
||||
|
||||
"pino": ["pino@9.9.0", "", { "dependencies": { "atomic-sleep": "^1.0.0", "fast-redact": "^3.1.1", "on-exit-leak-free": "^2.1.0", "pino-abstract-transport": "^2.0.0", "pino-std-serializers": "^7.0.0", "process-warning": "^5.0.0", "quick-format-unescaped": "^4.0.3", "real-require": "^0.2.0", "safe-stable-stringify": "^2.3.1", "sonic-boom": "^4.0.1", "thread-stream": "^3.0.0" }, "bin": { "pino": "bin.js" } }, "sha512-zxsRIQG9HzG+jEljmvmZupOMDUQ0Jpj0yAgE28jQvvrdYTlEaiGwelJpdndMl/MBuRr70heIj83QyqJUWaU8mQ=="],
|
||||
|
||||
"pino-abstract-transport": ["pino-abstract-transport@2.0.0", "", { "dependencies": { "split2": "^4.0.0" } }, "sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw=="],
|
||||
|
||||
"pino-std-serializers": ["pino-std-serializers@7.0.0", "", {}, "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA=="],
|
||||
|
||||
"platform": ["platform@1.3.6", "", {}, "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg=="],
|
||||
|
||||
"process-warning": ["process-warning@5.0.0", "", {}, "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA=="],
|
||||
|
||||
"ps-tree": ["ps-tree@1.2.0", "", { "dependencies": { "event-stream": "=3.3.4" }, "bin": { "ps-tree": "./bin/ps-tree.js" } }, "sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA=="],
|
||||
|
||||
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
|
||||
|
||||
"quick-format-unescaped": ["quick-format-unescaped@4.0.4", "", {}, "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="],
|
||||
|
||||
"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
|
||||
|
||||
"react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="],
|
||||
|
||||
"real-require": ["real-require@0.2.0", "", {}, "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg=="],
|
||||
|
||||
"require-from-string": ["require-from-string@2.0.2", "", {}, "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw=="],
|
||||
|
||||
"ret": ["ret@0.5.0", "", {}, "sha512-I1XxrZSQ+oErkRR4jYbAyEEu2I0avBvvMM5JN+6EBprOGRCs63ENqZ3vjavq8fBw2+62G5LF5XelKwuJpcvcxw=="],
|
||||
|
||||
"reusify": ["reusify@1.0.4", "", {}, "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw=="],
|
||||
|
||||
"rfdc": ["rfdc@1.4.1", "", {}, "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA=="],
|
||||
|
||||
"run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="],
|
||||
|
||||
"safe-regex2": ["safe-regex2@5.0.0", "", { "dependencies": { "ret": "~0.5.0" } }, "sha512-YwJwe5a51WlK7KbOJREPdjNrpViQBI3p4T50lfwPuDhZnE3XGVTlGvi+aolc5+RvxDD6bnUmjVsU9n1eboLUYw=="],
|
||||
|
||||
"safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="],
|
||||
|
||||
"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],
|
||||
|
||||
"secure-json-parse": ["secure-json-parse@4.0.0", "", {}, "sha512-dxtLJO6sc35jWidmLxo7ij+Eg48PM/kleBsxpC8QJE0qJICe+KawkDQmvCMZUr9u7WKVHgMW6vy3fQ7zMiFZMA=="],
|
||||
|
||||
"semver": ["semver@6.3.0", "", { "bin": { "semver": "./bin/semver.js" } }, "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw=="],
|
||||
|
||||
"set-cookie-parser": ["set-cookie-parser@2.7.1", "", {}, "sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ=="],
|
||||
|
||||
"shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="],
|
||||
|
||||
"shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="],
|
||||
@@ -441,12 +363,8 @@
|
||||
|
||||
"slash": ["slash@4.0.0", "", {}, "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew=="],
|
||||
|
||||
"sonic-boom": ["sonic-boom@4.2.0", "", { "dependencies": { "atomic-sleep": "^1.0.0" } }, "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww=="],
|
||||
|
||||
"split": ["split@0.3.3", "", { "dependencies": { "through": "2" } }, "sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA=="],
|
||||
|
||||
"split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="],
|
||||
|
||||
"stream-combiner": ["stream-combiner@0.0.4", "", { "dependencies": { "duplexer": "~0.1.1" } }, "sha512-rT00SPnTVyRsaSz5zgSPma/aHSOic5U1prhYdRy5HS2kTZviFpmDgzilbtsJsxiroqACmayynDN/9VzIbX5DOw=="],
|
||||
|
||||
"string-width": ["string-width@7.1.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw=="],
|
||||
@@ -457,8 +375,6 @@
|
||||
|
||||
"supports-color": ["supports-color@5.5.0", "", { "dependencies": { "has-flag": "^3.0.0" } }, "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow=="],
|
||||
|
||||
"thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="],
|
||||
|
||||
"through": ["through@2.3.8", "", {}, "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg=="],
|
||||
|
||||
"tinycolor2": ["tinycolor2@1.6.0", "", {}, "sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw=="],
|
||||
@@ -467,8 +383,6 @@
|
||||
|
||||
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
|
||||
|
||||
"toad-cache": ["toad-cache@3.7.0", "", {}, "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw=="],
|
||||
|
||||
"undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
|
||||
|
||||
"universalify": ["universalify@2.0.1", "", {}, "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw=="],
|
||||
@@ -493,14 +407,8 @@
|
||||
|
||||
"ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
|
||||
|
||||
"avvio/fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="],
|
||||
|
||||
"cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
|
||||
|
||||
"fastify/semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
|
||||
|
||||
"light-my-request/process-warning": ["process-warning@4.0.1", "", {}, "sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q=="],
|
||||
|
||||
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
|
||||
|
||||
"ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
|
||||
|
||||
@@ -18,7 +18,6 @@
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"string-width": "7.1.0",
|
||||
"strip-ansi": "^7.1.0",
|
||||
"tinycolor2": "^1.6.0",
|
||||
"zx": "^7.2.3"
|
||||
},
|
||||
|
||||
@@ -1,116 +0,0 @@
|
||||
// Benchmark for object fast path optimization in postMessage with Workers
|
||||
|
||||
import { bench, run } from "mitata";
|
||||
import { Worker } from "node:worker_threads";
|
||||
|
||||
const extraProperties = {
|
||||
a: "a!",
|
||||
b: "b!",
|
||||
"second": "c!",
|
||||
bool: true,
|
||||
nully: null,
|
||||
undef: undefined,
|
||||
int: 0,
|
||||
double: 1.234,
|
||||
falsy: false,
|
||||
};
|
||||
|
||||
const objects = {
|
||||
small: { property: "Hello world", ...extraProperties },
|
||||
medium: {
|
||||
property: Buffer.alloc("Hello World!!!".length * 1024, "Hello World!!!").toString(),
|
||||
...extraProperties,
|
||||
},
|
||||
large: {
|
||||
property: Buffer.alloc("Hello World!!!".length * 1024 * 256, "Hello World!!!").toString(),
|
||||
...extraProperties,
|
||||
},
|
||||
};
|
||||
|
||||
let worker;
|
||||
let receivedCount = new Int32Array(new SharedArrayBuffer(4));
|
||||
let sentCount = 0;
|
||||
|
||||
function createWorker() {
|
||||
const workerCode = `
|
||||
import { parentPort, workerData } from "node:worker_threads";
|
||||
|
||||
let int = workerData;
|
||||
|
||||
parentPort?.on("message", data => {
|
||||
switch (data.property.length) {
|
||||
case ${objects.small.property.length}:
|
||||
case ${objects.medium.property.length}:
|
||||
case ${objects.large.property.length}: {
|
||||
if (
|
||||
data.a === "a!" &&
|
||||
data.b === "b!" &&
|
||||
data.second === "c!" &&
|
||||
data.bool === true &&
|
||||
data.nully === null &&
|
||||
data.undef === undefined &&
|
||||
data.int === 0 &&
|
||||
data.double === 1.234 &&
|
||||
data.falsy === false) {
|
||||
Atomics.add(int, 0, 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
default: {
|
||||
throw new Error("Invalid data object: " + JSON.stringify(data));
|
||||
}
|
||||
}
|
||||
|
||||
});
|
||||
`;
|
||||
|
||||
worker = new Worker(workerCode, { eval: true, workerData: receivedCount });
|
||||
|
||||
worker.on("message", confirmationId => {});
|
||||
|
||||
worker.on("error", error => {
|
||||
console.error("Worker error:", error);
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize worker before running benchmarks
|
||||
createWorker();
|
||||
|
||||
function fmt(int) {
|
||||
if (int < 1000) {
|
||||
return `${int} chars`;
|
||||
}
|
||||
|
||||
if (int < 100000) {
|
||||
return `${(int / 1024) | 0} KB`;
|
||||
}
|
||||
|
||||
return `${(int / 1024 / 1024) | 0} MB`;
|
||||
}
|
||||
|
||||
// Benchmark postMessage with pure strings (uses fast path)
|
||||
bench("postMessage({ prop: " + fmt(objects.small.property.length) + " string, ...9 more props })", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(objects.small);
|
||||
});
|
||||
|
||||
bench("postMessage({ prop: " + fmt(objects.medium.property.length) + " string, ...9 more props })", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(objects.medium);
|
||||
});
|
||||
|
||||
bench("postMessage({ prop: " + fmt(objects.large.property.length) + " string, ...9 more props })", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(objects.large);
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000));
|
||||
|
||||
if (receivedCount[0] !== sentCount) {
|
||||
throw new Error("Expected " + receivedCount[0] + " to equal " + sentCount);
|
||||
}
|
||||
|
||||
// Cleanup worker
|
||||
worker?.terminate();
|
||||
@@ -1,77 +0,0 @@
|
||||
// Benchmark for string fast path optimization in postMessage with Workers
|
||||
|
||||
import { bench, run } from "mitata";
|
||||
import { Worker, isMainThread, parentPort } from "node:worker_threads";
|
||||
|
||||
// Test strings of different sizes
|
||||
const strings = {
|
||||
small: "Hello world",
|
||||
medium: Buffer.alloc("Hello World!!!".length * 1024, "Hello World!!!").toString(),
|
||||
large: Buffer.alloc("Hello World!!!".length * 1024 * 256, "Hello World!!!").toString(),
|
||||
};
|
||||
|
||||
let worker;
|
||||
let receivedCount = new Int32Array(new SharedArrayBuffer(4));
|
||||
let sentCount = 0;
|
||||
|
||||
function createWorker() {
|
||||
const workerCode = `
|
||||
import { parentPort, workerData } from "node:worker_threads";
|
||||
|
||||
let int = workerData;
|
||||
|
||||
parentPort?.on("message", data => {
|
||||
Atomics.add(int, 0, 1);
|
||||
});
|
||||
`;
|
||||
|
||||
worker = new Worker(workerCode, { eval: true, workerData: receivedCount });
|
||||
|
||||
worker.on("message", confirmationId => {});
|
||||
|
||||
worker.on("error", error => {
|
||||
console.error("Worker error:", error);
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize worker before running benchmarks
|
||||
createWorker();
|
||||
|
||||
function fmt(int) {
|
||||
if (int < 1000) {
|
||||
return `${int} chars`;
|
||||
}
|
||||
|
||||
if (int < 100000) {
|
||||
return `${(int / 1024) | 0} KB`;
|
||||
}
|
||||
|
||||
return `${(int / 1024 / 1024) | 0} MB`;
|
||||
}
|
||||
|
||||
// Benchmark postMessage with pure strings (uses fast path)
|
||||
bench("postMessage(" + fmt(strings.small.length) + " string)", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(strings.small);
|
||||
});
|
||||
|
||||
bench("postMessage(" + fmt(strings.medium.length) + " string)", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(strings.medium);
|
||||
});
|
||||
|
||||
bench("postMessage(" + fmt(strings.large.length) + " string)", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(strings.large);
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000));
|
||||
|
||||
if (receivedCount[0] !== sentCount) {
|
||||
throw new Error("Expected " + receivedCount[0] + " to equal " + sentCount);
|
||||
}
|
||||
|
||||
// Cleanup worker
|
||||
worker?.terminate();
|
||||
@@ -1,56 +0,0 @@
|
||||
// Benchmark for string fast path optimization in postMessage and structuredClone
|
||||
|
||||
import { bench, run } from "mitata";
|
||||
|
||||
// Test strings of different sizes
|
||||
const strings = {
|
||||
small: "Hello world",
|
||||
medium: "Hello World!!!".repeat(1024).split("").join(""),
|
||||
large: "Hello World!!!".repeat(1024).repeat(1024).split("").join(""),
|
||||
};
|
||||
|
||||
console.log("String fast path benchmark");
|
||||
console.log("Comparing pure strings (fast path) vs objects containing strings (traditional)");
|
||||
console.log("For structuredClone, pure strings should have constant time regardless of size.");
|
||||
console.log("");
|
||||
|
||||
// Benchmark structuredClone with pure strings (uses fast path)
|
||||
bench("structuredClone small string (fast path)", () => {
|
||||
structuredClone(strings.small);
|
||||
});
|
||||
|
||||
bench("structuredClone medium string (fast path)", () => {
|
||||
structuredClone(strings.medium);
|
||||
});
|
||||
|
||||
bench("structuredClone large string (fast path)", () => {
|
||||
structuredClone(strings.large);
|
||||
});
|
||||
|
||||
// Benchmark structuredClone with objects containing strings (traditional path)
|
||||
bench("structuredClone object with small string", () => {
|
||||
structuredClone({ str: strings.small });
|
||||
});
|
||||
|
||||
bench("structuredClone object with medium string", () => {
|
||||
structuredClone({ str: strings.medium });
|
||||
});
|
||||
|
||||
bench("structuredClone object with large string", () => {
|
||||
structuredClone({ str: strings.large });
|
||||
});
|
||||
|
||||
// Multiple string cloning benchmark
|
||||
bench("structuredClone 100 small strings", () => {
|
||||
for (let i = 0; i < 100; i++) {
|
||||
structuredClone(strings.small);
|
||||
}
|
||||
});
|
||||
|
||||
bench("structuredClone 100 small objects", () => {
|
||||
for (let i = 0; i < 100; i++) {
|
||||
structuredClone({ str: strings.small });
|
||||
}
|
||||
});
|
||||
|
||||
await run();
|
||||
Binary file not shown.
@@ -1,6 +1,6 @@
const isBun = typeof globalThis?.Bun?.sql !== "undefined";
import postgres from "postgres";
const sql = isBun ? Bun.sql : postgres();
const sql = isBun ? Bun.sql : postgres;

// Create the table if it doesn't exist
await sql`
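One note on the small hunk above: both sides detect Bun via `globalThis?.Bun?.sql` and fall back to the `postgres` npm client otherwise, but they differ on whether the fallback is invoked: `postgres()` returns a usable tagged-template client, while the bare `postgres` import does not. A short sketch of the detection pattern as it would typically be used (connection details omitted, so `postgres()` falls back to its environment-variable defaults):

```typescript
import postgres from "postgres";

// Prefer Bun's built-in SQL client when running under Bun; otherwise create a postgres.js client.
const isBun = typeof globalThis?.Bun?.sql !== "undefined";
const sql = isBun ? Bun.sql : postgres();

const rows = await sql`SELECT 1 AS one`;
console.log(rows);
```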
@@ -1,58 +0,0 @@
|
||||
const isBun = typeof globalThis?.Bun?.sql !== "undefined";
|
||||
let conn;
|
||||
let sql;
|
||||
import * as mariadb from "mariadb";
|
||||
import * as mysql2 from "mysql2/promise";
|
||||
let useMYSQL2 = false;
|
||||
if (process.argv.includes("--mysql2")) {
|
||||
useMYSQL2 = true;
|
||||
}
|
||||
if (isBun) {
|
||||
sql = new Bun.SQL({
|
||||
adapter: "mysql",
|
||||
database: "test",
|
||||
username: "root",
|
||||
});
|
||||
} else {
|
||||
const pool = (useMYSQL2 ? mysql2 : mariadb).createPool({
|
||||
// Add your MariaDB connection details here
|
||||
user: "root",
|
||||
database: "test",
|
||||
});
|
||||
conn = await pool.getConnection();
|
||||
}
|
||||
|
||||
if (isBun) {
|
||||
// Initialize the benchmark table (equivalent to initFct)
|
||||
await sql`DROP TABLE IF EXISTS test100`;
|
||||
await sql`CREATE TABLE test100 (i1 int,i2 int,i3 int,i4 int,i5 int,i6 int,i7 int,i8 int,i9 int,i10 int,i11 int,i12 int,i13 int,i14 int,i15 int,i16 int,i17 int,i18 int,i19 int,i20 int,i21 int,i22 int,i23 int,i24 int,i25 int,i26 int,i27 int,i28 int,i29 int,i30 int,i31 int,i32 int,i33 int,i34 int,i35 int,i36 int,i37 int,i38 int,i39 int,i40 int,i41 int,i42 int,i43 int,i44 int,i45 int,i46 int,i47 int,i48 int,i49 int,i50 int,i51 int,i52 int,i53 int,i54 int,i55 int,i56 int,i57 int,i58 int,i59 int,i60 int,i61 int,i62 int,i63 int,i64 int,i65 int,i66 int,i67 int,i68 int,i69 int,i70 int,i71 int,i72 int,i73 int,i74 int,i75 int,i76 int,i77 int,i78 int,i79 int,i80 int,i81 int,i82 int,i83 int,i84 int,i85 int,i86 int,i87 int,i88 int,i89 int,i90 int,i91 int,i92 int,i93 int,i94 int,i95 int,i96 int,i97 int,i98 int,i99 int,i100 int)`;
|
||||
await sql`INSERT INTO test100 value (1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100)`;
|
||||
} else {
|
||||
// Initialize the benchmark table (equivalent to initFct)
|
||||
await conn.query("DROP TABLE IF EXISTS test100");
|
||||
await conn.query(
|
||||
"CREATE TABLE test100 (i1 int,i2 int,i3 int,i4 int,i5 int,i6 int,i7 int,i8 int,i9 int,i10 int,i11 int,i12 int,i13 int,i14 int,i15 int,i16 int,i17 int,i18 int,i19 int,i20 int,i21 int,i22 int,i23 int,i24 int,i25 int,i26 int,i27 int,i28 int,i29 int,i30 int,i31 int,i32 int,i33 int,i34 int,i35 int,i36 int,i37 int,i38 int,i39 int,i40 int,i41 int,i42 int,i43 int,i44 int,i45 int,i46 int,i47 int,i48 int,i49 int,i50 int,i51 int,i52 int,i53 int,i54 int,i55 int,i56 int,i57 int,i58 int,i59 int,i60 int,i61 int,i62 int,i63 int,i64 int,i65 int,i66 int,i67 int,i68 int,i69 int,i70 int,i71 int,i72 int,i73 int,i74 int,i75 int,i76 int,i77 int,i78 int,i79 int,i80 int,i81 int,i82 int,i83 int,i84 int,i85 int,i86 int,i87 int,i88 int,i89 int,i90 int,i91 int,i92 int,i93 int,i94 int,i95 int,i96 int,i97 int,i98 int,i99 int,i100 int)",
|
||||
);
|
||||
await conn.query(
|
||||
"INSERT INTO test100 value (1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100)",
|
||||
);
|
||||
}
|
||||
// Run the benchmark (equivalent to benchFct)
|
||||
const type = isBun ? "Bun.SQL" : useMYSQL2 ? "mysql2" : "mariadb";
|
||||
console.time(type);
|
||||
let promises = [];
|
||||
|
||||
for (let i = 0; i < 100_000; i++) {
|
||||
if (isBun) {
|
||||
promises.push(sql`select * FROM test100`);
|
||||
} else {
|
||||
promises.push(conn.query("select * FROM test100"));
|
||||
}
|
||||
}
|
||||
await Promise.all(promises);
|
||||
console.timeEnd(type);
|
||||
|
||||
// Clean up connection
|
||||
if (!isBun && conn.release) {
|
||||
conn.release();
|
||||
}
|
||||
@@ -9,8 +9,6 @@
"typescript": "^5.0.0"
},
"dependencies": {
"mariadb": "^3.4.5",
"mysql2": "^3.14.3",
"postgres": "^3.4.7"
}
}
@@ -12,9 +12,6 @@ const scenarios = [
{ alg: "sha1", digest: "base64" },
{ alg: "sha256", digest: "hex" },
{ alg: "sha256", digest: "base64" },
{ alg: "blake2b512", digest: "hex" },
{ alg: "sha512-224", digest: "hex" },
{ alg: "sha512-256", digest: "hex" },
];

for (const { alg, digest } of scenarios) {
@@ -26,10 +23,6 @@ for (const { alg, digest } of scenarios) {
bench(`${alg}-${digest} (Bun.CryptoHasher)`, () => {
new Bun.CryptoHasher(alg).update(data).digest(digest);
});

bench(`${alg}-${digest} (Bun.CryptoHasher.hash)`, () => {
return Bun.CryptoHasher.hash(alg, data, digest);
});
}
}
@@ -28,4 +28,10 @@ bench("brotli compress stream", async () => {
|
||||
await pipeline(source, compress);
|
||||
});
|
||||
|
||||
bench("brotli decompress stream", async () => {
|
||||
const source = Readable.from([compressed]);
|
||||
const decompress = createBrotliDecompress();
|
||||
await pipeline(source, decompress);
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
@@ -1,37 +0,0 @@
|
||||
import npmStripAnsi from "strip-ansi";
|
||||
import { bench, run } from "../runner.mjs";
|
||||
|
||||
let bunStripANSI = null;
|
||||
if (!process.env.FORCE_NPM) {
|
||||
bunStripANSI = globalThis?.Bun?.stripANSI;
|
||||
}
|
||||
|
||||
const stripANSI = bunStripANSI || npmStripAnsi;
|
||||
const formatter = new Intl.NumberFormat();
|
||||
const format = n => {
|
||||
return formatter.format(n);
|
||||
};
|
||||
|
||||
const inputs = [
|
||||
["hello world", "no-ansi"],
|
||||
["\x1b[31mred\x1b[39m", "ansi"],
|
||||
["a".repeat(1024 * 16), "long-no-ansi"],
|
||||
["\x1b[31mred\x1b[39m".repeat(1024 * 16), "long-ansi"],
|
||||
];
|
||||
|
||||
const maxInputLength = Math.max(...inputs.map(([input]) => input.length));
|
||||
|
||||
for (const [input, textLabel] of inputs) {
|
||||
const label = bunStripANSI ? "Bun.stripANSI" : "npm/strip-ansi";
|
||||
const name = `${label} ${format(input.length).padStart(format(maxInputLength).length, " ")} chars ${textLabel}`;
|
||||
|
||||
bench(name, () => {
|
||||
stripANSI(input);
|
||||
});
|
||||
|
||||
if (bunStripANSI && bunStripANSI(input) !== npmStripAnsi(input)) {
|
||||
throw new Error("strip-ansi mismatch");
|
||||
}
|
||||
}
|
||||
|
||||
await run();
|
||||
@@ -1,19 +0,0 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "yaml-benchmark",
|
||||
"dependencies": {
|
||||
"js-yaml": "^4.1.0",
|
||||
"yaml": "^2.8.1",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="],
|
||||
|
||||
"js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="],
|
||||
|
||||
"yaml": ["yaml@2.8.1", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw=="],
|
||||
}
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"name": "yaml-benchmark",
|
||||
"version": "1.0.0",
|
||||
"dependencies": {
|
||||
"js-yaml": "^4.1.0",
|
||||
"yaml": "^2.8.1"
|
||||
}
|
||||
}
|
||||
@@ -1,368 +0,0 @@
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
import jsYaml from "js-yaml";
|
||||
import yaml from "yaml";
|
||||
|
||||
// Small YAML document
|
||||
const smallYaml = `
|
||||
name: John Doe
|
||||
age: 30
|
||||
email: john@example.com
|
||||
active: true
|
||||
`;
|
||||
|
||||
// Medium YAML document with nested structures
|
||||
const mediumYaml = `
|
||||
company: Acme Corp
|
||||
employees:
|
||||
- name: John Doe
|
||||
age: 30
|
||||
position: Developer
|
||||
skills:
|
||||
- JavaScript
|
||||
- TypeScript
|
||||
- Node.js
|
||||
- name: Jane Smith
|
||||
age: 28
|
||||
position: Designer
|
||||
skills:
|
||||
- Figma
|
||||
- Photoshop
|
||||
- Illustrator
|
||||
- name: Bob Johnson
|
||||
age: 35
|
||||
position: Manager
|
||||
skills:
|
||||
- Leadership
|
||||
- Communication
|
||||
- Planning
|
||||
settings:
|
||||
database:
|
||||
host: localhost
|
||||
port: 5432
|
||||
name: mydb
|
||||
cache:
|
||||
enabled: true
|
||||
ttl: 3600
|
||||
`;
|
||||
|
||||
// Large YAML document with complex structures
|
||||
const largeYaml = `
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: nginx-deployment
|
||||
labels:
|
||||
app: nginx
|
||||
spec:
|
||||
replicas: 3
|
||||
selector:
|
||||
matchLabels:
|
||||
app: nginx
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: nginx
|
||||
spec:
|
||||
containers:
|
||||
- name: nginx
|
||||
image: nginx:1.14.2
|
||||
ports:
|
||||
- containerPort: 80
|
||||
env:
|
||||
- name: ENV_VAR_1
|
||||
value: "value1"
|
||||
- name: ENV_VAR_2
|
||||
value: "value2"
|
||||
volumeMounts:
|
||||
- name: config
|
||||
mountPath: /etc/nginx
|
||||
resources:
|
||||
limits:
|
||||
cpu: "1"
|
||||
memory: "1Gi"
|
||||
requests:
|
||||
cpu: "0.5"
|
||||
memory: "512Mi"
|
||||
volumes:
|
||||
- name: config
|
||||
configMap:
|
||||
name: nginx-config
|
||||
items:
|
||||
- key: nginx.conf
|
||||
path: nginx.conf
|
||||
- key: mime.types
|
||||
path: mime.types
|
||||
nodeSelector:
|
||||
disktype: ssd
|
||||
tolerations:
|
||||
- key: "key1"
|
||||
operator: "Equal"
|
||||
value: "value1"
|
||||
effect: "NoSchedule"
|
||||
- key: "key2"
|
||||
operator: "Exists"
|
||||
effect: "NoExecute"
|
||||
affinity:
|
||||
nodeAffinity:
|
||||
requiredDuringSchedulingIgnoredDuringExecution:
|
||||
nodeSelectorTerms:
|
||||
- matchExpressions:
|
||||
- key: kubernetes.io/e2e-az-name
|
||||
operator: In
|
||||
values:
|
||||
- e2e-az1
|
||||
- e2e-az2
|
||||
podAntiAffinity:
|
||||
preferredDuringSchedulingIgnoredDuringExecution:
|
||||
- weight: 100
|
||||
podAffinityTerm:
|
||||
labelSelector:
|
||||
matchExpressions:
|
||||
- key: app
|
||||
operator: In
|
||||
values:
|
||||
- web-store
|
||||
topologyKey: kubernetes.io/hostname
|
||||
`;
|
||||
|
||||
// YAML with anchors and references
|
||||
const yamlWithAnchors = `
|
||||
defaults: &defaults
|
||||
adapter: postgresql
|
||||
host: localhost
|
||||
port: 5432
|
||||
|
||||
development:
|
||||
<<: *defaults
|
||||
database: dev_db
|
||||
|
||||
test:
|
||||
<<: *defaults
|
||||
database: test_db
|
||||
|
||||
production:
|
||||
<<: *defaults
|
||||
database: prod_db
|
||||
host: prod.example.com
|
||||
`;
|
||||
|
||||
// Array of items
|
||||
const arrayYaml = `
|
||||
- id: 1
|
||||
name: Item 1
|
||||
price: 10.99
|
||||
tags: [electronics, gadgets]
|
||||
- id: 2
|
||||
name: Item 2
|
||||
price: 25.50
|
||||
tags: [books, education]
|
||||
- id: 3
|
||||
name: Item 3
|
||||
price: 5.00
|
||||
tags: [food, snacks]
|
||||
- id: 4
|
||||
name: Item 4
|
||||
price: 100.00
|
||||
tags: [electronics, computers]
|
||||
- id: 5
|
||||
name: Item 5
|
||||
price: 15.75
|
||||
tags: [clothing, accessories]
|
||||
`;
|
||||
|
||||
// Multiline strings
|
||||
const multilineYaml = `
|
||||
description: |
|
||||
This is a multiline string
|
||||
that preserves line breaks
|
||||
and indentation.
|
||||
|
||||
It can contain multiple paragraphs
|
||||
and special characters: !@#$%^&*()
|
||||
|
||||
folded: >
|
||||
This is a folded string
|
||||
where line breaks are converted
|
||||
to spaces unless there are
|
||||
|
||||
empty lines like above.
|
||||
plain: This is a plain string
|
||||
quoted: "This is a quoted string with \\"escapes\\""
|
||||
literal: 'This is a literal string with ''quotes'''
|
||||
`;
|
||||
|
||||
// Numbers and special values
|
||||
const numbersYaml = `
|
||||
integer: 42
|
||||
negative: -17
|
||||
float: 3.14159
|
||||
scientific: 1.23e-4
|
||||
infinity: .inf
|
||||
negativeInfinity: -.inf
|
||||
notANumber: .nan
|
||||
octal: 0o755
|
||||
hex: 0xFF
|
||||
binary: 0b1010
|
||||
`;
|
||||
|
||||
// Dates and timestamps
|
||||
const datesYaml = `
|
||||
date: 2024-01-15
|
||||
datetime: 2024-01-15T10:30:00Z
|
||||
timestamp: 2024-01-15 10:30:00.123456789 -05:00
|
||||
canonical: 2024-01-15T10:30:00.123456789Z
|
||||
`;
|
||||
|
||||
// Parse benchmarks
|
||||
group("parse small YAML", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(smallYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(smallYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(smallYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse medium YAML", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(mediumYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(mediumYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(mediumYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse large YAML", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(largeYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(largeYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(largeYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML with anchors", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(yamlWithAnchors);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(yamlWithAnchors);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(yamlWithAnchors);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML array", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(arrayYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(arrayYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(arrayYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML with multiline strings", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(multilineYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(multilineYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(multilineYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML with numbers", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(numbersYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(numbersYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(numbersYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML with dates", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(datesYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(datesYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(datesYaml);
|
||||
});
|
||||
});
|
||||
|
||||
// // Stringify benchmarks
|
||||
// const smallObjJs = jsYaml.load(smallYaml);
|
||||
// const mediumObjJs = jsYaml.load(mediumYaml);
|
||||
// const largeObjJs = jsYaml.load(largeYaml);
|
||||
|
||||
// group("stringify small object", () => {
|
||||
// bench("js-yaml.dump", () => {
|
||||
// globalThis.result = jsYaml.dump(smallObjJs);
|
||||
// });
|
||||
// });
|
||||
|
||||
// group("stringify medium object", () => {
|
||||
// bench("js-yaml.dump", () => {
|
||||
// globalThis.result = jsYaml.dump(mediumObjJs);
|
||||
// });
|
||||
// });
|
||||
|
||||
// group("stringify large object", () => {
|
||||
// bench("js-yaml.dump", () => {
|
||||
// globalThis.result = jsYaml.dump(largeObjJs);
|
||||
// });
|
||||
// });
|
||||
|
||||
await run();
|
||||
@@ -1,407 +0,0 @@
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
import jsYaml from "js-yaml";
|
||||
import yaml from "yaml";
|
||||
|
||||
// Small object
|
||||
const smallObject = {
|
||||
name: "John Doe",
|
||||
age: 30,
|
||||
email: "john@example.com",
|
||||
active: true,
|
||||
};
|
||||
|
||||
// Medium object with nested structures
|
||||
const mediumObject = {
|
||||
company: "Acme Corp",
|
||||
employees: [
|
||||
{
|
||||
name: "John Doe",
|
||||
age: 30,
|
||||
position: "Developer",
|
||||
skills: ["JavaScript", "TypeScript", "Node.js"],
|
||||
},
|
||||
{
|
||||
name: "Jane Smith",
|
||||
age: 28,
|
||||
position: "Designer",
|
||||
skills: ["Figma", "Photoshop", "Illustrator"],
|
||||
},
|
||||
{
|
||||
name: "Bob Johnson",
|
||||
age: 35,
|
||||
position: "Manager",
|
||||
skills: ["Leadership", "Communication", "Planning"],
|
||||
},
|
||||
],
|
||||
settings: {
|
||||
database: {
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
name: "mydb",
|
||||
},
|
||||
cache: {
|
||||
enabled: true,
|
||||
ttl: 3600,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Large object with complex structures
|
||||
const largeObject = {
|
||||
apiVersion: "apps/v1",
|
||||
kind: "Deployment",
|
||||
metadata: {
|
||||
name: "nginx-deployment",
|
||||
labels: {
|
||||
app: "nginx",
|
||||
},
|
||||
},
|
||||
spec: {
|
||||
replicas: 3,
|
||||
selector: {
|
||||
matchLabels: {
|
||||
app: "nginx",
|
||||
},
|
||||
},
|
||||
template: {
|
||||
metadata: {
|
||||
labels: {
|
||||
app: "nginx",
|
||||
},
|
||||
},
|
||||
spec: {
|
||||
containers: [
|
||||
{
|
||||
name: "nginx",
|
||||
image: "nginx:1.14.2",
|
||||
ports: [
|
||||
{
|
||||
containerPort: 80,
|
||||
},
|
||||
],
|
||||
env: [
|
||||
{
|
||||
name: "ENV_VAR_1",
|
||||
value: "value1",
|
||||
},
|
||||
{
|
||||
name: "ENV_VAR_2",
|
||||
value: "value2",
|
||||
},
|
||||
],
|
||||
volumeMounts: [
|
||||
{
|
||||
name: "config",
|
||||
mountPath: "/etc/nginx",
|
||||
},
|
||||
],
|
||||
resources: {
|
||||
limits: {
|
||||
cpu: "1",
|
||||
memory: "1Gi",
|
||||
},
|
||||
requests: {
|
||||
cpu: "0.5",
|
||||
memory: "512Mi",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
volumes: [
|
||||
{
|
||||
name: "config",
|
||||
configMap: {
|
||||
name: "nginx-config",
|
||||
items: [
|
||||
{
|
||||
key: "nginx.conf",
|
||||
path: "nginx.conf",
|
||||
},
|
||||
{
|
||||
key: "mime.types",
|
||||
path: "mime.types",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
nodeSelector: {
|
||||
disktype: "ssd",
|
||||
},
|
||||
tolerations: [
|
||||
{
|
||||
key: "key1",
|
||||
operator: "Equal",
|
||||
value: "value1",
|
||||
effect: "NoSchedule",
|
||||
},
|
||||
{
|
||||
key: "key2",
|
||||
operator: "Exists",
|
||||
effect: "NoExecute",
|
||||
},
|
||||
],
|
||||
affinity: {
|
||||
nodeAffinity: {
|
||||
requiredDuringSchedulingIgnoredDuringExecution: {
|
||||
nodeSelectorTerms: [
|
||||
{
|
||||
matchExpressions: [
|
||||
{
|
||||
key: "kubernetes.io/e2e-az-name",
|
||||
operator: "In",
|
||||
values: ["e2e-az1", "e2e-az2"],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
podAntiAffinity: {
|
||||
preferredDuringSchedulingIgnoredDuringExecution: [
|
||||
{
|
||||
weight: 100,
|
||||
podAffinityTerm: {
|
||||
labelSelector: {
|
||||
matchExpressions: [
|
||||
{
|
||||
key: "app",
|
||||
operator: "In",
|
||||
values: ["web-store"],
|
||||
},
|
||||
],
|
||||
},
|
||||
topologyKey: "kubernetes.io/hostname",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Object with anchors and references (after resolution)
|
||||
const objectWithAnchors = {
|
||||
defaults: {
|
||||
adapter: "postgresql",
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
},
|
||||
development: {
|
||||
adapter: "postgresql",
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
database: "dev_db",
|
||||
},
|
||||
test: {
|
||||
adapter: "postgresql",
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
database: "test_db",
|
||||
},
|
||||
production: {
|
||||
adapter: "postgresql",
|
||||
host: "prod.example.com",
|
||||
port: 5432,
|
||||
database: "prod_db",
|
||||
},
|
||||
};
|
||||
|
||||
// Array of items
|
||||
const arrayObject = [
|
||||
{
|
||||
id: 1,
|
||||
name: "Item 1",
|
||||
price: 10.99,
|
||||
tags: ["electronics", "gadgets"],
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: "Item 2",
|
||||
price: 25.5,
|
||||
tags: ["books", "education"],
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: "Item 3",
|
||||
price: 5.0,
|
||||
tags: ["food", "snacks"],
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
name: "Item 4",
|
||||
price: 100.0,
|
||||
tags: ["electronics", "computers"],
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
name: "Item 5",
|
||||
price: 15.75,
|
||||
tags: ["clothing", "accessories"],
|
||||
},
|
||||
];
|
||||
|
||||
// Multiline strings
|
||||
const multilineObject = {
|
||||
description:
|
||||
"This is a multiline string\nthat preserves line breaks\nand indentation.\n\nIt can contain multiple paragraphs\nand special characters: !@#$%^&*()\n",
|
||||
folded: "This is a folded string where line breaks are converted to spaces unless there are\nempty lines like above.",
|
||||
plain: "This is a plain string",
|
||||
quoted: 'This is a quoted string with "escapes"',
|
||||
literal: "This is a literal string with 'quotes'",
|
||||
};
|
||||
|
||||
// Numbers and special values
|
||||
const numbersObject = {
|
||||
integer: 42,
|
||||
negative: -17,
|
||||
float: 3.14159,
|
||||
scientific: 0.000123,
|
||||
infinity: Infinity,
|
||||
negativeInfinity: -Infinity,
|
||||
notANumber: NaN,
|
||||
octal: 493, // 0o755
|
||||
hex: 255, // 0xFF
|
||||
binary: 10, // 0b1010
|
||||
};
|
||||
|
||||
// Dates and timestamps
|
||||
const datesObject = {
|
||||
date: new Date("2024-01-15"),
|
||||
datetime: new Date("2024-01-15T10:30:00Z"),
|
||||
timestamp: new Date("2024-01-15T15:30:00.123456789Z"), // Adjusted for UTC-5
|
||||
canonical: new Date("2024-01-15T10:30:00.123456789Z"),
|
||||
};
|
||||
|
||||
// Stringify benchmarks
|
||||
group("stringify small object", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(smallObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(smallObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(smallObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify medium object", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(mediumObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(mediumObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(mediumObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify large object", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(largeObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(largeObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(largeObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify object with anchors", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(objectWithAnchors);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(objectWithAnchors);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(objectWithAnchors);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify array", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(arrayObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(arrayObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(arrayObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify object with multiline strings", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(multilineObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(multilineObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(multilineObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify object with numbers", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(numbersObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(numbersObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(numbersObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify object with dates", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(datesObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(datesObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(datesObject);
|
||||
});
|
||||
});
|
||||
|
||||
await run();
|
||||
33
build.zig
33
build.zig
@@ -48,7 +48,6 @@ const BunBuildOptions = struct {
|
||||
/// enable debug logs in release builds
|
||||
enable_logs: bool = false,
|
||||
enable_asan: bool,
|
||||
enable_valgrind: bool,
|
||||
tracy_callstack_depth: u16,
|
||||
reported_nodejs_version: Version,
|
||||
/// To make iterating on some '@embedFile's faster, we load them at runtime
|
||||
@@ -68,7 +67,6 @@ const BunBuildOptions = struct {
|
||||
|
||||
cached_options_module: ?*Module = null,
|
||||
windows_shim: ?WindowsShim = null,
|
||||
llvm_codegen_threads: ?u32 = null,
|
||||
|
||||
pub fn isBaseline(this: *const BunBuildOptions) bool {
|
||||
return this.arch.isX86() and
|
||||
@@ -96,7 +94,6 @@ const BunBuildOptions = struct {
|
||||
opts.addOption(bool, "baseline", this.isBaseline());
|
||||
opts.addOption(bool, "enable_logs", this.enable_logs);
|
||||
opts.addOption(bool, "enable_asan", this.enable_asan);
|
||||
opts.addOption(bool, "enable_valgrind", this.enable_valgrind);
|
||||
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{}", .{this.reported_nodejs_version}));
|
||||
opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
|
||||
opts.addOption(bool, "override_no_export_cpp_apis", this.override_no_export_cpp_apis);
|
||||
@@ -216,21 +213,26 @@ pub fn build(b: *Build) !void {
|
||||
var build_options = BunBuildOptions{
|
||||
.target = target,
|
||||
.optimize = optimize,
|
||||
|
||||
.os = os,
|
||||
.arch = arch,
|
||||
|
||||
.codegen_path = codegen_path,
|
||||
.codegen_embed = codegen_embed,
|
||||
.no_llvm = no_llvm,
|
||||
.override_no_export_cpp_apis = override_no_export_cpp_apis,
|
||||
|
||||
.version = try Version.parse(bun_version),
|
||||
.canary_revision = canary: {
|
||||
const rev = b.option(u32, "canary", "Treat this as a canary build") orelse 0;
|
||||
break :canary if (rev == 0) null else rev;
|
||||
},
|
||||
|
||||
.reported_nodejs_version = try Version.parse(
|
||||
b.option([]const u8, "reported_nodejs_version", "Reported Node.js version") orelse
|
||||
"0.0.0-unset",
|
||||
),
|
||||
|
||||
.sha = sha: {
|
||||
const sha_buildoption = b.option([]const u8, "sha", "Force the git sha");
|
||||
const sha_github = b.graph.env_map.get("GITHUB_SHA");
|
||||
@@ -266,11 +268,10 @@ pub fn build(b: *Build) !void {
|
||||
|
||||
break :sha sha;
|
||||
},
|
||||
|
||||
.tracy_callstack_depth = b.option(u16, "tracy_callstack_depth", "") orelse 10,
|
||||
.enable_logs = b.option(bool, "enable_logs", "Enable logs in release") orelse false,
|
||||
.enable_asan = b.option(bool, "enable_asan", "Enable asan") orelse false,
|
||||
.enable_valgrind = b.option(bool, "enable_valgrind", "Enable valgrind") orelse false,
|
||||
.llvm_codegen_threads = b.option(u32, "llvm_codegen_threads", "Number of threads to use for LLVM codegen") orelse 1,
|
||||
};
|
||||
|
||||
// zig build obj
|
||||
@@ -499,7 +500,6 @@ fn addMultiCheck(
|
||||
.codegen_path = root_build_options.codegen_path,
|
||||
.no_llvm = root_build_options.no_llvm,
|
||||
.enable_asan = root_build_options.enable_asan,
|
||||
.enable_valgrind = root_build_options.enable_valgrind,
|
||||
.override_no_export_cpp_apis = root_build_options.override_no_export_cpp_apis,
|
||||
};
|
||||
|
||||
@@ -587,15 +587,9 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
|
||||
.root_module = root,
|
||||
});
|
||||
configureObj(b, opts, obj);
|
||||
if (enableFastBuild(b)) obj.root_module.strip = true;
|
||||
return obj;
|
||||
}
|
||||
|
||||
fn enableFastBuild(b: *Build) bool {
|
||||
const val = b.graph.env_map.get("BUN_BUILD_FAST") orelse return false;
|
||||
return std.mem.eql(u8, val, "1");
|
||||
}
|
||||
|
||||
fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
|
||||
// Flags on root module get used for the compilation
|
||||
obj.root_module.omit_frame_pointer = false;
|
||||
@@ -605,16 +599,8 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
|
||||
|
||||
// Object options
|
||||
obj.use_llvm = !opts.no_llvm;
|
||||
obj.use_lld = if (opts.os == .mac or opts.os == .linux) false else !opts.no_llvm;
|
||||
|
||||
if (opts.optimize == .Debug) {
|
||||
if (@hasField(std.meta.Child(@TypeOf(obj)), "llvm_codegen_threads"))
|
||||
obj.llvm_codegen_threads = opts.llvm_codegen_threads orelse 0;
|
||||
}
|
||||
|
||||
obj.no_link_obj = true;
|
||||
|
||||
if (opts.enable_asan and !enableFastBuild(b)) {
|
||||
obj.use_lld = if (opts.os == .mac) false else !opts.no_llvm;
|
||||
if (opts.enable_asan) {
|
||||
if (@hasField(Build.Module, "sanitize_address")) {
|
||||
obj.root_module.sanitize_address = true;
|
||||
} else {
|
||||
@@ -644,7 +630,7 @@ fn configureObj(b: *Build, opts: *BunBuildOptions, obj: *Compile) void {
|
||||
obj.link_function_sections = true;
|
||||
obj.link_data_sections = true;
|
||||
|
||||
if (opts.optimize == .Debug and opts.enable_valgrind) {
|
||||
if (opts.optimize == .Debug) {
|
||||
obj.root_module.valgrind = true;
|
||||
}
|
||||
}
|
||||
@@ -753,7 +739,6 @@ fn addInternalImports(b: *Build, mod: *Module, opts: *BunBuildOptions) void {
|
||||
.{ .file = "node-fallbacks/url.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "node-fallbacks/util.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "node-fallbacks/zlib.js", .enable = opts.shouldEmbedCode() },
|
||||
.{ .file = "eval/feedback.ts", .enable = opts.shouldEmbedCode() },
|
||||
}) |entry| {
|
||||
if (!@hasField(@TypeOf(entry), "enable") or entry.enable) {
|
||||
const path = b.pathJoin(&.{ opts.codegen_path, entry.file });
|
||||
|
||||
14
bun.lock
14
bun.lock
@@ -6,7 +6,6 @@
|
||||
"devDependencies": {
|
||||
"@lezer/common": "^1.2.3",
|
||||
"@lezer/cpp": "^1.1.3",
|
||||
"@types/bun": "workspace:*",
|
||||
"bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
|
||||
"esbuild": "^0.21.4",
|
||||
"mitata": "^0.1.11",
|
||||
@@ -16,7 +15,7 @@
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"source-map-js": "^1.2.0",
|
||||
"typescript": "5.9.2",
|
||||
"typescript": "^5.7.2",
|
||||
},
|
||||
},
|
||||
"packages/@types/bun": {
|
||||
@@ -33,6 +32,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^19",
|
||||
"typescript": "^5.0.2",
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@types/react": "^19",
|
||||
@@ -148,7 +148,7 @@
|
||||
|
||||
"@octokit/webhooks-types": ["@octokit/webhooks-types@7.6.1", "", {}, "sha512-S8u2cJzklBC0FgTwWVLaM8tMrDuDMVE4xiTK4EYXM9GntyvrdbSoxqDQa+Fh57CCNApyIpyeqPhhFEmHPfrXgw=="],
|
||||
|
||||
"@sentry/types": ["@sentry/types@7.120.4", "", {}, "sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q=="],
|
||||
"@sentry/types": ["@sentry/types@7.120.3", "", {}, "sha512-C4z+3kGWNFJ303FC+FxAd4KkHvxpNFYAFN8iMIgBwJdpIl25KZ8Q/VdGn0MLLUEHNLvjob0+wvwlcRBBNLXOow=="],
|
||||
|
||||
"@types/aws-lambda": ["@types/aws-lambda@8.10.152", "", {}, "sha512-soT/c2gYBnT5ygwiHPmd9a1bftj462NWVk2tKCc1PYHSIacB2UwbTS2zYG4jzag1mRDuzg/OjtxQjQ2NKRB6Rw=="],
|
||||
|
||||
@@ -160,9 +160,9 @@
|
||||
|
||||
"@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],
|
||||
|
||||
"@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
|
||||
"@types/node": ["@types/node@24.1.0", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-ut5FthK5moxFKH2T1CUOC6ctR67rQRvvHdFLCD2Ql6KXmMuCrjsSsRI9UsLCm9M18BMwClv4pn327UvB7eeO1w=="],
|
||||
|
||||
"@types/react": ["@types/react@19.1.10", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-EhBeSYX0Y6ye8pNebpKrwFJq7BoQ8J5SO6NlvNwwHjSj6adXJViPQrKlsyPw7hLBLvckEMO1yxeGdR82YBBlDg=="],
|
||||
"@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="],
|
||||
|
||||
"aggregate-error": ["aggregate-error@3.1.0", "", { "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA=="],
|
||||
|
||||
@@ -308,11 +308,11 @@
|
||||
|
||||
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
|
||||
|
||||
"typescript": ["typescript@5.9.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A=="],
|
||||
"typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
|
||||
|
||||
"uglify-js": ["uglify-js@3.19.3", "", { "bin": { "uglifyjs": "bin/uglifyjs" } }, "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ=="],
|
||||
|
||||
"undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
|
||||
"undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="],
|
||||
|
||||
"universal-github-app-jwt": ["universal-github-app-jwt@1.2.0", "", { "dependencies": { "@types/jsonwebtoken": "^9.0.0", "jsonwebtoken": "^9.0.2" } }, "sha512-dncpMpnsKBk0eetwfN8D8OUHGfiDhhJ+mtsbMl+7PfW7mYjiH8LIcqRmYMtzYLgSh47HjfdBtrBwIQ/gizKR3g=="],
|
||||
|
||||
|
||||
@@ -57,23 +57,6 @@ else()
|
||||
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
|
||||
endif()
|
||||
|
||||
# Windows Code Signing Option
|
||||
if(WIN32)
|
||||
optionx(ENABLE_WINDOWS_CODESIGNING BOOL "Enable Windows code signing with DigiCert KeyLocker" DEFAULT OFF)
|
||||
|
||||
if(ENABLE_WINDOWS_CODESIGNING)
|
||||
message(STATUS "Windows code signing: ENABLED")
|
||||
|
||||
# Check for required environment variables
|
||||
if(NOT DEFINED ENV{SM_API_KEY})
|
||||
message(WARNING "SM_API_KEY not set - code signing may fail")
|
||||
endif()
|
||||
if(NOT DEFINED ENV{SM_CLIENT_CERT_FILE})
|
||||
message(WARNING "SM_CLIENT_CERT_FILE not set - code signing may fail")
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(LINUX)
|
||||
if(EXISTS "/etc/alpine-release")
|
||||
set(DEFAULT_ABI "musl")
|
||||
@@ -114,18 +97,11 @@ endif()
|
||||
|
||||
if(DEBUG AND ((APPLE AND ARCH STREQUAL "aarch64") OR LINUX))
|
||||
set(DEFAULT_ASAN ON)
|
||||
set(DEFAULT_VALGRIND OFF)
|
||||
else()
|
||||
set(DEFAULT_ASAN OFF)
|
||||
set(DEFAULT_VALGRIND OFF)
|
||||
endif()
|
||||
|
||||
optionx(ENABLE_ASAN BOOL "If ASAN support should be enabled" DEFAULT ${DEFAULT_ASAN})
|
||||
optionx(ENABLE_ZIG_ASAN BOOL "If Zig ASAN support should be enabled" DEFAULT ${ENABLE_ASAN})
|
||||
|
||||
if (NOT ENABLE_ASAN)
|
||||
set(ENABLE_ZIG_ASAN OFF)
|
||||
endif()
|
||||
|
||||
if(RELEASE AND LINUX AND CI AND NOT ENABLE_ASSERTIONS AND NOT ENABLE_ASAN)
|
||||
set(DEFAULT_LTO ON)
|
||||
|
||||
@@ -13,10 +13,7 @@
|
||||
},
|
||||
{
|
||||
"output": "JavaScriptSources.txt",
|
||||
"paths": [
|
||||
"src/js/**/*.{js,ts}",
|
||||
"src/install/PackageManager/scanner-entry.ts"
|
||||
]
|
||||
"paths": ["src/js/**/*.{js,ts}"]
|
||||
},
|
||||
{
|
||||
"output": "JavaScriptCodegenSources.txt",
|
||||
|
||||
22
cmake/sources/BakeRuntimeSources.txt
Normal file
22
cmake/sources/BakeRuntimeSources.txt
Normal file
@@ -0,0 +1,22 @@
|
||||
src/bake/bake.d.ts
|
||||
src/bake/bake.private.d.ts
|
||||
src/bake/bun-framework-react/index.ts
|
||||
src/bake/client/css-reloader.ts
|
||||
src/bake/client/data-view.ts
|
||||
src/bake/client/error-serialization.ts
|
||||
src/bake/client/inspect.ts
|
||||
src/bake/client/JavaScriptSyntaxHighlighter.css
|
||||
src/bake/client/JavaScriptSyntaxHighlighter.ts
|
||||
src/bake/client/overlay.css
|
||||
src/bake/client/overlay.ts
|
||||
src/bake/client/stack-trace.ts
|
||||
src/bake/client/websocket.ts
|
||||
src/bake/debug.ts
|
||||
src/bake/DevServer.bind.ts
|
||||
src/bake/enums.ts
|
||||
src/bake/hmr-module.ts
|
||||
src/bake/hmr-runtime-client.ts
|
||||
src/bake/hmr-runtime-error.ts
|
||||
src/bake/hmr-runtime-server.ts
|
||||
src/bake/server/stack-trace-stub.ts
|
||||
src/bake/shared.ts
|
||||
7
cmake/sources/BindgenSources.txt
Normal file
7
cmake/sources/BindgenSources.txt
Normal file
@@ -0,0 +1,7 @@
|
||||
src/bake.bind.ts
|
||||
src/bake/DevServer.bind.ts
|
||||
src/bun.js/api/BunObject.bind.ts
|
||||
src/bun.js/bindgen_test.bind.ts
|
||||
src/bun.js/bindings/NodeModuleModule.bind.ts
|
||||
src/bun.js/node/node_os.bind.ts
|
||||
src/fmt.bind.ts
|
||||
12
cmake/sources/BunErrorSources.txt
Normal file
12
cmake/sources/BunErrorSources.txt
Normal file
@@ -0,0 +1,12 @@
|
||||
packages/bun-error/bun-error.css
|
||||
packages/bun-error/img/close.png
|
||||
packages/bun-error/img/error.png
|
||||
packages/bun-error/img/powered-by.png
|
||||
packages/bun-error/img/powered-by.webp
|
||||
packages/bun-error/index.tsx
|
||||
packages/bun-error/markdown.ts
|
||||
packages/bun-error/package.json
|
||||
packages/bun-error/runtime-error.ts
|
||||
packages/bun-error/sourcemap.ts
|
||||
packages/bun-error/stack-trace-parser.ts
|
||||
packages/bun-error/tsconfig.json
|
||||
15
cmake/sources/CSources.txt
Normal file
15
cmake/sources/CSources.txt
Normal file
@@ -0,0 +1,15 @@
|
||||
packages/bun-usockets/src/bsd.c
|
||||
packages/bun-usockets/src/context.c
|
||||
packages/bun-usockets/src/crypto/openssl.c
|
||||
packages/bun-usockets/src/eventing/epoll_kqueue.c
|
||||
packages/bun-usockets/src/eventing/libuv.c
|
||||
packages/bun-usockets/src/loop.c
|
||||
packages/bun-usockets/src/quic.c
|
||||
packages/bun-usockets/src/socket.c
|
||||
packages/bun-usockets/src/udp.c
|
||||
src/asan-config.c
|
||||
src/bun.js/bindings/node/http/llhttp/api.c
|
||||
src/bun.js/bindings/node/http/llhttp/http.c
|
||||
src/bun.js/bindings/node/http/llhttp/llhttp.c
|
||||
src/bun.js/bindings/uv-posix-polyfills.c
|
||||
src/bun.js/bindings/uv-posix-stubs.c
|
||||
489
cmake/sources/CxxSources.txt
Normal file
489
cmake/sources/CxxSources.txt
Normal file
@@ -0,0 +1,489 @@
|
||||
packages/bun-usockets/src/crypto/root_certs.cpp
|
||||
packages/bun-usockets/src/crypto/sni_tree.cpp
|
||||
src/bake/BakeGlobalObject.cpp
|
||||
src/bake/BakeProduction.cpp
|
||||
src/bake/BakeSourceProvider.cpp
|
||||
src/bun.js/bindings/ActiveDOMCallback.cpp
|
||||
src/bun.js/bindings/AsymmetricKeyValue.cpp
|
||||
src/bun.js/bindings/AsyncContextFrame.cpp
|
||||
src/bun.js/bindings/Base64Helpers.cpp
|
||||
src/bun.js/bindings/bindings.cpp
|
||||
src/bun.js/bindings/blob.cpp
|
||||
src/bun.js/bindings/bun-simdutf.cpp
|
||||
src/bun.js/bindings/bun-spawn.cpp
|
||||
src/bun.js/bindings/BunClientData.cpp
|
||||
src/bun.js/bindings/BunCommonStrings.cpp
|
||||
src/bun.js/bindings/BunDebugger.cpp
|
||||
src/bun.js/bindings/BunGCOutputConstraint.cpp
|
||||
src/bun.js/bindings/BunGlobalScope.cpp
|
||||
src/bun.js/bindings/BunHttp2CommonStrings.cpp
|
||||
src/bun.js/bindings/BunInjectedScriptHost.cpp
|
||||
src/bun.js/bindings/BunInspector.cpp
|
||||
src/bun.js/bindings/BunJSCEventLoop.cpp
|
||||
src/bun.js/bindings/BunObject.cpp
|
||||
src/bun.js/bindings/BunPlugin.cpp
|
||||
src/bun.js/bindings/BunProcess.cpp
|
||||
src/bun.js/bindings/BunString.cpp
|
||||
src/bun.js/bindings/BunWorkerGlobalScope.cpp
|
||||
src/bun.js/bindings/c-bindings.cpp
|
||||
src/bun.js/bindings/CallSite.cpp
|
||||
src/bun.js/bindings/CallSitePrototype.cpp
|
||||
src/bun.js/bindings/CatchScopeBinding.cpp
|
||||
src/bun.js/bindings/CodeCoverage.cpp
|
||||
src/bun.js/bindings/ConsoleObject.cpp
|
||||
src/bun.js/bindings/Cookie.cpp
|
||||
src/bun.js/bindings/CookieMap.cpp
|
||||
src/bun.js/bindings/coroutine.cpp
|
||||
src/bun.js/bindings/CPUFeatures.cpp
|
||||
src/bun.js/bindings/decodeURIComponentSIMD.cpp
|
||||
src/bun.js/bindings/DOMException.cpp
|
||||
src/bun.js/bindings/DOMFormData.cpp
|
||||
src/bun.js/bindings/DOMURL.cpp
|
||||
src/bun.js/bindings/DOMWrapperWorld.cpp
|
||||
src/bun.js/bindings/DoubleFormatter.cpp
|
||||
src/bun.js/bindings/EncodeURIComponent.cpp
|
||||
src/bun.js/bindings/ErrorCode.cpp
|
||||
src/bun.js/bindings/ErrorStackFrame.cpp
|
||||
src/bun.js/bindings/ErrorStackTrace.cpp
|
||||
src/bun.js/bindings/EventLoopTaskNoContext.cpp
|
||||
src/bun.js/bindings/ExposeNodeModuleGlobals.cpp
|
||||
src/bun.js/bindings/ffi.cpp
|
||||
src/bun.js/bindings/helpers.cpp
|
||||
src/bun.js/bindings/highway_strings.cpp
|
||||
src/bun.js/bindings/HTMLEntryPoint.cpp
|
||||
src/bun.js/bindings/ImportMetaObject.cpp
|
||||
src/bun.js/bindings/inlines.cpp
|
||||
src/bun.js/bindings/InspectorBunFrontendDevServerAgent.cpp
|
||||
src/bun.js/bindings/InspectorHTTPServerAgent.cpp
|
||||
src/bun.js/bindings/InspectorLifecycleAgent.cpp
|
||||
src/bun.js/bindings/InspectorTestReporterAgent.cpp
|
||||
src/bun.js/bindings/InternalForTesting.cpp
|
||||
src/bun.js/bindings/InternalModuleRegistry.cpp
|
||||
src/bun.js/bindings/IPC.cpp
|
||||
src/bun.js/bindings/isBuiltinModule.cpp
|
||||
src/bun.js/bindings/JS2Native.cpp
|
||||
src/bun.js/bindings/JSBigIntBinding.cpp
|
||||
src/bun.js/bindings/JSBuffer.cpp
|
||||
src/bun.js/bindings/JSBufferEncodingType.cpp
|
||||
src/bun.js/bindings/JSBufferList.cpp
|
||||
src/bun.js/bindings/JSBundlerPlugin.cpp
|
||||
src/bun.js/bindings/JSBunRequest.cpp
|
||||
src/bun.js/bindings/JSCommonJSExtensions.cpp
|
||||
src/bun.js/bindings/JSCommonJSModule.cpp
|
||||
src/bun.js/bindings/JSCTaskScheduler.cpp
|
||||
src/bun.js/bindings/JSCTestingHelpers.cpp
|
||||
src/bun.js/bindings/JSDOMExceptionHandling.cpp
|
||||
src/bun.js/bindings/JSDOMFile.cpp
|
||||
src/bun.js/bindings/JSDOMGlobalObject.cpp
|
||||
src/bun.js/bindings/JSDOMWrapper.cpp
|
||||
src/bun.js/bindings/JSDOMWrapperCache.cpp
|
||||
src/bun.js/bindings/JSEnvironmentVariableMap.cpp
|
||||
src/bun.js/bindings/JSFFIFunction.cpp
|
||||
src/bun.js/bindings/JSMockFunction.cpp
|
||||
src/bun.js/bindings/JSNextTickQueue.cpp
|
||||
src/bun.js/bindings/JSNodePerformanceHooksHistogram.cpp
|
||||
src/bun.js/bindings/JSNodePerformanceHooksHistogramConstructor.cpp
|
||||
src/bun.js/bindings/JSNodePerformanceHooksHistogramPrototype.cpp
|
||||
src/bun.js/bindings/JSPropertyIterator.cpp
|
||||
src/bun.js/bindings/JSS3File.cpp
|
||||
src/bun.js/bindings/JSSocketAddressDTO.cpp
|
||||
src/bun.js/bindings/JSStringDecoder.cpp
|
||||
src/bun.js/bindings/JSWrappingFunction.cpp
|
||||
src/bun.js/bindings/JSX509Certificate.cpp
|
||||
src/bun.js/bindings/JSX509CertificateConstructor.cpp
|
||||
src/bun.js/bindings/JSX509CertificatePrototype.cpp
|
||||
src/bun.js/bindings/linux_perf_tracing.cpp
|
||||
src/bun.js/bindings/MarkingConstraint.cpp
|
||||
src/bun.js/bindings/ModuleLoader.cpp
|
||||
src/bun.js/bindings/napi_external.cpp
|
||||
src/bun.js/bindings/napi_finalizer.cpp
|
||||
src/bun.js/bindings/napi_handle_scope.cpp
|
||||
src/bun.js/bindings/napi_type_tag.cpp
|
||||
src/bun.js/bindings/napi.cpp
|
||||
src/bun.js/bindings/NapiClass.cpp
|
||||
src/bun.js/bindings/NapiRef.cpp
|
||||
src/bun.js/bindings/NapiWeakValue.cpp
|
||||
src/bun.js/bindings/ncrpyto_engine.cpp
|
||||
src/bun.js/bindings/ncrypto.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoDhJob.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenDhKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenDsaKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenEcKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenNidKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoGenRsaKeyPair.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoHkdf.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoKeygen.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoKeys.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoPrimes.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoSignJob.cpp
|
||||
src/bun.js/bindings/node/crypto/CryptoUtil.cpp
|
||||
src/bun.js/bindings/node/crypto/JSCipher.cpp
|
||||
src/bun.js/bindings/node/crypto/JSCipherConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSCipherPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellman.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanGroup.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanGroupConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanGroupPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSDiffieHellmanPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSECDH.cpp
|
||||
src/bun.js/bindings/node/crypto/JSECDHConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSECDHPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSHash.cpp
|
||||
src/bun.js/bindings/node/crypto/JSHmac.cpp
|
||||
src/bun.js/bindings/node/crypto/JSKeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/JSKeyObjectConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSKeyObjectPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPrivateKeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPrivateKeyObjectConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPrivateKeyObjectPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPublicKeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPublicKeyObjectConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSPublicKeyObjectPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSSecretKeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/JSSecretKeyObjectConstructor.cpp
|
||||
src/bun.js/bindings/node/crypto/JSSecretKeyObjectPrototype.cpp
|
||||
src/bun.js/bindings/node/crypto/JSSign.cpp
|
||||
src/bun.js/bindings/node/crypto/JSVerify.cpp
|
||||
src/bun.js/bindings/node/crypto/KeyObject.cpp
|
||||
src/bun.js/bindings/node/crypto/node_crypto_binding.cpp
|
||||
src/bun.js/bindings/node/http/JSConnectionsList.cpp
|
||||
src/bun.js/bindings/node/http/JSConnectionsListConstructor.cpp
|
||||
src/bun.js/bindings/node/http/JSConnectionsListPrototype.cpp
|
||||
src/bun.js/bindings/node/http/JSHTTPParser.cpp
|
||||
src/bun.js/bindings/node/http/JSHTTPParserConstructor.cpp
|
||||
src/bun.js/bindings/node/http/JSHTTPParserPrototype.cpp
|
||||
src/bun.js/bindings/node/http/NodeHTTPParser.cpp
|
||||
src/bun.js/bindings/node/NodeTimers.cpp
|
||||
src/bun.js/bindings/NodeAsyncHooks.cpp
|
||||
src/bun.js/bindings/NodeDirent.cpp
|
||||
src/bun.js/bindings/NodeFetch.cpp
|
||||
src/bun.js/bindings/NodeFSStatBinding.cpp
|
||||
src/bun.js/bindings/NodeFSStatFSBinding.cpp
|
||||
src/bun.js/bindings/NodeHTTP.cpp
|
||||
src/bun.js/bindings/NodeTimerObject.cpp
|
||||
src/bun.js/bindings/NodeTLS.cpp
|
||||
src/bun.js/bindings/NodeURL.cpp
|
||||
src/bun.js/bindings/NodeValidator.cpp
|
||||
src/bun.js/bindings/NodeVM.cpp
|
||||
src/bun.js/bindings/NodeVMModule.cpp
|
||||
src/bun.js/bindings/NodeVMScript.cpp
|
||||
src/bun.js/bindings/NodeVMSourceTextModule.cpp
|
||||
src/bun.js/bindings/NodeVMSyntheticModule.cpp
|
||||
src/bun.js/bindings/NoOpForTesting.cpp
|
||||
src/bun.js/bindings/ObjectBindings.cpp
|
||||
src/bun.js/bindings/objects.cpp
|
||||
src/bun.js/bindings/OsBinding.cpp
|
||||
src/bun.js/bindings/Path.cpp
|
||||
src/bun.js/bindings/ProcessBindingBuffer.cpp
|
||||
src/bun.js/bindings/ProcessBindingConstants.cpp
|
||||
src/bun.js/bindings/ProcessBindingFs.cpp
|
||||
src/bun.js/bindings/ProcessBindingHTTPParser.cpp
|
||||
src/bun.js/bindings/ProcessBindingNatives.cpp
|
||||
src/bun.js/bindings/ProcessBindingTTYWrap.cpp
|
||||
src/bun.js/bindings/ProcessBindingUV.cpp
|
||||
src/bun.js/bindings/ProcessIdentifier.cpp
|
||||
src/bun.js/bindings/RegularExpression.cpp
|
||||
src/bun.js/bindings/S3Error.cpp
|
||||
src/bun.js/bindings/ScriptExecutionContext.cpp
|
||||
src/bun.js/bindings/Serialization.cpp
|
||||
src/bun.js/bindings/ServerRouteList.cpp
|
||||
src/bun.js/bindings/spawn.cpp
|
||||
src/bun.js/bindings/SQLClient.cpp
|
||||
src/bun.js/bindings/sqlite/JSSQLStatement.cpp
|
||||
src/bun.js/bindings/Strong.cpp
|
||||
src/bun.js/bindings/Uint8Array.cpp
|
||||
src/bun.js/bindings/Undici.cpp
|
||||
src/bun.js/bindings/URLDecomposition.cpp
|
||||
src/bun.js/bindings/URLSearchParams.cpp
|
||||
src/bun.js/bindings/UtilInspect.cpp
|
||||
src/bun.js/bindings/v8/node.cpp
|
||||
src/bun.js/bindings/v8/shim/Function.cpp
|
||||
src/bun.js/bindings/v8/shim/FunctionTemplate.cpp
|
||||
src/bun.js/bindings/v8/shim/GlobalInternals.cpp
|
||||
src/bun.js/bindings/v8/shim/Handle.cpp
|
||||
src/bun.js/bindings/v8/shim/HandleScopeBuffer.cpp
|
||||
src/bun.js/bindings/v8/shim/InternalFieldObject.cpp
|
||||
src/bun.js/bindings/v8/shim/Map.cpp
|
||||
src/bun.js/bindings/v8/shim/ObjectTemplate.cpp
|
||||
src/bun.js/bindings/v8/shim/Oddball.cpp
|
||||
src/bun.js/bindings/v8/shim/TaggedPointer.cpp
|
||||
src/bun.js/bindings/v8/v8_api_internal.cpp
|
||||
src/bun.js/bindings/v8/v8_internal.cpp
|
||||
src/bun.js/bindings/v8/V8Array.cpp
|
||||
src/bun.js/bindings/v8/V8Boolean.cpp
|
||||
src/bun.js/bindings/v8/V8Context.cpp
|
||||
src/bun.js/bindings/v8/V8EscapableHandleScope.cpp
|
||||
src/bun.js/bindings/v8/V8EscapableHandleScopeBase.cpp
|
||||
src/bun.js/bindings/v8/V8External.cpp
|
||||
src/bun.js/bindings/v8/V8Function.cpp
|
||||
src/bun.js/bindings/v8/V8FunctionCallbackInfo.cpp
|
||||
src/bun.js/bindings/v8/V8FunctionTemplate.cpp
|
||||
src/bun.js/bindings/v8/V8HandleScope.cpp
|
||||
src/bun.js/bindings/v8/V8Isolate.cpp
|
||||
src/bun.js/bindings/v8/V8Local.cpp
|
||||
src/bun.js/bindings/v8/V8Maybe.cpp
|
||||
src/bun.js/bindings/v8/V8Number.cpp
|
||||
src/bun.js/bindings/v8/V8Object.cpp
|
||||
src/bun.js/bindings/v8/V8ObjectTemplate.cpp
|
||||
src/bun.js/bindings/v8/V8String.cpp
|
||||
src/bun.js/bindings/v8/V8Template.cpp
|
||||
src/bun.js/bindings/v8/V8Value.cpp
|
||||
src/bun.js/bindings/Weak.cpp
|
||||
src/bun.js/bindings/webcore/AbortController.cpp
|
||||
src/bun.js/bindings/webcore/AbortSignal.cpp
|
||||
src/bun.js/bindings/webcore/ActiveDOMObject.cpp
|
||||
src/bun.js/bindings/webcore/BroadcastChannel.cpp
|
||||
src/bun.js/bindings/webcore/BunBroadcastChannelRegistry.cpp
|
||||
src/bun.js/bindings/webcore/CloseEvent.cpp
|
||||
src/bun.js/bindings/webcore/CommonAtomStrings.cpp
|
||||
src/bun.js/bindings/webcore/ContextDestructionObserver.cpp
|
||||
src/bun.js/bindings/webcore/CustomEvent.cpp
|
||||
src/bun.js/bindings/webcore/CustomEventCustom.cpp
|
||||
src/bun.js/bindings/webcore/DOMJITHelpers.cpp
|
||||
src/bun.js/bindings/webcore/ErrorCallback.cpp
|
||||
src/bun.js/bindings/webcore/ErrorEvent.cpp
|
||||
src/bun.js/bindings/webcore/Event.cpp
|
||||
src/bun.js/bindings/webcore/EventContext.cpp
|
||||
src/bun.js/bindings/webcore/EventDispatcher.cpp
|
||||
src/bun.js/bindings/webcore/EventEmitter.cpp
|
||||
src/bun.js/bindings/webcore/EventFactory.cpp
|
||||
src/bun.js/bindings/webcore/EventListenerMap.cpp
|
||||
src/bun.js/bindings/webcore/EventNames.cpp
|
||||
src/bun.js/bindings/webcore/EventPath.cpp
|
||||
src/bun.js/bindings/webcore/EventTarget.cpp
|
||||
src/bun.js/bindings/webcore/EventTargetConcrete.cpp
|
||||
src/bun.js/bindings/webcore/EventTargetFactory.cpp
|
||||
src/bun.js/bindings/webcore/FetchHeaders.cpp
|
||||
src/bun.js/bindings/webcore/HeaderFieldTokenizer.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderField.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderIdentifiers.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderMap.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderNames.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderStrings.cpp
|
||||
src/bun.js/bindings/webcore/HTTPHeaderValues.cpp
|
||||
src/bun.js/bindings/webcore/HTTPParsers.cpp
|
||||
src/bun.js/bindings/webcore/IdentifierEventListenerMap.cpp
|
||||
src/bun.js/bindings/webcore/InternalWritableStream.cpp
|
||||
src/bun.js/bindings/webcore/JSAbortAlgorithm.cpp
|
||||
src/bun.js/bindings/webcore/JSAbortController.cpp
|
||||
src/bun.js/bindings/webcore/JSAbortSignal.cpp
|
||||
src/bun.js/bindings/webcore/JSAbortSignalCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSAddEventListenerOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSBroadcastChannel.cpp
|
||||
src/bun.js/bindings/webcore/JSByteLengthQueuingStrategy.cpp
|
||||
src/bun.js/bindings/webcore/JSCallbackData.cpp
|
||||
src/bun.js/bindings/webcore/JSCloseEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSCookie.cpp
|
||||
src/bun.js/bindings/webcore/JSCookieMap.cpp
|
||||
src/bun.js/bindings/webcore/JSCountQueuingStrategy.cpp
|
||||
src/bun.js/bindings/webcore/JSCustomEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMBindingInternalsBuiltins.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMBuiltinConstructorBase.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConstructorBase.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConvertDate.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConvertNumbers.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConvertStrings.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMConvertWebGL.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMException.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMFormData.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMGuardedObject.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMIterator.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMOperation.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMPromise.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMPromiseDeferred.cpp
|
||||
src/bun.js/bindings/webcore/JSDOMURL.cpp
|
||||
src/bun.js/bindings/webcore/JSErrorCallback.cpp
|
||||
src/bun.js/bindings/webcore/JSErrorEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSErrorEventCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSErrorHandler.cpp
|
||||
src/bun.js/bindings/webcore/JSEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSEventCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSEventDOMJIT.cpp
|
||||
src/bun.js/bindings/webcore/JSEventEmitter.cpp
|
||||
src/bun.js/bindings/webcore/JSEventEmitterCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSEventInit.cpp
|
||||
src/bun.js/bindings/webcore/JSEventListener.cpp
|
||||
src/bun.js/bindings/webcore/JSEventListenerOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSEventModifierInit.cpp
|
||||
src/bun.js/bindings/webcore/JSEventTarget.cpp
|
||||
src/bun.js/bindings/webcore/JSEventTargetCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSEventTargetNode.cpp
|
||||
src/bun.js/bindings/webcore/JSFetchHeaders.cpp
|
||||
src/bun.js/bindings/webcore/JSMessageChannel.cpp
|
||||
src/bun.js/bindings/webcore/JSMessageChannelCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSMessageEvent.cpp
|
||||
src/bun.js/bindings/webcore/JSMessageEventCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSMessagePort.cpp
|
||||
src/bun.js/bindings/webcore/JSMessagePortCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSMIMEBindings.cpp
|
||||
src/bun.js/bindings/webcore/JSMIMEParams.cpp
|
||||
src/bun.js/bindings/webcore/JSMIMEType.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformance.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceEntry.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceEntryCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceMark.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceMarkOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceMeasure.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceMeasureOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceObserver.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceObserverCallback.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceObserverCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceObserverEntryList.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceResourceTiming.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceServerTiming.cpp
|
||||
src/bun.js/bindings/webcore/JSPerformanceTiming.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableByteStreamController.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStream.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamBYOBReader.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamBYOBRequest.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamDefaultReader.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamSink.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamSource.cpp
|
||||
src/bun.js/bindings/webcore/JSReadableStreamSourceCustom.cpp
|
||||
src/bun.js/bindings/webcore/JSStructuredSerializeOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSTextDecoderStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTextEncoder.cpp
|
||||
src/bun.js/bindings/webcore/JSTextEncoderStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTransformStream.cpp
|
||||
src/bun.js/bindings/webcore/JSTransformStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/JSURLSearchParams.cpp
|
||||
src/bun.js/bindings/webcore/JSWebSocket.cpp
|
||||
src/bun.js/bindings/webcore/JSWorker.cpp
|
||||
src/bun.js/bindings/webcore/JSWorkerOptions.cpp
|
||||
src/bun.js/bindings/webcore/JSWritableStream.cpp
|
||||
src/bun.js/bindings/webcore/JSWritableStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/JSWritableStreamDefaultWriter.cpp
|
||||
src/bun.js/bindings/webcore/JSWritableStreamSink.cpp
|
||||
src/bun.js/bindings/webcore/MessageChannel.cpp
|
||||
src/bun.js/bindings/webcore/MessageEvent.cpp
|
||||
src/bun.js/bindings/webcore/MessagePort.cpp
|
||||
src/bun.js/bindings/webcore/MessagePortChannel.cpp
|
||||
src/bun.js/bindings/webcore/MessagePortChannelProvider.cpp
|
||||
src/bun.js/bindings/webcore/MessagePortChannelProviderImpl.cpp
|
||||
src/bun.js/bindings/webcore/MessagePortChannelRegistry.cpp
|
||||
src/bun.js/bindings/webcore/NetworkLoadMetrics.cpp
|
||||
src/bun.js/bindings/webcore/Performance.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceEntry.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceMark.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceMeasure.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceObserver.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceObserverEntryList.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceResourceTiming.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceServerTiming.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceTiming.cpp
|
||||
src/bun.js/bindings/webcore/PerformanceUserTiming.cpp
|
||||
src/bun.js/bindings/webcore/ReadableStream.cpp
|
||||
src/bun.js/bindings/webcore/ReadableStreamDefaultController.cpp
|
||||
src/bun.js/bindings/webcore/ReadableStreamSink.cpp
|
||||
src/bun.js/bindings/webcore/ReadableStreamSource.cpp
|
||||
src/bun.js/bindings/webcore/ResourceTiming.cpp
|
||||
src/bun.js/bindings/webcore/RFC7230.cpp
|
||||
src/bun.js/bindings/webcore/SerializedScriptValue.cpp
|
||||
src/bun.js/bindings/webcore/ServerTiming.cpp
|
||||
src/bun.js/bindings/webcore/ServerTimingParser.cpp
|
||||
src/bun.js/bindings/webcore/StructuredClone.cpp
|
||||
src/bun.js/bindings/webcore/TextEncoder.cpp
|
||||
src/bun.js/bindings/webcore/WebCoreTypedArrayController.cpp
|
||||
src/bun.js/bindings/webcore/WebSocket.cpp
|
||||
src/bun.js/bindings/webcore/Worker.cpp
|
||||
src/bun.js/bindings/webcore/WritableStream.cpp
|
||||
src/bun.js/bindings/webcrypto/CommonCryptoDERUtilities.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CBC.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CBCOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CFB.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CFBOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CTR.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_CTROpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_GCM.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_GCMOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_KW.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmAES_KWOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDH.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDHOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSA.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSAOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmEd25519.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmHKDF.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmHKDFOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmHMAC.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmPBKDF2.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmPBKDF2OpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistry.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistryOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_OAEP.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_OAEPOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSS.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSSOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSAES_PKCS1_v1_5.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSAES_PKCS1_v1_5OpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5OpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA1.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA224.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA256.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA384.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA512.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoAlgorithmX25519.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoDigest.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKey.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyAES.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyEC.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyECOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyHMAC.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyOKP.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyOKPOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyRaw.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyRSA.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyRSAComponents.cpp
|
||||
src/bun.js/bindings/webcrypto/CryptoKeyRSAOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/JSAesCbcCfbParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSAesCtrParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSAesGcmParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSAesKeyParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoAesKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoAlgorithmParameters.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoEcKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoHmacKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoKey.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoKeyPair.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoKeyUsage.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoRsaHashedKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSCryptoRsaKeyAlgorithm.cpp
|
||||
src/bun.js/bindings/webcrypto/JSEcdhKeyDeriveParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSEcdsaParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSEcKeyParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSHkdfParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSHmacKeyParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSJsonWebKey.cpp
|
||||
src/bun.js/bindings/webcrypto/JSPbkdf2Params.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaHashedImportParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaHashedKeyGenParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaKeyGenParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaOaepParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaOtherPrimesInfo.cpp
|
||||
src/bun.js/bindings/webcrypto/JSRsaPssParams.cpp
|
||||
src/bun.js/bindings/webcrypto/JSSubtleCrypto.cpp
|
||||
src/bun.js/bindings/webcrypto/JSX25519Params.cpp
|
||||
src/bun.js/bindings/webcrypto/OpenSSLUtilities.cpp
|
||||
src/bun.js/bindings/webcrypto/PhonyWorkQueue.cpp
|
||||
src/bun.js/bindings/webcrypto/SerializedCryptoKeyWrapOpenSSL.cpp
|
||||
src/bun.js/bindings/webcrypto/SubtleCrypto.cpp
|
||||
src/bun.js/bindings/workaround-missing-symbols.cpp
|
||||
src/bun.js/bindings/wtf-bindings.cpp
|
||||
src/bun.js/bindings/ZigGeneratedCode.cpp
|
||||
src/bun.js/bindings/ZigGlobalObject.cpp
|
||||
src/bun.js/bindings/ZigSourceProvider.cpp
|
||||
src/bun.js/modules/NodeModuleModule.cpp
|
||||
src/bun.js/modules/NodeTTYModule.cpp
|
||||
src/bun.js/modules/NodeUtilTypesModule.cpp
|
||||
src/bun.js/modules/ObjectModule.cpp
|
||||
src/deps/libuwsockets.cpp
|
||||
src/io/io_darwin.cpp
|
||||
src/vm/Semaphore.cpp
|
||||
src/vm/SigintWatcher.cpp
|
||||
21
cmake/sources/JavaScriptCodegenSources.txt
Normal file
@@ -0,0 +1,21 @@
src/codegen/bake-codegen.ts
src/codegen/bindgen-lib-internal.ts
src/codegen/bindgen-lib.ts
src/codegen/bindgen.ts
src/codegen/buildTypeFlag.ts
src/codegen/builtin-parser.ts
src/codegen/bundle-functions.ts
src/codegen/bundle-modules.ts
src/codegen/class-definitions.ts
src/codegen/client-js.ts
src/codegen/cppbind.ts
src/codegen/create-hash-table.ts
src/codegen/generate-classes.ts
src/codegen/generate-compact-string-table.ts
src/codegen/generate-js2native.ts
src/codegen/generate-jssink.ts
src/codegen/generate-node-errors.ts
src/codegen/helpers.ts
src/codegen/internal-module-registry-scanner.ts
src/codegen/replacements.ts
src/codegen/shared-types.ts
164
cmake/sources/JavaScriptSources.txt
Normal file
@@ -0,0 +1,164 @@
|
||||
src/js/builtins.d.ts
|
||||
src/js/builtins/Bake.ts
|
||||
src/js/builtins/BundlerPlugin.ts
|
||||
src/js/builtins/ByteLengthQueuingStrategy.ts
|
||||
src/js/builtins/CommonJS.ts
|
||||
src/js/builtins/ConsoleObject.ts
|
||||
src/js/builtins/CountQueuingStrategy.ts
|
||||
src/js/builtins/Glob.ts
|
||||
src/js/builtins/ImportMetaObject.ts
|
||||
src/js/builtins/Ipc.ts
|
||||
src/js/builtins/JSBufferConstructor.ts
|
||||
src/js/builtins/JSBufferPrototype.ts
|
||||
src/js/builtins/NodeModuleObject.ts
|
||||
src/js/builtins/Peek.ts
|
||||
src/js/builtins/ProcessObjectInternals.ts
|
||||
src/js/builtins/ReadableByteStreamController.ts
|
||||
src/js/builtins/ReadableByteStreamInternals.ts
|
||||
src/js/builtins/ReadableStream.ts
|
||||
src/js/builtins/ReadableStreamBYOBReader.ts
|
||||
src/js/builtins/ReadableStreamBYOBRequest.ts
|
||||
src/js/builtins/ReadableStreamDefaultController.ts
|
||||
src/js/builtins/ReadableStreamDefaultReader.ts
|
||||
src/js/builtins/ReadableStreamInternals.ts
|
||||
src/js/builtins/shell.ts
|
||||
src/js/builtins/StreamInternals.ts
|
||||
src/js/builtins/TextDecoderStream.ts
|
||||
src/js/builtins/TextEncoderStream.ts
|
||||
src/js/builtins/TransformStream.ts
|
||||
src/js/builtins/TransformStreamDefaultController.ts
|
||||
src/js/builtins/TransformStreamInternals.ts
|
||||
src/js/builtins/UtilInspect.ts
|
||||
src/js/builtins/WritableStreamDefaultController.ts
|
||||
src/js/builtins/WritableStreamDefaultWriter.ts
|
||||
src/js/builtins/WritableStreamInternals.ts
|
||||
src/js/bun/ffi.ts
|
||||
src/js/bun/sql.ts
|
||||
src/js/bun/sqlite.ts
|
||||
src/js/internal-for-testing.ts
|
||||
src/js/internal/abort_listener.ts
|
||||
src/js/internal/assert/assertion_error.ts
|
||||
src/js/internal/assert/calltracker.ts
|
||||
src/js/internal/assert/myers_diff.ts
|
||||
src/js/internal/assert/utils.ts
|
||||
src/js/internal/buffer.ts
|
||||
src/js/internal/cluster/child.ts
|
||||
src/js/internal/cluster/isPrimary.ts
|
||||
src/js/internal/cluster/primary.ts
|
||||
src/js/internal/cluster/RoundRobinHandle.ts
|
||||
src/js/internal/cluster/Worker.ts
|
||||
src/js/internal/crypto/x509.ts
|
||||
src/js/internal/debugger.ts
|
||||
src/js/internal/errors.ts
|
||||
src/js/internal/fifo.ts
|
||||
src/js/internal/fixed_queue.ts
|
||||
src/js/internal/freelist.ts
|
||||
src/js/internal/fs/cp-sync.ts
|
||||
src/js/internal/fs/cp.ts
|
||||
src/js/internal/fs/glob.ts
|
||||
src/js/internal/fs/streams.ts
|
||||
src/js/internal/html.ts
|
||||
src/js/internal/http.ts
|
||||
src/js/internal/http/FakeSocket.ts
|
||||
src/js/internal/linkedlist.ts
|
||||
src/js/internal/primordials.js
|
||||
src/js/internal/promisify.ts
|
||||
src/js/internal/shared.ts
|
||||
src/js/internal/stream.promises.ts
|
||||
src/js/internal/stream.ts
|
||||
src/js/internal/streams/add-abort-signal.ts
|
||||
src/js/internal/streams/compose.ts
|
||||
src/js/internal/streams/destroy.ts
|
||||
src/js/internal/streams/duplex.ts
|
||||
src/js/internal/streams/duplexify.ts
|
||||
src/js/internal/streams/duplexpair.ts
|
||||
src/js/internal/streams/end-of-stream.ts
|
||||
src/js/internal/streams/from.ts
|
||||
src/js/internal/streams/lazy_transform.ts
|
||||
src/js/internal/streams/legacy.ts
|
||||
src/js/internal/streams/native-readable.ts
|
||||
src/js/internal/streams/operators.ts
|
||||
src/js/internal/streams/passthrough.ts
|
||||
src/js/internal/streams/pipeline.ts
|
||||
src/js/internal/streams/readable.ts
|
||||
src/js/internal/streams/state.ts
|
||||
src/js/internal/streams/transform.ts
|
||||
src/js/internal/streams/utils.ts
|
||||
src/js/internal/streams/writable.ts
|
||||
src/js/internal/timers.ts
|
||||
src/js/internal/tls.ts
|
||||
src/js/internal/tty.ts
|
||||
src/js/internal/url.ts
|
||||
src/js/internal/util/colors.ts
|
||||
src/js/internal/util/inspect.d.ts
|
||||
src/js/internal/util/inspect.js
|
||||
src/js/internal/util/mime.ts
|
||||
src/js/internal/validators.ts
|
||||
src/js/internal/webstreams_adapters.ts
|
||||
src/js/node/_http_agent.ts
|
||||
src/js/node/_http_client.ts
|
||||
src/js/node/_http_common.ts
|
||||
src/js/node/_http_incoming.ts
|
||||
src/js/node/_http_outgoing.ts
|
||||
src/js/node/_http_server.ts
|
||||
src/js/node/_stream_duplex.ts
|
||||
src/js/node/_stream_passthrough.ts
|
||||
src/js/node/_stream_readable.ts
|
||||
src/js/node/_stream_transform.ts
|
||||
src/js/node/_stream_wrap.ts
|
||||
src/js/node/_stream_writable.ts
|
||||
src/js/node/_tls_common.ts
|
||||
src/js/node/assert.strict.ts
|
||||
src/js/node/assert.ts
|
||||
src/js/node/async_hooks.ts
|
||||
src/js/node/child_process.ts
|
||||
src/js/node/cluster.ts
|
||||
src/js/node/console.ts
|
||||
src/js/node/crypto.ts
|
||||
src/js/node/dgram.ts
|
||||
src/js/node/diagnostics_channel.ts
|
||||
src/js/node/dns.promises.ts
|
||||
src/js/node/dns.ts
|
||||
src/js/node/domain.ts
|
||||
src/js/node/events.ts
|
||||
src/js/node/fs.promises.ts
|
||||
src/js/node/fs.ts
|
||||
src/js/node/http.ts
|
||||
src/js/node/http2.ts
|
||||
src/js/node/https.ts
|
||||
src/js/node/inspector.ts
|
||||
src/js/node/net.ts
|
||||
src/js/node/os.ts
|
||||
src/js/node/path.posix.ts
|
||||
src/js/node/path.ts
|
||||
src/js/node/path.win32.ts
|
||||
src/js/node/perf_hooks.ts
|
||||
src/js/node/punycode.ts
|
||||
src/js/node/querystring.ts
|
||||
src/js/node/readline.promises.ts
|
||||
src/js/node/readline.ts
|
||||
src/js/node/repl.ts
|
||||
src/js/node/stream.consumers.ts
|
||||
src/js/node/stream.promises.ts
|
||||
src/js/node/stream.ts
|
||||
src/js/node/stream.web.ts
|
||||
src/js/node/test.ts
|
||||
src/js/node/timers.promises.ts
|
||||
src/js/node/timers.ts
|
||||
src/js/node/tls.ts
|
||||
src/js/node/trace_events.ts
|
||||
src/js/node/tty.ts
|
||||
src/js/node/url.ts
|
||||
src/js/node/util.ts
|
||||
src/js/node/v8.ts
|
||||
src/js/node/vm.ts
|
||||
src/js/node/wasi.ts
|
||||
src/js/node/worker_threads.ts
|
||||
src/js/node/zlib.ts
|
||||
src/js/private.d.ts
|
||||
src/js/thirdparty/isomorphic-fetch.ts
|
||||
src/js/thirdparty/node-fetch.ts
|
||||
src/js/thirdparty/undici.js
|
||||
src/js/thirdparty/vercel_fetch.js
|
||||
src/js/thirdparty/ws.js
|
||||
src/js/wasi-runner.js
|
||||
24
cmake/sources/NodeFallbacksSources.txt
Normal file
@@ -0,0 +1,24 @@
src/node-fallbacks/assert.js
src/node-fallbacks/buffer.js
src/node-fallbacks/console.js
src/node-fallbacks/constants.js
src/node-fallbacks/crypto.js
src/node-fallbacks/domain.js
src/node-fallbacks/events.js
src/node-fallbacks/http.js
src/node-fallbacks/https.js
src/node-fallbacks/net.js
src/node-fallbacks/os.js
src/node-fallbacks/path.js
src/node-fallbacks/process.js
src/node-fallbacks/punycode.js
src/node-fallbacks/querystring.js
src/node-fallbacks/stream.js
src/node-fallbacks/string_decoder.js
src/node-fallbacks/sys.js
src/node-fallbacks/timers.js
src/node-fallbacks/timers.promises.js
src/node-fallbacks/tty.js
src/node-fallbacks/url.js
src/node-fallbacks/util.js
src/node-fallbacks/zlib.js
25
cmake/sources/ZigGeneratedClassesSources.txt
Normal file
@@ -0,0 +1,25 @@
src/bun.js/api/BunObject.classes.ts
src/bun.js/api/crypto.classes.ts
src/bun.js/api/ffi.classes.ts
src/bun.js/api/filesystem_router.classes.ts
src/bun.js/api/Glob.classes.ts
src/bun.js/api/h2.classes.ts
src/bun.js/api/html_rewriter.classes.ts
src/bun.js/api/JSBundler.classes.ts
src/bun.js/api/postgres.classes.ts
src/bun.js/api/ResumableSink.classes.ts
src/bun.js/api/S3Client.classes.ts
src/bun.js/api/S3Stat.classes.ts
src/bun.js/api/server.classes.ts
src/bun.js/api/Shell.classes.ts
src/bun.js/api/ShellArgs.classes.ts
src/bun.js/api/sockets.classes.ts
src/bun.js/api/sourcemap.classes.ts
src/bun.js/api/streams.classes.ts
src/bun.js/api/valkey.classes.ts
src/bun.js/api/zlib.classes.ts
src/bun.js/node/node.classes.ts
src/bun.js/resolve_message.classes.ts
src/bun.js/test/jest.classes.ts
src/bun.js/webcore/encoding.classes.ts
src/bun.js/webcore/response.classes.ts
893
cmake/sources/ZigSources.txt
Normal file
@@ -0,0 +1,893 @@
|
||||
src/allocators.zig
|
||||
src/allocators/AllocationScope.zig
|
||||
src/allocators/basic.zig
|
||||
src/allocators/LinuxMemFdAllocator.zig
|
||||
src/allocators/MaxHeapAllocator.zig
|
||||
src/allocators/MemoryReportingAllocator.zig
|
||||
src/allocators/mimalloc.zig
|
||||
src/allocators/MimallocArena.zig
|
||||
src/allocators/NullableAllocator.zig
|
||||
src/analytics.zig
|
||||
src/analytics/schema.zig
|
||||
src/api/schema.zig
|
||||
src/ast.zig
|
||||
src/ast/Ast.zig
|
||||
src/ast/ASTMemoryAllocator.zig
|
||||
src/ast/B.zig
|
||||
src/ast/base.zig
|
||||
src/ast/Binding.zig
|
||||
src/ast/BundledAst.zig
|
||||
src/ast/CharFreq.zig
|
||||
src/ast/E.zig
|
||||
src/ast/Expr.zig
|
||||
src/ast/G.zig
|
||||
src/ast/Macro.zig
|
||||
src/ast/NewStore.zig
|
||||
src/ast/Op.zig
|
||||
src/ast/S.zig
|
||||
src/ast/Scope.zig
|
||||
src/ast/ServerComponentBoundary.zig
|
||||
src/ast/Stmt.zig
|
||||
src/ast/Symbol.zig
|
||||
src/ast/TS.zig
|
||||
src/ast/UseDirective.zig
|
||||
src/async/posix_event_loop.zig
|
||||
src/async/stub_event_loop.zig
|
||||
src/async/windows_event_loop.zig
|
||||
src/bake.zig
|
||||
src/bake/DevServer.zig
|
||||
src/bake/DevServer/Assets.zig
|
||||
src/bake/DevServer/DirectoryWatchStore.zig
|
||||
src/bake/DevServer/ErrorReportRequest.zig
|
||||
src/bake/DevServer/HmrSocket.zig
|
||||
src/bake/DevServer/HotReloadEvent.zig
|
||||
src/bake/DevServer/IncrementalGraph.zig
|
||||
src/bake/DevServer/memory_cost.zig
|
||||
src/bake/DevServer/PackedMap.zig
|
||||
src/bake/DevServer/RouteBundle.zig
|
||||
src/bake/DevServer/SerializedFailure.zig
|
||||
src/bake/DevServer/SourceMapStore.zig
|
||||
src/bake/DevServer/WatcherAtomics.zig
|
||||
src/bake/FrameworkRouter.zig
|
||||
src/bake/production.zig
|
||||
src/base64/base64.zig
|
||||
src/bits.zig
|
||||
src/boringssl.zig
|
||||
src/brotli.zig
|
||||
src/btjs.zig
|
||||
src/bun.js.zig
|
||||
src/bun.js/api.zig
|
||||
src/bun.js/api/bun/dns.zig
|
||||
src/bun.js/api/bun/h2_frame_parser.zig
|
||||
src/bun.js/api/bun/lshpack.zig
|
||||
src/bun.js/api/bun/process.zig
|
||||
src/bun.js/api/bun/socket.zig
|
||||
src/bun.js/api/bun/socket/Handlers.zig
|
||||
src/bun.js/api/bun/socket/Listener.zig
|
||||
src/bun.js/api/bun/socket/SocketAddress.zig
|
||||
src/bun.js/api/bun/socket/tls_socket_functions.zig
|
||||
src/bun.js/api/bun/socket/WindowsNamedPipeContext.zig
|
||||
src/bun.js/api/bun/spawn.zig
|
||||
src/bun.js/api/bun/spawn/stdio.zig
|
||||
src/bun.js/api/bun/ssl_wrapper.zig
|
||||
src/bun.js/api/bun/subprocess.zig
|
||||
src/bun.js/api/bun/udp_socket.zig
|
||||
src/bun.js/api/bun/x509.zig
|
||||
src/bun.js/api/BunObject.zig
|
||||
src/bun.js/api/crypto.zig
|
||||
src/bun.js/api/crypto/CryptoHasher.zig
|
||||
src/bun.js/api/crypto/EVP.zig
|
||||
src/bun.js/api/crypto/HMAC.zig
|
||||
src/bun.js/api/crypto/PasswordObject.zig
|
||||
src/bun.js/api/crypto/PBKDF2.zig
|
||||
src/bun.js/api/ffi.zig
|
||||
src/bun.js/api/FFIObject.zig
|
||||
src/bun.js/api/filesystem_router.zig
|
||||
src/bun.js/api/glob.zig
|
||||
src/bun.js/api/HashObject.zig
|
||||
src/bun.js/api/html_rewriter.zig
|
||||
src/bun.js/api/JSBundler.zig
|
||||
src/bun.js/api/JSTranspiler.zig
|
||||
src/bun.js/api/server.zig
|
||||
src/bun.js/api/server/AnyRequestContext.zig
|
||||
src/bun.js/api/server/FileRoute.zig
|
||||
src/bun.js/api/server/HTMLBundle.zig
|
||||
src/bun.js/api/server/HTTPStatusText.zig
|
||||
src/bun.js/api/server/InspectorBunFrontendDevServerAgent.zig
|
||||
src/bun.js/api/server/NodeHTTPResponse.zig
|
||||
src/bun.js/api/server/RequestContext.zig
|
||||
src/bun.js/api/server/ServerConfig.zig
|
||||
src/bun.js/api/server/ServerWebSocket.zig
|
||||
src/bun.js/api/server/SSLConfig.zig
|
||||
src/bun.js/api/server/StaticRoute.zig
|
||||
src/bun.js/api/server/WebSocketServerContext.zig
|
||||
src/bun.js/api/streams.classes.zig
|
||||
src/bun.js/api/Timer.zig
|
||||
src/bun.js/api/Timer/EventLoopTimer.zig
|
||||
src/bun.js/api/Timer/ImmediateObject.zig
|
||||
src/bun.js/api/Timer/TimeoutObject.zig
|
||||
src/bun.js/api/Timer/TimerObjectInternals.zig
|
||||
src/bun.js/api/Timer/WTFTimer.zig
|
||||
src/bun.js/api/TOMLObject.zig
|
||||
src/bun.js/api/UnsafeObject.zig
|
||||
src/bun.js/bindgen_test.zig
|
||||
src/bun.js/bindings/AbortSignal.zig
|
||||
src/bun.js/bindings/AnyPromise.zig
|
||||
src/bun.js/bindings/bun-simdutf.zig
|
||||
src/bun.js/bindings/CachedBytecode.zig
|
||||
src/bun.js/bindings/CallFrame.zig
|
||||
src/bun.js/bindings/CatchScope.zig
|
||||
src/bun.js/bindings/codegen.zig
|
||||
src/bun.js/bindings/CommonAbortReason.zig
|
||||
src/bun.js/bindings/CommonStrings.zig
|
||||
src/bun.js/bindings/CPUFeatures.zig
|
||||
src/bun.js/bindings/CustomGetterSetter.zig
|
||||
src/bun.js/bindings/DeferredError.zig
|
||||
src/bun.js/bindings/DOMFormData.zig
|
||||
src/bun.js/bindings/DOMURL.zig
|
||||
src/bun.js/bindings/EncodedJSValue.zig
|
||||
src/bun.js/bindings/Errorable.zig
|
||||
src/bun.js/bindings/ErrorCode.zig
|
||||
src/bun.js/bindings/EventType.zig
|
||||
src/bun.js/bindings/Exception.zig
|
||||
src/bun.js/bindings/FetchHeaders.zig
|
||||
src/bun.js/bindings/FFI.zig
|
||||
src/bun.js/bindings/generated_classes_list.zig
|
||||
src/bun.js/bindings/GetterSetter.zig
|
||||
src/bun.js/bindings/HTTPServerAgent.zig
|
||||
src/bun.js/bindings/JSArray.zig
|
||||
src/bun.js/bindings/JSArrayIterator.zig
|
||||
src/bun.js/bindings/JSBigInt.zig
|
||||
src/bun.js/bindings/JSCell.zig
|
||||
src/bun.js/bindings/JSErrorCode.zig
|
||||
src/bun.js/bindings/JSFunction.zig
|
||||
src/bun.js/bindings/JSGlobalObject.zig
|
||||
src/bun.js/bindings/JSInternalPromise.zig
|
||||
src/bun.js/bindings/JSMap.zig
|
||||
src/bun.js/bindings/JSModuleLoader.zig
|
||||
src/bun.js/bindings/JSObject.zig
|
||||
src/bun.js/bindings/JSPromise.zig
|
||||
src/bun.js/bindings/JSPromiseRejectionOperation.zig
|
||||
src/bun.js/bindings/JSPropertyIterator.zig
|
||||
src/bun.js/bindings/JSRef.zig
|
||||
src/bun.js/bindings/JSRuntimeType.zig
|
||||
src/bun.js/bindings/JSString.zig
|
||||
src/bun.js/bindings/JSType.zig
|
||||
src/bun.js/bindings/JSUint8Array.zig
|
||||
src/bun.js/bindings/JSValue.zig
|
||||
src/bun.js/bindings/NodeModuleModule.zig
|
||||
src/bun.js/bindings/RegularExpression.zig
|
||||
src/bun.js/bindings/ResolvedSource.zig
|
||||
src/bun.js/bindings/ScriptExecutionStatus.zig
|
||||
src/bun.js/bindings/sizes.zig
|
||||
src/bun.js/bindings/SourceProvider.zig
|
||||
src/bun.js/bindings/SourceType.zig
|
||||
src/bun.js/bindings/static_export.zig
|
||||
src/bun.js/bindings/SystemError.zig
|
||||
src/bun.js/bindings/URL.zig
|
||||
src/bun.js/bindings/URLSearchParams.zig
|
||||
src/bun.js/bindings/VM.zig
|
||||
src/bun.js/bindings/WTF.zig
|
||||
src/bun.js/bindings/ZigErrorType.zig
|
||||
src/bun.js/bindings/ZigException.zig
|
||||
src/bun.js/bindings/ZigStackFrame.zig
|
||||
src/bun.js/bindings/ZigStackFrameCode.zig
|
||||
src/bun.js/bindings/ZigStackFramePosition.zig
|
||||
src/bun.js/bindings/ZigStackTrace.zig
|
||||
src/bun.js/bindings/ZigString.zig
|
||||
src/bun.js/BuildMessage.zig
|
||||
src/bun.js/config.zig
|
||||
src/bun.js/ConsoleObject.zig
|
||||
src/bun.js/Counters.zig
|
||||
src/bun.js/Debugger.zig
|
||||
src/bun.js/event_loop.zig
|
||||
src/bun.js/event_loop/AnyEventLoop.zig
|
||||
src/bun.js/event_loop/AnyTask.zig
|
||||
src/bun.js/event_loop/AnyTaskWithExtraContext.zig
|
||||
src/bun.js/event_loop/ConcurrentPromiseTask.zig
|
||||
src/bun.js/event_loop/ConcurrentTask.zig
|
||||
src/bun.js/event_loop/CppTask.zig
|
||||
src/bun.js/event_loop/DeferredTaskQueue.zig
|
||||
src/bun.js/event_loop/EventLoopHandle.zig
|
||||
src/bun.js/event_loop/GarbageCollectionController.zig
|
||||
src/bun.js/event_loop/JSCScheduler.zig
|
||||
src/bun.js/event_loop/ManagedTask.zig
|
||||
src/bun.js/event_loop/MiniEventLoop.zig
|
||||
src/bun.js/event_loop/PosixSignalHandle.zig
|
||||
src/bun.js/event_loop/Task.zig
|
||||
src/bun.js/event_loop/WorkTask.zig
|
||||
src/bun.js/hot_reloader.zig
|
||||
src/bun.js/ipc.zig
|
||||
src/bun.js/javascript_core_c_api.zig
|
||||
src/bun.js/jsc.zig
|
||||
src/bun.js/jsc/array_buffer.zig
|
||||
src/bun.js/jsc/dom_call.zig
|
||||
src/bun.js/jsc/host_fn.zig
|
||||
src/bun.js/jsc/RefString.zig
|
||||
src/bun.js/ModuleLoader.zig
|
||||
src/bun.js/node.zig
|
||||
src/bun.js/node/assert/myers_diff.zig
|
||||
src/bun.js/node/buffer.zig
|
||||
src/bun.js/node/dir_iterator.zig
|
||||
src/bun.js/node/fs_events.zig
|
||||
src/bun.js/node/net/BlockList.zig
|
||||
src/bun.js/node/node_assert_binding.zig
|
||||
src/bun.js/node/node_assert.zig
|
||||
src/bun.js/node/node_cluster_binding.zig
|
||||
src/bun.js/node/node_crypto_binding.zig
|
||||
src/bun.js/node/node_error_binding.zig
|
||||
src/bun.js/node/node_fs_binding.zig
|
||||
src/bun.js/node/node_fs_constant.zig
|
||||
src/bun.js/node/node_fs_stat_watcher.zig
|
||||
src/bun.js/node/node_fs_watcher.zig
|
||||
src/bun.js/node/node_fs.zig
|
||||
src/bun.js/node/node_http_binding.zig
|
||||
src/bun.js/node/node_net_binding.zig
|
||||
src/bun.js/node/node_os.zig
|
||||
src/bun.js/node/node_process.zig
|
||||
src/bun.js/node/node_util_binding.zig
|
||||
src/bun.js/node/node_zlib_binding.zig
|
||||
src/bun.js/node/nodejs_error_code.zig
|
||||
src/bun.js/node/os/constants.zig
|
||||
src/bun.js/node/path_watcher.zig
|
||||
src/bun.js/node/path.zig
|
||||
src/bun.js/node/Stat.zig
|
||||
src/bun.js/node/StatFS.zig
|
||||
src/bun.js/node/time_like.zig
|
||||
src/bun.js/node/types.zig
|
||||
src/bun.js/node/util/parse_args_utils.zig
|
||||
src/bun.js/node/util/parse_args.zig
|
||||
src/bun.js/node/util/validators.zig
|
||||
src/bun.js/node/win_watcher.zig
|
||||
src/bun.js/node/zlib/NativeBrotli.zig
|
||||
src/bun.js/node/zlib/NativeZlib.zig
|
||||
src/bun.js/node/zlib/NativeZstd.zig
|
||||
src/bun.js/ProcessAutoKiller.zig
|
||||
src/bun.js/rare_data.zig
|
||||
src/bun.js/ResolveMessage.zig
|
||||
src/bun.js/RuntimeTranspilerCache.zig
|
||||
src/bun.js/SavedSourceMap.zig
|
||||
src/bun.js/Strong.zig
|
||||
src/bun.js/test/diff_format.zig
|
||||
src/bun.js/test/expect.zig
|
||||
src/bun.js/test/jest.zig
|
||||
src/bun.js/test/pretty_format.zig
|
||||
src/bun.js/test/snapshot.zig
|
||||
src/bun.js/test/test.zig
|
||||
src/bun.js/uuid.zig
|
||||
src/bun.js/virtual_machine_exports.zig
|
||||
src/bun.js/VirtualMachine.zig
|
||||
src/bun.js/Weak.zig
|
||||
src/bun.js/web_worker.zig
|
||||
src/bun.js/webcore.zig
|
||||
src/bun.js/webcore/ArrayBufferSink.zig
|
||||
src/bun.js/webcore/AutoFlusher.zig
|
||||
src/bun.js/webcore/Blob.zig
|
||||
src/bun.js/webcore/blob/copy_file.zig
|
||||
src/bun.js/webcore/blob/read_file.zig
|
||||
src/bun.js/webcore/blob/Store.zig
|
||||
src/bun.js/webcore/blob/write_file.zig
|
||||
src/bun.js/webcore/Body.zig
|
||||
src/bun.js/webcore/ByteBlobLoader.zig
|
||||
src/bun.js/webcore/ByteStream.zig
|
||||
src/bun.js/webcore/CookieMap.zig
|
||||
src/bun.js/webcore/Crypto.zig
|
||||
src/bun.js/webcore/encoding.zig
|
||||
src/bun.js/webcore/EncodingLabel.zig
|
||||
src/bun.js/webcore/fetch.zig
|
||||
src/bun.js/webcore/FileReader.zig
|
||||
src/bun.js/webcore/FileSink.zig
|
||||
src/bun.js/webcore/ObjectURLRegistry.zig
|
||||
src/bun.js/webcore/prompt.zig
|
||||
src/bun.js/webcore/ReadableStream.zig
|
||||
src/bun.js/webcore/Request.zig
|
||||
src/bun.js/webcore/Response.zig
|
||||
src/bun.js/webcore/ResumableSink.zig
|
||||
src/bun.js/webcore/S3Client.zig
|
||||
src/bun.js/webcore/S3File.zig
|
||||
src/bun.js/webcore/S3Stat.zig
|
||||
src/bun.js/webcore/ScriptExecutionContext.zig
|
||||
src/bun.js/webcore/Sink.zig
|
||||
src/bun.js/webcore/streams.zig
|
||||
src/bun.js/webcore/TextDecoder.zig
|
||||
src/bun.js/webcore/TextEncoder.zig
|
||||
src/bun.js/webcore/TextEncoderStreamEncoder.zig
|
||||
src/bun.zig
|
||||
src/bundler/AstBuilder.zig
|
||||
src/bundler/bundle_v2.zig
|
||||
src/bundler/BundleThread.zig
|
||||
src/bundler/Chunk.zig
|
||||
src/bundler/DeferredBatchTask.zig
|
||||
src/bundler/entry_points.zig
|
||||
src/bundler/Graph.zig
|
||||
src/bundler/HTMLImportManifest.zig
|
||||
src/bundler/linker_context/computeChunks.zig
|
||||
src/bundler/linker_context/computeCrossChunkDependencies.zig
|
||||
src/bundler/linker_context/convertStmtsForChunk.zig
|
||||
src/bundler/linker_context/convertStmtsForChunkForDevServer.zig
|
||||
src/bundler/linker_context/doStep5.zig
|
||||
src/bundler/linker_context/findAllImportedPartsInJSOrder.zig
|
||||
src/bundler/linker_context/findImportedCSSFilesInJSOrder.zig
|
||||
src/bundler/linker_context/findImportedFilesInCSSOrder.zig
|
||||
src/bundler/linker_context/generateChunksInParallel.zig
|
||||
src/bundler/linker_context/generateCodeForFileInChunkJS.zig
|
||||
src/bundler/linker_context/generateCodeForLazyExport.zig
|
||||
src/bundler/linker_context/generateCompileResultForCssChunk.zig
|
||||
src/bundler/linker_context/generateCompileResultForHtmlChunk.zig
|
||||
src/bundler/linker_context/generateCompileResultForJSChunk.zig
|
||||
src/bundler/linker_context/OutputFileListBuilder.zig
|
||||
src/bundler/linker_context/postProcessCSSChunk.zig
|
||||
src/bundler/linker_context/postProcessHTMLChunk.zig
|
||||
src/bundler/linker_context/postProcessJSChunk.zig
|
||||
src/bundler/linker_context/prepareCssAstsForChunk.zig
|
||||
src/bundler/linker_context/renameSymbolsInChunk.zig
|
||||
src/bundler/linker_context/scanImportsAndExports.zig
|
||||
src/bundler/linker_context/StaticRouteVisitor.zig
|
||||
src/bundler/linker_context/writeOutputFilesToDisk.zig
|
||||
src/bundler/LinkerContext.zig
|
||||
src/bundler/LinkerGraph.zig
|
||||
src/bundler/ParseTask.zig
|
||||
src/bundler/ServerComponentParseTask.zig
|
||||
src/bundler/ThreadPool.zig
|
||||
src/bunfig.zig
|
||||
src/cache.zig
|
||||
src/ci_info.zig
|
||||
src/cli.zig
|
||||
src/cli/add_command.zig
|
||||
src/cli/add_completions.zig
|
||||
src/cli/Arguments.zig
|
||||
src/cli/audit_command.zig
|
||||
src/cli/build_command.zig
|
||||
src/cli/bunx_command.zig
|
||||
src/cli/colon_list_type.zig
|
||||
src/cli/create_command.zig
|
||||
src/cli/discord_command.zig
|
||||
src/cli/exec_command.zig
|
||||
src/cli/filter_arg.zig
|
||||
src/cli/filter_run.zig
|
||||
src/cli/init_command.zig
|
||||
src/cli/install_command.zig
|
||||
src/cli/install_completions_command.zig
|
||||
src/cli/link_command.zig
|
||||
src/cli/list-of-yarn-commands.zig
|
||||
src/cli/outdated_command.zig
|
||||
src/cli/pack_command.zig
|
||||
src/cli/package_manager_command.zig
|
||||
src/cli/patch_command.zig
|
||||
src/cli/patch_commit_command.zig
|
||||
src/cli/pm_pkg_command.zig
|
||||
src/cli/pm_trusted_command.zig
|
||||
src/cli/pm_version_command.zig
|
||||
src/cli/pm_view_command.zig
|
||||
src/cli/pm_why_command.zig
|
||||
src/cli/publish_command.zig
|
||||
src/cli/remove_command.zig
|
||||
src/cli/run_command.zig
|
||||
src/cli/shell_completions.zig
|
||||
src/cli/test_command.zig
|
||||
src/cli/test/Scanner.zig
|
||||
src/cli/unlink_command.zig
|
||||
src/cli/update_command.zig
|
||||
src/cli/update_interactive_command.zig
|
||||
src/cli/upgrade_command.zig
|
||||
src/cli/why_command.zig
|
||||
src/codegen/process_windows_translate_c.zig
|
||||
src/collections.zig
|
||||
src/collections/baby_list.zig
|
||||
src/collections/bit_set.zig
|
||||
src/collections/hive_array.zig
|
||||
src/collections/multi_array_list.zig
|
||||
src/collections/safety.zig
|
||||
src/compile_target.zig
|
||||
src/comptime_string_map.zig
|
||||
src/copy_file.zig
|
||||
src/crash_handler.zig
|
||||
src/create/SourceFileProjectGenerator.zig
|
||||
src/csrf.zig
|
||||
src/css_scanner.zig
|
||||
src/css/compat.zig
|
||||
src/css/context.zig
|
||||
src/css/css_internals.zig
|
||||
src/css/css_modules.zig
|
||||
src/css/css_parser.zig
|
||||
src/css/declaration.zig
|
||||
src/css/dependencies.zig
|
||||
src/css/error.zig
|
||||
src/css/generics.zig
|
||||
src/css/logical.zig
|
||||
src/css/media_query.zig
|
||||
src/css/prefixes.zig
|
||||
src/css/printer.zig
|
||||
src/css/properties/align.zig
|
||||
src/css/properties/animation.zig
|
||||
src/css/properties/background.zig
|
||||
src/css/properties/border_image.zig
|
||||
src/css/properties/border_radius.zig
|
||||
src/css/properties/border.zig
|
||||
src/css/properties/box_shadow.zig
|
||||
src/css/properties/contain.zig
|
||||
src/css/properties/css_modules.zig
|
||||
src/css/properties/custom.zig
|
||||
src/css/properties/display.zig
|
||||
src/css/properties/effects.zig
|
||||
src/css/properties/flex.zig
|
||||
src/css/properties/font.zig
|
||||
src/css/properties/grid.zig
|
||||
src/css/properties/list.zig
|
||||
src/css/properties/margin_padding.zig
|
||||
src/css/properties/masking.zig
|
||||
src/css/properties/outline.zig
|
||||
src/css/properties/overflow.zig
|
||||
src/css/properties/position.zig
|
||||
src/css/properties/prefix_handler.zig
|
||||
src/css/properties/properties_generated.zig
|
||||
src/css/properties/properties_impl.zig
|
||||
src/css/properties/properties.zig
|
||||
src/css/properties/shape.zig
|
||||
src/css/properties/size.zig
|
||||
src/css/properties/svg.zig
|
||||
src/css/properties/text.zig
|
||||
src/css/properties/transform.zig
|
||||
src/css/properties/transition.zig
|
||||
src/css/properties/ui.zig
|
||||
src/css/rules/container.zig
|
||||
src/css/rules/counter_style.zig
|
||||
src/css/rules/custom_media.zig
|
||||
src/css/rules/document.zig
|
||||
src/css/rules/font_face.zig
|
||||
src/css/rules/font_palette_values.zig
|
||||
src/css/rules/import.zig
|
||||
src/css/rules/keyframes.zig
|
||||
src/css/rules/layer.zig
|
||||
src/css/rules/media.zig
|
||||
src/css/rules/namespace.zig
|
||||
src/css/rules/nesting.zig
|
||||
src/css/rules/page.zig
|
||||
src/css/rules/property.zig
|
||||
src/css/rules/rules.zig
|
||||
src/css/rules/scope.zig
|
||||
src/css/rules/starting_style.zig
|
||||
src/css/rules/style.zig
|
||||
src/css/rules/supports.zig
|
||||
src/css/rules/tailwind.zig
|
||||
src/css/rules/unknown.zig
|
||||
src/css/rules/viewport.zig
|
||||
src/css/selectors/builder.zig
|
||||
src/css/selectors/parser.zig
|
||||
src/css/selectors/selector.zig
|
||||
src/css/small_list.zig
|
||||
src/css/sourcemap.zig
|
||||
src/css/targets.zig
|
||||
src/css/values/alpha.zig
|
||||
src/css/values/angle.zig
|
||||
src/css/values/calc.zig
|
||||
src/css/values/color_generated.zig
|
||||
src/css/values/color_js.zig
|
||||
src/css/values/color.zig
|
||||
src/css/values/css_string.zig
|
||||
src/css/values/easing.zig
|
||||
src/css/values/gradient.zig
|
||||
src/css/values/ident.zig
|
||||
src/css/values/image.zig
|
||||
src/css/values/length.zig
|
||||
src/css/values/number.zig
|
||||
src/css/values/percentage.zig
|
||||
src/css/values/position.zig
|
||||
src/css/values/ratio.zig
|
||||
src/css/values/rect.zig
|
||||
src/css/values/resolution.zig
|
||||
src/css/values/size.zig
|
||||
src/css/values/syntax.zig
|
||||
src/css/values/time.zig
|
||||
src/css/values/url.zig
|
||||
src/css/values/values.zig
|
||||
src/darwin.zig
|
||||
src/defines-table.zig
|
||||
src/defines.zig
|
||||
src/deps/boringssl.translated.zig
|
||||
src/deps/brotli_c.zig
|
||||
src/deps/c_ares.zig
|
||||
src/deps/diffz/DiffMatchPatch.zig
|
||||
src/deps/libdeflate.zig
|
||||
src/deps/libuv.zig
|
||||
src/deps/lol-html.zig
|
||||
src/deps/picohttp.zig
|
||||
src/deps/picohttpparser.zig
|
||||
src/deps/tcc.zig
|
||||
src/deps/uws.zig
|
||||
src/deps/uws/App.zig
|
||||
src/deps/uws/BodyReaderMixin.zig
|
||||
src/deps/uws/ConnectingSocket.zig
|
||||
src/deps/uws/InternalLoopData.zig
|
||||
src/deps/uws/ListenSocket.zig
|
||||
src/deps/uws/Loop.zig
|
||||
src/deps/uws/Request.zig
|
||||
src/deps/uws/Response.zig
|
||||
src/deps/uws/socket.zig
|
||||
src/deps/uws/SocketContext.zig
|
||||
src/deps/uws/Timer.zig
|
||||
src/deps/uws/udp.zig
|
||||
src/deps/uws/UpgradedDuplex.zig
|
||||
src/deps/uws/us_socket_t.zig
|
||||
src/deps/uws/WebSocket.zig
|
||||
src/deps/uws/WindowsNamedPipe.zig
|
||||
src/deps/zig-clap/clap.zig
|
||||
src/deps/zig-clap/clap/args.zig
|
||||
src/deps/zig-clap/clap/comptime.zig
|
||||
src/deps/zig-clap/clap/streaming.zig
|
||||
src/deps/zlib.posix.zig
|
||||
src/deps/zlib.shared.zig
|
||||
src/deps/zlib.win32.zig
|
||||
src/deps/zstd.zig
|
||||
src/dir.zig
|
||||
src/dns.zig
|
||||
src/env_loader.zig
|
||||
src/env.zig
|
||||
src/errno/darwin_errno.zig
|
||||
src/errno/linux_errno.zig
|
||||
src/errno/windows_errno.zig
|
||||
src/fd.zig
|
||||
src/feature_flags.zig
|
||||
src/fmt.zig
|
||||
src/fs.zig
|
||||
src/fs/stat_hash.zig
|
||||
src/generated_perf_trace_events.zig
|
||||
src/generated_versions_list.zig
|
||||
src/glob.zig
|
||||
src/glob/GlobWalker.zig
|
||||
src/glob/match.zig
|
||||
src/Global.zig
|
||||
src/heap_breakdown.zig
|
||||
src/highway.zig
|
||||
src/hmac.zig
|
||||
src/HTMLScanner.zig
|
||||
src/http.zig
|
||||
src/http/AsyncHTTP.zig
|
||||
src/http/CertificateInfo.zig
|
||||
src/http/Decompressor.zig
|
||||
src/http/Encoding.zig
|
||||
src/http/FetchRedirect.zig
|
||||
src/http/HeaderBuilder.zig
|
||||
src/http/Headers.zig
|
||||
src/http/HTTPCertError.zig
|
||||
src/http/HTTPContext.zig
|
||||
src/http/HTTPRequestBody.zig
|
||||
src/http/HTTPThread.zig
|
||||
src/http/InitError.zig
|
||||
src/http/InternalState.zig
|
||||
src/http/Method.zig
|
||||
src/http/mime_type_list_enum.zig
|
||||
src/http/MimeType.zig
|
||||
src/http/ProxyTunnel.zig
|
||||
src/http/SendFile.zig
|
||||
src/http/Signals.zig
|
||||
src/http/ThreadSafeStreamBuffer.zig
|
||||
src/http/URLPath.zig
|
||||
src/http/websocket_client.zig
|
||||
src/http/websocket_client/CppWebSocket.zig
|
||||
src/http/websocket_client/WebSocketDeflate.zig
|
||||
src/http/websocket_client/WebSocketUpgradeClient.zig
|
||||
src/http/websocket_http_client.zig
|
||||
src/http/websocket.zig
|
||||
src/http/zlib.zig
|
||||
src/identity_context.zig
|
||||
src/import_record.zig
|
||||
src/ini.zig
|
||||
src/install/bin.zig
|
||||
src/install/dependency.zig
|
||||
src/install/ExternalSlice.zig
|
||||
src/install/extract_tarball.zig
|
||||
src/install/hoisted_install.zig
|
||||
src/install/install_binding.zig
|
||||
src/install/install.zig
|
||||
src/install/integrity.zig
|
||||
src/install/isolated_install.zig
|
||||
src/install/isolated_install/FileCopier.zig
|
||||
src/install/isolated_install/Hardlinker.zig
|
||||
src/install/isolated_install/Installer.zig
|
||||
src/install/isolated_install/Store.zig
|
||||
src/install/isolated_install/Symlinker.zig
|
||||
src/install/lifecycle_script_runner.zig
|
||||
src/install/lockfile.zig
|
||||
src/install/lockfile/Buffers.zig
|
||||
src/install/lockfile/bun.lock.zig
|
||||
src/install/lockfile/bun.lockb.zig
|
||||
src/install/lockfile/CatalogMap.zig
|
||||
src/install/lockfile/lockfile_json_stringify_for_debugging.zig
|
||||
src/install/lockfile/OverrideMap.zig
|
||||
src/install/lockfile/Package.zig
|
||||
src/install/lockfile/Package/Meta.zig
|
||||
src/install/lockfile/Package/Scripts.zig
|
||||
src/install/lockfile/Package/WorkspaceMap.zig
|
||||
src/install/lockfile/printer/tree_printer.zig
|
||||
src/install/lockfile/printer/Yarn.zig
|
||||
src/install/lockfile/Tree.zig
|
||||
src/install/migration.zig
|
||||
src/install/NetworkTask.zig
|
||||
src/install/npm.zig
|
||||
src/install/PackageInstall.zig
|
||||
src/install/PackageInstaller.zig
|
||||
src/install/PackageManager.zig
|
||||
src/install/PackageManager/CommandLineArguments.zig
|
||||
src/install/PackageManager/install_with_manager.zig
|
||||
src/install/PackageManager/PackageJSONEditor.zig
|
||||
src/install/PackageManager/PackageManagerDirectories.zig
|
||||
src/install/PackageManager/PackageManagerEnqueue.zig
|
||||
src/install/PackageManager/PackageManagerLifecycle.zig
|
||||
src/install/PackageManager/PackageManagerOptions.zig
|
||||
src/install/PackageManager/PackageManagerResolution.zig
|
||||
src/install/PackageManager/patchPackage.zig
|
||||
src/install/PackageManager/processDependencyList.zig
|
||||
src/install/PackageManager/ProgressStrings.zig
|
||||
src/install/PackageManager/runTasks.zig
|
||||
src/install/PackageManager/updatePackageJSONAndInstall.zig
|
||||
src/install/PackageManager/UpdateRequest.zig
|
||||
src/install/PackageManager/WorkspacePackageJSONCache.zig
|
||||
src/install/PackageManagerTask.zig
|
||||
src/install/PackageManifestMap.zig
|
||||
src/install/padding_checker.zig
|
||||
src/install/patch_install.zig
|
||||
src/install/repository.zig
|
||||
src/install/resolution.zig
|
||||
src/install/resolvers/folder_resolver.zig
|
||||
src/install/versioned_url.zig
|
||||
src/install/windows-shim/BinLinkingShim.zig
|
||||
src/install/windows-shim/bun_shim_impl.zig
|
||||
src/interchange.zig
|
||||
src/interchange/json.zig
|
||||
src/interchange/toml.zig
|
||||
src/interchange/toml/lexer.zig
|
||||
src/io/heap.zig
|
||||
src/io/io.zig
|
||||
src/io/MaxBuf.zig
|
||||
src/io/openForWriting.zig
|
||||
src/io/PipeReader.zig
|
||||
src/io/pipes.zig
|
||||
src/io/PipeWriter.zig
|
||||
src/io/source.zig
|
||||
src/js_lexer_tables.zig
|
||||
src/js_lexer.zig
|
||||
src/js_lexer/identifier.zig
|
||||
src/js_parser.zig
|
||||
src/js_printer.zig
|
||||
src/jsc_stub.zig
|
||||
src/libarchive/libarchive-bindings.zig
|
||||
src/libarchive/libarchive.zig
|
||||
src/linear_fifo.zig
|
||||
src/linker.zig
|
||||
src/linux.zig
|
||||
src/logger.zig
|
||||
src/macho.zig
|
||||
src/main_test.zig
|
||||
src/main_wasm.zig
|
||||
src/main.zig
|
||||
src/meta.zig
|
||||
src/napi/napi.zig
|
||||
src/node_fallbacks.zig
|
||||
src/open.zig
|
||||
src/options.zig
|
||||
src/output.zig
|
||||
src/OutputFile.zig
|
||||
src/patch.zig
|
||||
src/paths.zig
|
||||
src/paths/EnvPath.zig
|
||||
src/paths/path_buffer_pool.zig
|
||||
src/paths/Path.zig
|
||||
src/pe.zig
|
||||
src/perf.zig
|
||||
src/pool.zig
|
||||
src/Progress.zig
|
||||
src/ptr.zig
|
||||
src/ptr/Cow.zig
|
||||
src/ptr/CowSlice.zig
|
||||
src/ptr/ref_count.zig
|
||||
src/ptr/tagged_pointer.zig
|
||||
src/ptr/weak_ptr.zig
|
||||
src/renamer.zig
|
||||
src/resolver/data_url.zig
|
||||
src/resolver/dir_info.zig
|
||||
src/resolver/package_json.zig
|
||||
src/resolver/resolve_path.zig
|
||||
src/resolver/resolver.zig
|
||||
src/resolver/tsconfig_json.zig
|
||||
src/result.zig
|
||||
src/router.zig
|
||||
src/runtime.zig
|
||||
src/s3/acl.zig
|
||||
src/s3/client.zig
|
||||
src/s3/credentials.zig
|
||||
src/s3/download_stream.zig
|
||||
src/s3/error.zig
|
||||
src/s3/list_objects.zig
|
||||
src/s3/multipart_options.zig
|
||||
src/s3/multipart.zig
|
||||
src/s3/simple_request.zig
|
||||
src/s3/storage_class.zig
|
||||
src/semver.zig
|
||||
src/semver/ExternalString.zig
|
||||
src/semver/SemverObject.zig
|
||||
src/semver/SemverQuery.zig
|
||||
src/semver/SemverRange.zig
|
||||
src/semver/SemverString.zig
|
||||
src/semver/SlicedString.zig
|
||||
src/semver/Version.zig
|
||||
src/sha.zig
|
||||
src/shell/AllocScope.zig
|
||||
src/shell/braces.zig
|
||||
src/shell/Builtin.zig
|
||||
src/shell/builtin/basename.zig
|
||||
src/shell/builtin/cat.zig
|
||||
src/shell/builtin/cd.zig
|
||||
src/shell/builtin/cp.zig
|
||||
src/shell/builtin/dirname.zig
|
||||
src/shell/builtin/echo.zig
|
||||
src/shell/builtin/exit.zig
|
||||
src/shell/builtin/export.zig
|
||||
src/shell/builtin/false.zig
|
||||
src/shell/builtin/ls.zig
|
||||
src/shell/builtin/mkdir.zig
|
||||
src/shell/builtin/mv.zig
|
||||
src/shell/builtin/pwd.zig
|
||||
src/shell/builtin/rm.zig
|
||||
src/shell/builtin/seq.zig
|
||||
src/shell/builtin/touch.zig
|
||||
src/shell/builtin/true.zig
|
||||
src/shell/builtin/which.zig
|
||||
src/shell/builtin/yes.zig
|
||||
src/shell/EnvMap.zig
|
||||
src/shell/EnvStr.zig
|
||||
src/shell/interpreter.zig
|
||||
src/shell/IO.zig
|
||||
src/shell/IOReader.zig
|
||||
src/shell/IOWriter.zig
|
||||
src/shell/ParsedShellScript.zig
|
||||
src/shell/RefCountedStr.zig
|
||||
src/shell/shell.zig
|
||||
src/shell/states/Assigns.zig
|
||||
src/shell/states/Async.zig
|
||||
src/shell/states/Base.zig
|
||||
src/shell/states/Binary.zig
|
||||
src/shell/states/Cmd.zig
|
||||
src/shell/states/CondExpr.zig
|
||||
src/shell/states/Expansion.zig
|
||||
src/shell/states/If.zig
|
||||
src/shell/states/Pipeline.zig
|
||||
src/shell/states/Script.zig
|
||||
src/shell/states/Stmt.zig
|
||||
src/shell/states/Subshell.zig
|
||||
src/shell/subproc.zig
|
||||
src/shell/util.zig
|
||||
src/shell/Yield.zig
|
||||
src/sourcemap/CodeCoverage.zig
|
||||
src/sourcemap/JSSourceMap.zig
|
||||
src/sourcemap/LineOffsetTable.zig
|
||||
src/sourcemap/sourcemap.zig
|
||||
src/sourcemap/VLQ.zig
|
||||
src/sql/postgres.zig
|
||||
src/sql/postgres/AnyPostgresError.zig
|
||||
src/sql/postgres/AuthenticationState.zig
|
||||
src/sql/postgres/CommandTag.zig
|
||||
src/sql/postgres/ConnectionFlags.zig
|
||||
src/sql/postgres/Data.zig
|
||||
src/sql/postgres/DataCell.zig
|
||||
src/sql/postgres/DebugSocketMonitorReader.zig
|
||||
src/sql/postgres/DebugSocketMonitorWriter.zig
|
||||
src/sql/postgres/ObjectIterator.zig
|
||||
src/sql/postgres/PostgresCachedStructure.zig
|
||||
src/sql/postgres/PostgresProtocol.zig
|
||||
src/sql/postgres/PostgresRequest.zig
|
||||
src/sql/postgres/PostgresSQLConnection.zig
|
||||
src/sql/postgres/PostgresSQLContext.zig
|
||||
src/sql/postgres/PostgresSQLQuery.zig
|
||||
src/sql/postgres/PostgresSQLQueryResultMode.zig
|
||||
src/sql/postgres/PostgresSQLStatement.zig
|
||||
src/sql/postgres/PostgresTypes.zig
|
||||
src/sql/postgres/protocol/ArrayList.zig
|
||||
src/sql/postgres/protocol/Authentication.zig
|
||||
src/sql/postgres/protocol/BackendKeyData.zig
|
||||
src/sql/postgres/protocol/Close.zig
|
||||
src/sql/postgres/protocol/ColumnIdentifier.zig
|
||||
src/sql/postgres/protocol/CommandComplete.zig
|
||||
src/sql/postgres/protocol/CopyData.zig
|
||||
src/sql/postgres/protocol/CopyFail.zig
|
||||
src/sql/postgres/protocol/CopyInResponse.zig
|
||||
src/sql/postgres/protocol/CopyOutResponse.zig
|
||||
src/sql/postgres/protocol/DataRow.zig
|
||||
src/sql/postgres/protocol/DecoderWrap.zig
|
||||
src/sql/postgres/protocol/Describe.zig
|
||||
src/sql/postgres/protocol/ErrorResponse.zig
|
||||
src/sql/postgres/protocol/Execute.zig
|
||||
src/sql/postgres/protocol/FieldDescription.zig
|
||||
src/sql/postgres/protocol/FieldMessage.zig
|
||||
src/sql/postgres/protocol/FieldType.zig
|
||||
src/sql/postgres/protocol/NegotiateProtocolVersion.zig
|
||||
src/sql/postgres/protocol/NewReader.zig
|
||||
src/sql/postgres/protocol/NewWriter.zig
|
||||
src/sql/postgres/protocol/NoticeResponse.zig
|
||||
src/sql/postgres/protocol/NotificationResponse.zig
|
||||
src/sql/postgres/protocol/ParameterDescription.zig
|
||||
src/sql/postgres/protocol/ParameterStatus.zig
|
||||
src/sql/postgres/protocol/Parse.zig
|
||||
src/sql/postgres/protocol/PasswordMessage.zig
|
||||
src/sql/postgres/protocol/PortalOrPreparedStatement.zig
|
||||
src/sql/postgres/protocol/ReadyForQuery.zig
|
||||
src/sql/postgres/protocol/RowDescription.zig
|
||||
src/sql/postgres/protocol/SASLInitialResponse.zig
|
||||
src/sql/postgres/protocol/SASLResponse.zig
|
||||
src/sql/postgres/protocol/StackReader.zig
|
||||
src/sql/postgres/protocol/StartupMessage.zig
|
||||
src/sql/postgres/protocol/TransactionStatusIndicator.zig
|
||||
src/sql/postgres/protocol/WriteWrap.zig
|
||||
src/sql/postgres/protocol/zHelpers.zig
|
||||
src/sql/postgres/QueryBindingIterator.zig
|
||||
src/sql/postgres/SASL.zig
|
||||
src/sql/postgres/Signature.zig
|
||||
src/sql/postgres/SocketMonitor.zig
|
||||
src/sql/postgres/SSLMode.zig
|
||||
src/sql/postgres/Status.zig
|
||||
src/sql/postgres/TLSStatus.zig
|
||||
src/sql/postgres/types/bool.zig
|
||||
src/sql/postgres/types/bytea.zig
|
||||
src/sql/postgres/types/date.zig
|
||||
src/sql/postgres/types/int_types.zig
|
||||
src/sql/postgres/types/json.zig
|
||||
src/sql/postgres/types/numeric.zig
|
||||
src/sql/postgres/types/PostgresString.zig
|
||||
src/sql/postgres/types/Tag.zig
|
||||
src/StandaloneModuleGraph.zig
|
||||
src/StaticHashMap.zig
|
||||
src/string.zig
|
||||
src/string/HashedString.zig
|
||||
src/string/immutable.zig
|
||||
src/string/immutable/escapeHTML.zig
|
||||
src/string/immutable/exact_size_matcher.zig
|
||||
src/string/immutable/grapheme.zig
|
||||
src/string/immutable/paths.zig
|
||||
src/string/immutable/unicode.zig
|
||||
src/string/immutable/visible.zig
|
||||
src/string/MutableString.zig
|
||||
src/string/PathString.zig
|
||||
src/string/SmolStr.zig
|
||||
src/string/StringBuilder.zig
|
||||
src/string/StringJoiner.zig
|
||||
src/string/WTFStringImpl.zig
|
||||
src/sys_uv.zig
|
||||
src/sys.zig
|
||||
src/system_timer.zig
|
||||
src/test/fixtures.zig
|
||||
src/test/recover.zig
|
||||
src/threading.zig
|
||||
src/threading/channel.zig
|
||||
src/threading/Condition.zig
|
||||
src/threading/Futex.zig
|
||||
src/threading/guarded_value.zig
|
||||
src/threading/Mutex.zig
|
||||
src/threading/ThreadPool.zig
|
||||
src/threading/unbounded_queue.zig
|
||||
src/threading/WaitGroup.zig
|
||||
src/tmp.zig
|
||||
src/tracy.zig
|
||||
src/trait.zig
|
||||
src/transpiler.zig
|
||||
src/unit_test.zig
|
||||
src/url.zig
|
||||
src/util.zig
|
||||
src/valkey/index.zig
|
||||
src/valkey/js_valkey_functions.zig
|
||||
src/valkey/js_valkey.zig
|
||||
src/valkey/valkey_protocol.zig
|
||||
src/valkey/valkey.zig
|
||||
src/valkey/ValkeyCommand.zig
|
||||
src/valkey/ValkeyContext.zig
|
||||
src/walker_skippable.zig
|
||||
src/Watcher.zig
|
||||
src/watcher/INotifyWatcher.zig
|
||||
src/watcher/KEventWatcher.zig
|
||||
src/watcher/WindowsWatcher.zig
|
||||
src/which_npm_client.zig
|
||||
src/which.zig
|
||||
src/windows.zig
|
||||
src/work_pool.zig
|
||||
src/workaround_missing_symbols.zig
|
||||
src/wyhash.zig
|
||||
src/zlib.zig
|
||||
@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/boringssl
COMMIT
f1ffd9e83d4f5c28a9c70d73f9a4e6fcf310062f
7a5d984c69b0c34c4cbb56c6812eaa5b9bef485c
)

register_cmake_command(

@@ -2,8 +2,6 @@ include(PathUtils)

if(DEBUG)
set(bun bun-debug)
elseif(ENABLE_ASAN AND ENABLE_VALGRIND)
set(bun bun-asan-valgrind)
elseif(ENABLE_ASAN)
set(bun bun-asan)
elseif(ENABLE_VALGRIND)
@@ -44,14 +42,6 @@ else()
set(CONFIGURE_DEPENDS "")
endif()

set(LLVM_ZIG_CODEGEN_THREADS 0)
# This makes the build slower, so we turn it off for now.
# if (DEBUG)
# include(ProcessorCount)
# ProcessorCount(CPU_COUNT)
# set(LLVM_ZIG_CODEGEN_THREADS ${CPU_COUNT})
# endif()

# --- Dependencies ---

set(BUN_DEPENDENCIES
@@ -326,6 +316,14 @@ set(BUN_CPP_OUTPUTS
${CODEGEN_PATH}/cpp.zig
)

# Root-level node_modules install needed for @lezer/cpp dependency used by cppbind.ts
register_bun_install(
CWD
${CWD}
NODE_MODULES_VARIABLE
BUN_ROOT_NODE_MODULES
)

register_command(
TARGET
bun-cppbind
@@ -339,6 +337,7 @@ register_command(
SOURCES
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
${BUN_CXX_SOURCES}
${BUN_ROOT_NODE_MODULES}
OUTPUTS
${BUN_CPP_OUTPUTS}
)
@@ -586,13 +585,7 @@ if (TEST)
set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-test.o)
set(ZIG_STEPS test)
else()
if (LLVM_ZIG_CODEGEN_THREADS GREATER 1)
foreach(i RANGE ${LLVM_ZIG_CODEGEN_THREADS})
list(APPEND BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.${i}.o)
endforeach()
else()
set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
endif()
set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
set(ZIG_STEPS obj)
endif()

@@ -634,9 +627,7 @@ register_command(
-Doptimize=${ZIG_OPTIMIZE}
-Dcpu=${ZIG_CPU}
-Denable_logs=$<IF:$<BOOL:${ENABLE_LOGS}>,true,false>
-Denable_asan=$<IF:$<BOOL:${ENABLE_ZIG_ASAN}>,true,false>
-Denable_valgrind=$<IF:$<BOOL:${ENABLE_VALGRIND}>,true,false>
-Dllvm_codegen_threads=${LLVM_ZIG_CODEGEN_THREADS}
-Denable_asan=$<IF:$<BOOL:${ENABLE_ASAN}>,true,false>
-Dversion=${VERSION}
-Dreported_nodejs_version=${NODEJS_VERSION}
-Dcanary=${CANARY_REVISION}
@@ -654,7 +645,6 @@ register_command(
SOURCES
${BUN_ZIG_SOURCES}
${BUN_ZIG_GENERATED_SOURCES}
${CWD}/src/install/PackageManager/scanner-entry.ts # Is there a better way to do this?
)

set_property(TARGET bun-zig PROPERTY JOB_POOL compile_pool)
@@ -904,8 +894,12 @@ if(NOT WIN32)
endif()

if(ENABLE_ASAN)
target_compile_options(${bun} PUBLIC -fsanitize=address)
target_link_libraries(${bun} PUBLIC -fsanitize=address)
target_compile_options(${bun} PUBLIC
-fsanitize=address
)
target_link_libraries(${bun} PUBLIC
-fsanitize=address
)
endif()

target_compile_options(${bun} PUBLIC
@@ -944,8 +938,12 @@ if(NOT WIN32)
)

if(ENABLE_ASAN)
target_compile_options(${bun} PUBLIC -fsanitize=address)
target_link_libraries(${bun} PUBLIC -fsanitize=address)
target_compile_options(${bun} PUBLIC
-fsanitize=address
)
target_link_libraries(${bun} PUBLIC
-fsanitize=address
)
endif()
endif()
else()
@@ -979,7 +977,6 @@ if(WIN32)
/delayload:WSOCK32.dll
/delayload:ADVAPI32.dll
/delayload:IPHLPAPI.dll
/delayload:CRYPT32.dll
)
endif()
endif()
@@ -1021,7 +1018,6 @@ if(LINUX)
-Wl,--wrap=exp2
-Wl,--wrap=expf
-Wl,--wrap=fcntl64
-Wl,--wrap=gettid
-Wl,--wrap=log
-Wl,--wrap=log2
-Wl,--wrap=log2f
@@ -1047,6 +1043,7 @@ if(LINUX)
--ld-path=${LLD_PROGRAM}
-fno-pic
-Wl,-no-pie
-Wl,-icf=safe
-Wl,--as-needed
-Wl,-z,stack-size=12800000
-Wl,--compress-debug-sections=zlib
@@ -1072,13 +1069,6 @@ if(LINUX)
-Wl,--gc-sections
)
endif()

if (NOT DEBUG AND NOT ENABLE_ASAN AND NOT ENABLE_VALGRIND)
target_link_options(${bun} PUBLIC
-Wl,-icf=safe
)
endif()

endif()

# --- Symbols list ---
@@ -1138,9 +1128,6 @@ endif()

include_directories(${WEBKIT_INCLUDE_PATH})

# Include the generated dependency versions header
include_directories(${CMAKE_BINARY_DIR})

if(NOT WEBKIT_LOCAL AND NOT APPLE)
include_directories(${WEBKIT_INCLUDE_PATH}/wtf/unicode)
endif()
@@ -1200,7 +1187,6 @@ if(WIN32)
ntdll
userenv
dbghelp
crypt32
wsock32 # ws2_32 required by TransmitFile aka sendfile on windows
delayimp.lib
)
@@ -1222,7 +1208,6 @@ if(NOT BUN_CPP_ONLY)
endif()

if(bunStrip)
# First, strip bun-profile.exe to create bun.exe
register_command(
TARGET
${bun}
@@ -1243,48 +1228,6 @@ if(NOT BUN_CPP_ONLY)
OUTPUTS
${BUILD_PATH}/${bunStripExe}
)

# Then sign both executables on Windows
if(WIN32 AND ENABLE_WINDOWS_CODESIGNING)
set(SIGN_SCRIPT "${CMAKE_SOURCE_DIR}/.buildkite/scripts/sign-windows.ps1")

# Verify signing script exists
if(NOT EXISTS "${SIGN_SCRIPT}")
message(FATAL_ERROR "Windows signing script not found: ${SIGN_SCRIPT}")
endif()

# Use PowerShell for Windows code signing (native Windows, no path issues)
find_program(POWERSHELL_EXECUTABLE
NAMES pwsh.exe powershell.exe
PATHS
"C:/Program Files/PowerShell/7"
"C:/Program Files (x86)/PowerShell/7"
"C:/Windows/System32/WindowsPowerShell/v1.0"
DOC "Path to PowerShell executable"
)

if(NOT POWERSHELL_EXECUTABLE)
set(POWERSHELL_EXECUTABLE "powershell.exe")
endif()

message(STATUS "Using PowerShell executable: ${POWERSHELL_EXECUTABLE}")

# Sign both bun-profile.exe and bun.exe after stripping
register_command(
TARGET
${bun}
TARGET_PHASE
POST_BUILD
COMMENT
"Code signing bun-profile.exe and bun.exe with DigiCert KeyLocker"
COMMAND
"${POWERSHELL_EXECUTABLE}" "-NoProfile" "-ExecutionPolicy" "Bypass" "-File" "${SIGN_SCRIPT}" "-BunProfileExe" "${BUILD_PATH}/${bunExe}" "-BunExe" "${BUILD_PATH}/${bunStripExe}"
CWD
${CMAKE_SOURCE_DIR}
SOURCES
${BUILD_PATH}/${bunStripExe}
)
endif()
endif()

# somehow on some Linux systems we need to disable ASLR for ASAN-instrumented binaries to run
@@ -1376,20 +1319,12 @@ if(NOT BUN_CPP_ONLY)
if(ENABLE_BASELINE)
set(bunTriplet ${bunTriplet}-baseline)
endif()

if (ENABLE_ASAN AND ENABLE_VALGRIND)
set(bunTriplet ${bunTriplet}-asan-valgrind)
set(bunPath ${bunTriplet})
elseif (ENABLE_VALGRIND)
set(bunTriplet ${bunTriplet}-valgrind)
set(bunPath ${bunTriplet})
elseif(ENABLE_ASAN)
if(ENABLE_ASAN)
set(bunTriplet ${bunTriplet}-asan)
set(bunPath ${bunTriplet})
else()
string(REPLACE bun ${bunTriplet} bunPath ${bun})
endif()

set(bunFiles ${bunExe} features.json)
if(WIN32)
list(APPEND bunFiles ${bun}.pdb)

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
HdrHistogram/HdrHistogram_c
COMMIT
be60a9987ee48d0abf0d7b6a175bad8d6c1585d1
652d51bcc36744fd1a6debfeb1a8a5f58b14022c
)

register_cmake_command(

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
libarchive/libarchive
COMMIT
9525f90ca4bd14c7b335e2f8c84a4607b0af6bdf
7118f97c26bf0b2f426728b482f86508efc81d02
)

register_cmake_command(

@@ -4,8 +4,7 @@ register_repository(
REPOSITORY
libuv/libuv
COMMIT
# Corresponds to v1.51.0
5152db2cbfeb5582e9c27c5ea1dba2cd9e10759b
da527d8d2a908b824def74382761566371439003
)

if(WIN32)

@@ -13,52 +13,14 @@ set(MIMALLOC_CMAKE_ARGS
|
||||
-DMI_BUILD_SHARED=OFF
|
||||
-DMI_BUILD_TESTS=OFF
|
||||
-DMI_USE_CXX=ON
|
||||
-DMI_OVERRIDE=OFF
|
||||
-DMI_OSX_ZONE=OFF
|
||||
-DMI_OSX_INTERPOSE=OFF
|
||||
-DMI_SKIP_COLLECT_ON_EXIT=ON
|
||||
|
||||
# ```
|
||||
# ❯ mimalloc_allow_large_os_pages=0 BUN_PORT=3004 mem bun http-hello.js
|
||||
# Started development server: http://localhost:3004
|
||||
#
|
||||
# Peak memory usage: 52 MB
|
||||
#
|
||||
# ❯ mimalloc_allow_large_os_pages=1 BUN_PORT=3004 mem bun http-hello.js
|
||||
# Started development server: http://localhost:3004
|
||||
#
|
||||
# Peak memory usage: 74 MB
|
||||
# ```
|
||||
#
|
||||
# ```
|
||||
# ❯ mimalloc_allow_large_os_pages=1 mem bun --eval 1
|
||||
#
|
||||
# Peak memory usage: 52 MB
|
||||
#
|
||||
# ❯ mimalloc_allow_large_os_pages=0 mem bun --eval 1
|
||||
#
|
||||
# Peak memory usage: 30 MB
|
||||
# ```
|
||||
-DMI_NO_THP=1
|
||||
)
|
||||
|
||||
if (ABI STREQUAL "musl")
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_LIBC_MUSL=ON)
|
||||
endif()
|
||||
|
||||
if(ENABLE_ASAN)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_TRACK_ASAN=ON)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_DEBUG_UBSAN=ON)
|
||||
elseif(APPLE OR LINUX)
|
||||
if(APPLE)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
|
||||
else()
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=ON)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(DEBUG)
|
||||
@@ -69,12 +31,6 @@ if(ENABLE_VALGRIND)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_VALGRIND=ON)
|
||||
endif()
|
||||
|
||||
# Enable SIMD optimizations when not building for baseline (older CPUs)
|
||||
if(NOT ENABLE_BASELINE)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_ARCH=ON)
|
||||
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OPT_SIMD=ON)
|
||||
endif()
|
||||
|
||||
if(WIN32)
|
||||
if(DEBUG)
|
||||
set(MIMALLOC_LIBRARY mimalloc-static-debug)
|
||||
@@ -97,7 +53,6 @@ if(APPLE OR (LINUX AND NOT DEBUG))
|
||||
set(MIMALLOC_LIBRARY CMakeFiles/mimalloc-obj.dir/src/static.c.o)
|
||||
endif()
|
||||
|
||||
|
||||
register_cmake_command(
|
||||
TARGET
|
||||
mimalloc
|
||||
|
||||
@@ -1,220 +0,0 @@
|
||||
# GenerateDependencyVersions.cmake
|
||||
# Generates a header file with all dependency versions
|
||||
|
||||
# Function to extract version from git tree object
|
||||
function(get_git_tree_hash dep_name output_var)
|
||||
execute_process(
|
||||
COMMAND git rev-parse HEAD:./src/deps/${dep_name}
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
OUTPUT_VARIABLE commit_hash
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
ERROR_QUIET
|
||||
RESULT_VARIABLE result
|
||||
)
|
||||
if(result EQUAL 0 AND commit_hash)
|
||||
set(${output_var} "${commit_hash}" PARENT_SCOPE)
|
||||
else()
|
||||
set(${output_var} "unknown" PARENT_SCOPE)
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
# Function to extract version from header file using regex
|
||||
function(extract_version_from_header header_file regex_pattern output_var)
|
||||
if(EXISTS "${header_file}")
|
||||
file(STRINGS "${header_file}" version_line REGEX "${regex_pattern}")
|
||||
if(version_line)
|
||||
string(REGEX MATCH "${regex_pattern}" _match "${version_line}")
|
||||
if(CMAKE_MATCH_1)
|
||||
set(${output_var} "${CMAKE_MATCH_1}" PARENT_SCOPE)
|
||||
else()
|
||||
set(${output_var} "unknown" PARENT_SCOPE)
|
||||
endif()
|
||||
else()
|
||||
set(${output_var} "unknown" PARENT_SCOPE)
|
||||
endif()
|
||||
else()
|
||||
set(${output_var} "unknown" PARENT_SCOPE)
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
# Main function to generate the header file
|
||||
function(generate_dependency_versions_header)
|
||||
set(DEPS_PATH "${CMAKE_SOURCE_DIR}/src/deps")
|
||||
set(VENDOR_PATH "${CMAKE_SOURCE_DIR}/vendor")
|
||||
|
||||
# Initialize version variables
|
||||
set(DEPENDENCY_VERSIONS "")
|
||||
|
||||
# WebKit version (from SetupWebKit.cmake or command line)
|
||||
if(WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION_STR "${WEBKIT_VERSION}")
|
||||
else()
|
||||
set(WEBKIT_VERSION_STR "0ddf6f47af0a9782a354f61e06d7f83d097d9f84")
|
||||
endif()
|
||||
list(APPEND DEPENDENCY_VERSIONS "WEBKIT" "${WEBKIT_VERSION_STR}")
|
||||
|
||||
# Track input files so CMake reconfigures when they change
|
||||
set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS
|
||||
"${CMAKE_SOURCE_DIR}/package.json"
|
||||
"${VENDOR_PATH}/libdeflate/libdeflate.h"
|
||||
"${VENDOR_PATH}/zlib/zlib.h"
|
||||
"${DEPS_PATH}/zstd/lib/zstd.h"
|
||||
)
|
||||
|
||||
# Hardcoded dependency versions (previously from generated_versions_list.zig)
|
||||
# These are the commit hashes/tree objects for each dependency
|
||||
list(APPEND DEPENDENCY_VERSIONS "BORINGSSL" "29a2cd359458c9384694b75456026e4b57e3e567")
|
||||
list(APPEND DEPENDENCY_VERSIONS "C_ARES" "d1722e6e8acaf10eb73fa995798a9cd421d9f85e")
|
||||
list(APPEND DEPENDENCY_VERSIONS "LIBARCHIVE" "898dc8319355b7e985f68a9819f182aaed61b53a")
|
||||
list(APPEND DEPENDENCY_VERSIONS "LIBDEFLATE_HASH" "dc76454a39e7e83b68c3704b6e3784654f8d5ac5")
|
||||
list(APPEND DEPENDENCY_VERSIONS "LOLHTML" "8d4c273ded322193d017042d1f48df2766b0f88b")
|
||||
list(APPEND DEPENDENCY_VERSIONS "LSHPACK" "3d0f1fc1d6e66a642e7a98c55deb38aa986eb4b0")
|
||||
list(APPEND DEPENDENCY_VERSIONS "MIMALLOC" "4c283af60cdae205df5a872530c77e2a6a307d43")
|
||||
list(APPEND DEPENDENCY_VERSIONS "PICOHTTPPARSER" "066d2b1e9ab820703db0837a7255d92d30f0c9f5")
|
||||
list(APPEND DEPENDENCY_VERSIONS "TINYCC" "ab631362d839333660a265d3084d8ff060b96753")
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZLIB_HASH" "886098f3f339617b4243b286f5ed364b9989e245")
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZSTD_HASH" "794ea1b0afca0f020f4e57b6732332231fb23c70")
|
||||
|
||||
# Extract semantic versions from header files where available
|
||||
extract_version_from_header(
|
||||
"${VENDOR_PATH}/libdeflate/libdeflate.h"
|
||||
"#define LIBDEFLATE_VERSION_STRING[ \t]+\"([0-9\\.]+)\""
|
||||
LIBDEFLATE_VERSION_STRING
|
||||
)
|
||||
list(APPEND DEPENDENCY_VERSIONS "LIBDEFLATE_VERSION" "${LIBDEFLATE_VERSION_STRING}")
|
||||
|
||||
extract_version_from_header(
|
||||
"${VENDOR_PATH}/zlib/zlib.h"
|
||||
"#define[ \t]+ZLIB_VERSION[ \t]+\"([^\"]+)\""
|
||||
ZLIB_VERSION_STRING
|
||||
)
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZLIB_VERSION" "${ZLIB_VERSION_STRING}")
|
||||
|
||||
extract_version_from_header(
|
||||
"${DEPS_PATH}/zstd/lib/zstd.h"
|
||||
"#define[ \t]+ZSTD_VERSION_STRING[ \t]+\"([^\"]+)\""
|
||||
ZSTD_VERSION_STRING
|
||||
)
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZSTD_VERSION" "${ZSTD_VERSION_STRING}")
|
||||
|
||||
# Bun version from package.json
|
||||
if(EXISTS "${CMAKE_SOURCE_DIR}/package.json")
|
||||
file(READ "${CMAKE_SOURCE_DIR}/package.json" PACKAGE_JSON)
|
||||
string(REGEX MATCH "\"version\"[ \t]*:[ \t]*\"([^\"]+)\"" _ ${PACKAGE_JSON})
|
||||
if(CMAKE_MATCH_1)
|
||||
set(BUN_VERSION_STRING "${CMAKE_MATCH_1}")
|
||||
else()
|
||||
set(BUN_VERSION_STRING "unknown")
|
||||
endif()
|
||||
else()
|
||||
set(BUN_VERSION_STRING "${VERSION}")
|
||||
endif()
|
||||
list(APPEND DEPENDENCY_VERSIONS "BUN_VERSION" "${BUN_VERSION_STRING}")
|
||||
|
||||
# Node.js compatibility version (hardcoded as in the current implementation)
|
||||
set(NODEJS_COMPAT_VERSION "22.12.0")
|
||||
list(APPEND DEPENDENCY_VERSIONS "NODEJS_COMPAT_VERSION" "${NODEJS_COMPAT_VERSION}")
|
||||
|
||||
# Get Bun's git SHA for uws/usockets versions (they use Bun's own SHA)
|
||||
execute_process(
|
||||
COMMAND git rev-parse HEAD
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
OUTPUT_VARIABLE BUN_GIT_SHA
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
ERROR_QUIET
|
||||
)
|
||||
if(NOT BUN_GIT_SHA)
|
||||
set(BUN_GIT_SHA "unknown")
|
||||
endif()
|
||||
list(APPEND DEPENDENCY_VERSIONS "UWS" "${BUN_GIT_SHA}")
|
||||
list(APPEND DEPENDENCY_VERSIONS "USOCKETS" "${BUN_GIT_SHA}")
|
||||
|
||||
# Zig version - hardcoded for now, can be updated as needed
|
||||
# This should match the version of Zig used to build Bun
|
||||
list(APPEND DEPENDENCY_VERSIONS "ZIG" "0.14.1")
|
||||
|
||||
# Generate the header file content
|
||||
set(HEADER_CONTENT "// This file is auto-generated by CMake. Do not edit manually.\n")
|
||||
string(APPEND HEADER_CONTENT "#ifndef BUN_DEPENDENCY_VERSIONS_H\n")
|
||||
string(APPEND HEADER_CONTENT "#define BUN_DEPENDENCY_VERSIONS_H\n\n")
|
||||
string(APPEND HEADER_CONTENT "#ifdef __cplusplus\n")
|
||||
string(APPEND HEADER_CONTENT "extern \"C\" {\n")
|
||||
string(APPEND HEADER_CONTENT "#endif\n\n")
|
||||
string(APPEND HEADER_CONTENT "// Dependency versions\n")
|
||||
|
||||
# Process the version list
|
||||
list(LENGTH DEPENDENCY_VERSIONS num_versions)
|
||||
math(EXPR last_idx "${num_versions} - 1")
|
||||
set(i 0)
|
||||
while(i LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${i} name)
|
||||
math(EXPR value_idx "${i} + 1")
|
||||
if(value_idx LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${value_idx} value)
|
||||
# Only emit #define if value is not "unknown"
|
||||
if(NOT "${value}" STREQUAL "unknown")
|
||||
string(APPEND HEADER_CONTENT "#define BUN_DEP_${name} \"${value}\"\n")
|
||||
endif()
|
||||
endif()
|
||||
math(EXPR i "${i} + 2")
|
||||
endwhile()
|
||||
|
||||
string(APPEND HEADER_CONTENT "\n")
|
||||
string(APPEND HEADER_CONTENT "// C string constants for easy access\n")
|
||||
|
||||
# Create C string constants
|
||||
set(i 0)
|
||||
while(i LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${i} name)
|
||||
math(EXPR value_idx "${i} + 1")
|
||||
if(value_idx LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${value_idx} value)
|
||||
# Only emit constant if value is not "unknown"
|
||||
if(NOT "${value}" STREQUAL "unknown")
|
||||
string(APPEND HEADER_CONTENT "static const char* const BUN_VERSION_${name} = \"${value}\";\n")
|
||||
endif()
|
||||
endif()
|
||||
math(EXPR i "${i} + 2")
|
||||
endwhile()
|
||||
|
||||
string(APPEND HEADER_CONTENT "\n#ifdef __cplusplus\n")
|
||||
string(APPEND HEADER_CONTENT "}\n")
|
||||
string(APPEND HEADER_CONTENT "#endif\n\n")
|
||||
string(APPEND HEADER_CONTENT "#endif // BUN_DEPENDENCY_VERSIONS_H\n")
|
||||
|
||||
# Write the header file only if content has changed
|
||||
set(OUTPUT_FILE "${CMAKE_BINARY_DIR}/bun_dependency_versions.h")
|
||||
|
||||
# Read existing content if file exists
|
||||
set(EXISTING_CONTENT "")
|
||||
if(EXISTS "${OUTPUT_FILE}")
|
||||
file(READ "${OUTPUT_FILE}" EXISTING_CONTENT)
|
||||
endif()
|
||||
|
||||
# Only write if content is different
|
||||
if(NOT "${EXISTING_CONTENT}" STREQUAL "${HEADER_CONTENT}")
|
||||
file(WRITE "${OUTPUT_FILE}" "${HEADER_CONTENT}")
|
||||
message(STATUS "Updated dependency versions header: ${OUTPUT_FILE}")
|
||||
else()
|
||||
message(STATUS "Dependency versions header unchanged: ${OUTPUT_FILE}")
|
||||
endif()
|
||||
|
||||
# Also create a more detailed version for debugging
|
||||
set(DEBUG_OUTPUT_FILE "${CMAKE_BINARY_DIR}/bun_dependency_versions_debug.txt")
|
||||
set(DEBUG_CONTENT "Bun Dependency Versions\n")
|
||||
string(APPEND DEBUG_CONTENT "=======================\n\n")
|
||||
set(i 0)
|
||||
while(i LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${i} name)
|
||||
math(EXPR value_idx "${i} + 1")
|
||||
if(value_idx LESS num_versions)
|
||||
list(GET DEPENDENCY_VERSIONS ${value_idx} value)
|
||||
string(APPEND DEBUG_CONTENT "${name}: ${value}\n")
|
||||
endif()
|
||||
math(EXPR i "${i} + 2")
|
||||
endwhile()
|
||||
file(WRITE "${DEBUG_OUTPUT_FILE}" "${DEBUG_CONTENT}")
|
||||
endfunction()
|
||||
|
||||
# Call the function to generate the header
|
||||
generate_dependency_versions_header()
|
||||
@@ -131,9 +131,6 @@ else()
|
||||
find_llvm_command(CMAKE_RANLIB llvm-ranlib)
|
||||
if(LINUX)
|
||||
find_llvm_command(LLD_PROGRAM ld.lld)
|
||||
# Ensure vendor dependencies use lld instead of ld
|
||||
list(APPEND CMAKE_ARGS -DCMAKE_EXE_LINKER_FLAGS=--ld-path=${LLD_PROGRAM})
|
||||
list(APPEND CMAKE_ARGS -DCMAKE_SHARED_LINKER_FLAGS=--ld-path=${LLD_PROGRAM})
|
||||
endif()
|
||||
if(APPLE)
|
||||
find_llvm_command(CMAKE_DSYMUTIL dsymutil)
|
||||
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION 69fa2714ab5f917c2d15501ff8cfdccfaea78882)
|
||||
set(WEBKIT_VERSION 1098cc50652ab1eab171f58f7669e19ca6c276ae)
|
||||
endif()
|
||||
|
||||
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
|
||||
|
||||
@@ -20,7 +20,7 @@ else()
|
||||
unsupported(CMAKE_SYSTEM_NAME)
|
||||
endif()
|
||||
|
||||
set(ZIG_COMMIT "55fdbfa0c86be86b68d43a4ba761e6909eb0d7b2")
|
||||
set(ZIG_COMMIT "edc6229b1fafb1701a25fb4e17114cc756991546")
|
||||
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
@@ -90,7 +90,6 @@ register_command(
|
||||
-DZIG_PATH=${ZIG_PATH}
|
||||
-DZIG_COMMIT=${ZIG_COMMIT}
|
||||
-DENABLE_ASAN=${ENABLE_ASAN}
|
||||
-DENABLE_VALGRIND=${ENABLE_VALGRIND}
|
||||
-DZIG_COMPILER_SAFE=${ZIG_COMPILER_SAFE}
|
||||
-P ${CWD}/cmake/scripts/DownloadZig.cmake
|
||||
SOURCES
|
||||
|
||||
File diff suppressed because it is too large
@@ -665,6 +665,7 @@ _bun_test_completion() {
|
||||
'--timeout[Set the per-test timeout in milliseconds, default is 5000.]:timeout' \
|
||||
'--update-snapshots[Update snapshot files]' \
|
||||
'--rerun-each[Re-run each test file <NUMBER> times, helps catch certain bugs]:rerun' \
|
||||
'--only[Only run tests that are marked with "test.only()"]' \
|
||||
'--todo[Include tests that are marked with "test.todo()"]' \
|
||||
'--coverage[Generate a coverage profile]' \
|
||||
'--bail[Exit the test suite after <NUMBER> failures. If you do not specify a number, it defaults to 1.]:bail' \
|
||||
|
||||
@@ -233,7 +233,6 @@ In addition to the standard fetch options, Bun provides several extensions:
|
||||
```ts
|
||||
const response = await fetch("http://example.com", {
|
||||
// Control automatic response decompression (default: true)
|
||||
// Supports gzip, deflate, brotli (br), and zstd
|
||||
decompress: true,
|
||||
|
||||
// Disable connection reuse for this request
|
||||
@@ -321,6 +320,7 @@ Bun automatically sets the `Content-Type` header for request bodies when not exp
|
||||
|
||||
- For `Blob` objects, uses the blob's `type`
|
||||
- For `FormData`, sets appropriate multipart boundary
|
||||
- For JSON objects, sets `application/json`
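For example, a rough sketch of the first two rules (the upload URL is a placeholder):

```ts
// Bun infers Content-Type from the body; neither request sets the header explicitly.
const blobResponse = await fetch("https://example.com/upload", {
  method: "POST",
  body: new Blob(["hello"], { type: "text/plain" }), // sent as text/plain
});

const form = new FormData();
form.append("file", new Blob(["payload"]), "payload.txt");
const formResponse = await fetch("https://example.com/upload", {
  method: "POST",
  body: form, // sent as multipart/form-data with a generated boundary
});
```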
|
||||
|
||||
## Debugging
|
||||
|
||||
@@ -340,7 +340,7 @@ This will print the request and response headers to your terminal:
|
||||
[fetch] > User-Agent: Bun/$BUN_LATEST_VERSION
|
||||
[fetch] > Accept: */*
|
||||
[fetch] > Host: example.com
|
||||
[fetch] > Accept-Encoding: gzip, deflate, br, zstd
|
||||
[fetch] > Accept-Encoding: gzip, deflate, br
|
||||
|
||||
[fetch] < 200 OK
|
||||
[fetch] < Content-Encoding: gzip
|
||||
|
||||
@@ -155,24 +155,3 @@ const glob = new Glob("\\!index.ts");
|
||||
glob.match("!index.ts"); // => true
|
||||
glob.match("index.ts"); // => false
|
||||
```
|
||||
|
||||
## Node.js `fs.glob()` compatibility
|
||||
|
||||
Bun also implements Node.js's `fs.glob()` functions with additional features:
|
||||
|
||||
```ts
|
||||
import { glob, globSync, promises } from "node:fs";
|
||||
|
||||
// Array of patterns
|
||||
const files = await promises.glob(["**/*.ts", "**/*.js"]);
|
||||
|
||||
// Exclude patterns
|
||||
const filtered = await promises.glob("**/*", {
|
||||
exclude: ["node_modules/**", "*.test.*"],
|
||||
});
|
||||
```
|
||||
|
||||
All three functions (`fs.glob()`, `fs.globSync()`, `fs.promises.glob()`) support:
|
||||
|
||||
- Array of patterns as the first argument
|
||||
- `exclude` option to filter results
|
||||
|
||||
@@ -184,7 +184,6 @@ Bun.hash.rapidhash("data", 1234);
|
||||
|
||||
- `"blake2b256"`
|
||||
- `"blake2b512"`
|
||||
- `"blake2s256"`
|
||||
- `"md4"`
|
||||
- `"md5"`
|
||||
- `"ripemd160"`
|
||||
|
||||
@@ -164,70 +164,6 @@ Static responses do not allocate additional memory after initialization. You can
|
||||
|
||||
Static route responses are cached for the lifetime of the server object. To reload static routes, call `server.reload(options)`.
|
||||
|
||||
### File Responses vs Static Responses
|
||||
|
||||
When serving files in routes, there are two distinct behaviors depending on whether you buffer the file content or serve it directly:
|
||||
|
||||
```ts
|
||||
Bun.serve({
|
||||
routes: {
|
||||
// Static route - content is buffered in memory at startup
|
||||
"/logo.png": new Response(await Bun.file("./logo.png").bytes()),
|
||||
|
||||
// File route - content is read from filesystem on each request
|
||||
"/download.zip": new Response(Bun.file("./download.zip")),
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
**Static routes** (`new Response(await file.bytes())`) buffer content in memory at startup:
|
||||
|
||||
- **Zero filesystem I/O** during requests - content served entirely from memory
|
||||
- **ETag support** - Automatically generates and validates ETags for caching
|
||||
- **If-None-Match** - Returns `304 Not Modified` when client ETag matches
|
||||
- **No 404 handling** - Missing files cause startup errors, not runtime 404s
|
||||
- **Memory usage** - Full file content stored in RAM
|
||||
- **Best for**: Small static assets, API responses, frequently accessed files
|
||||
|
||||
**File routes** (`new Response(Bun.file(path))`) read from filesystem per request:
|
||||
|
||||
- **Filesystem reads** on each request - checks file existence and reads content
|
||||
- **Built-in 404 handling** - Returns `404 Not Found` if file doesn't exist or becomes inaccessible
|
||||
- **Last-Modified support** - Uses file modification time for `If-Modified-Since` headers
|
||||
- **If-Modified-Since** - Returns `304 Not Modified` when file hasn't changed since client's cached version
|
||||
- **Range request support** - Automatically handles partial content requests with `Content-Range` headers
|
||||
- **Streaming transfers** - Uses buffered reader with backpressure handling for efficient memory usage
|
||||
- **Memory efficient** - Only buffers small chunks during transfer, not entire file
|
||||
- **Best for**: Large files, dynamic content, user uploads, files that change frequently
|
||||
|
||||
### HTTP Caching Behavior
|
||||
|
||||
Both route types implement HTTP caching standards but with different strategies:
|
||||
|
||||
#### Static Routes Caching
|
||||
|
||||
- **ETag generation**: Automatically computes ETag hash from content at startup
|
||||
- **If-None-Match**: Validates client ETag against server ETag
|
||||
- **304 responses**: Returns `304 Not Modified` with empty body when ETags match
|
||||
- **Cache headers**: Inherits any `Cache-Control` headers you provide in the Response
|
||||
- **Consistency**: ETag remains constant until server restart or route reload
|
||||
|
||||
#### File Routes Caching
|
||||
|
||||
- **Last-Modified**: Uses file's `mtime` for `Last-Modified` header
|
||||
- **If-Modified-Since**: Compares client date with file modification time
|
||||
- **304 responses**: Returns `304 Not Modified` when file unchanged since client's cached version
|
||||
- **Content-Length**: Automatically set based on current file size
|
||||
- **Dynamic validation**: Checks file modification time on each request
|
||||
|
||||
#### Status Code Handling
|
||||
|
||||
Both route types automatically adjust status codes:
|
||||
|
||||
- **200 → 204**: Empty files (0 bytes) return `204 No Content` instead of `200 OK`
|
||||
- **200 → 304**: Successful cache validation returns `304 Not Modified`
|
||||
- **File routes only**: Missing or inaccessible files return `404 Not Found`
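A minimal sketch of how the static-route behavior above could be checked (assuming `./logo.png` exists; the ETag value is whatever the server computed at startup):

```ts
const server = Bun.serve({
  routes: {
    // Buffered at startup, so conditional requests are answered from memory
    "/logo.png": new Response(await Bun.file("./logo.png").bytes()),
  },
  fetch() {
    return new Response("Not Found", { status: 404 });
  },
});

const first = await fetch(new URL("/logo.png", server.url));
const etag = first.headers.get("etag");

// Resending the validator should produce 304 Not Modified with an empty body
const second = await fetch(new URL("/logo.png", server.url), {
  headers: { "If-None-Match": etag ?? "" },
});
console.log(second.status); // expected: 304
```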
|
||||
|
||||
```ts
|
||||
const server = Bun.serve({
|
||||
static: {
|
||||
|
||||
@@ -88,9 +88,6 @@ await redis.set("user:1:name", "Alice");
|
||||
// Get a key
|
||||
const name = await redis.get("user:1:name");
|
||||
|
||||
// Get a key as Uint8Array
|
||||
const buffer = await redis.getBuffer("user:1:name");
|
||||
|
||||
// Delete a key
|
||||
await redis.del("user:1:name");
|
||||
|
||||
@@ -135,10 +132,6 @@ await redis.hmset("user:123", [
|
||||
const userFields = await redis.hmget("user:123", ["name", "email"]);
|
||||
console.log(userFields); // ["Alice", "alice@example.com"]
|
||||
|
||||
// Get single field from hash (returns value directly, null if missing)
|
||||
const userName = await redis.hget("user:123", "name");
|
||||
console.log(userName); // "Alice"
|
||||
|
||||
// Increment a numeric field in a hash
|
||||
await redis.hincrby("user:123", "visits", 1);
|
||||
|
||||
@@ -168,102 +161,6 @@ const randomTag = await redis.srandmember("tags");
|
||||
const poppedTag = await redis.spop("tags");
|
||||
```
|
||||
|
||||
## Pub/Sub
|
||||
|
||||
Bun provides native bindings for the [Redis
|
||||
Pub/Sub](https://redis.io/docs/latest/develop/pubsub/) protocol. **New in Bun
|
||||
1.2.23**
|
||||
|
||||
{% callout %}
|
||||
**🚧** — The Redis Pub/Sub feature is experimental. Although we expect it to be
|
||||
stable, we're currently actively looking for feedback and areas for improvement.
|
||||
{% /callout %}
|
||||
|
||||
### Basic Usage
|
||||
|
||||
To get started publishing messages, you can set up a publisher in
|
||||
`publisher.ts`:
|
||||
|
||||
```typescript#publisher.ts
|
||||
import { RedisClient } from "bun";
|
||||
|
||||
const writer = new RedisClient("redis://localhost:6739");
|
||||
await writer.connect();
|
||||
|
||||
writer.publish("general", "Hello everyone!");
|
||||
|
||||
writer.close();
|
||||
```
|
||||
|
||||
In another file, create the subscriber in `subscriber.ts`:
|
||||
|
||||
```typescript#subscriber.ts
|
||||
import { RedisClient } from "bun";
|
||||
|
||||
const listener = new RedisClient("redis://localhost:6739");
|
||||
await listener.connect();
|
||||
|
||||
await listener.subscribe("general", (message, channel) => {
|
||||
console.log(`Received: ${message}`);
|
||||
});
|
||||
```
|
||||
|
||||
In one shell, run your subscriber:
|
||||
|
||||
```bash
|
||||
bun run subscriber.ts
|
||||
```
|
||||
|
||||
and, in another, run your publisher:
|
||||
|
||||
```bash
|
||||
bun run publisher.ts
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
**Note:** The subscription mode takes over the `RedisClient` connection. A
|
||||
client with subscriptions can only call `RedisClient.prototype.subscribe()`. In
|
||||
other words, applications which need to message Redis need a separate
|
||||
connection, acquirable through `.duplicate()`:
|
||||
|
||||
```typescript
|
||||
import { RedisClient } from "bun";
|
||||
|
||||
const redis = new RedisClient("redis://localhost:6379");
|
||||
await redis.connect();
|
||||
const subscriber = await redis.duplicate();
|
||||
|
||||
await subscriber.subscribe("foo", () => {});
|
||||
await redis.set("bar", "baz");
|
||||
```
|
||||
|
||||
{% /callout %}
|
||||
|
||||
### Publishing
|
||||
|
||||
Publishing messages is done through the `publish()` method:
|
||||
|
||||
```typescript
|
||||
await client.publish(channelName, message);
|
||||
```
|
||||
|
||||
### Subscriptions
|
||||
|
||||
The Bun `RedisClient` allows you to subscribe to channels through the
|
||||
`.subscribe()` method:
|
||||
|
||||
```typescript
|
||||
await client.subscribe(channel, (message, channel) => {});
|
||||
```
|
||||
|
||||
You can unsubscribe through the `.unsubscribe()` method:
|
||||
|
||||
```typescript
|
||||
await client.unsubscribe(); // Unsubscribe from all channels.
|
||||
await client.unsubscribe(channel); // Unsubscribe a particular channel.
|
||||
await client.unsubscribe(channel, listener); // Unsubscribe a particular listener.
|
||||
```
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### Command Execution and Pipelining
|
||||
@@ -585,10 +482,9 @@ When connecting to Redis servers using older versions that don't support RESP3,
|
||||
|
||||
Current limitations of the Redis client we are planning to address in future versions:
|
||||
|
||||
- [ ] No dedicated API for pub/sub functionality (though you can use the raw command API)
|
||||
- [ ] Transactions (MULTI/EXEC) must be done through raw commands for now
|
||||
- [ ] Streams are supported but without dedicated methods
|
||||
- [ ] Pub/Sub does not currently support binary data, nor pattern-based
|
||||
subscriptions.
|
||||
|
||||
Unsupported features:
|
||||
|
||||
|
||||
@@ -1,319 +0,0 @@
|
||||
Store and retrieve sensitive credentials securely using the operating system's native credential storage APIs.
|
||||
|
||||
**Experimental:** This API is new and experimental. It may change in the future.
|
||||
|
||||
```typescript
|
||||
import { secrets } from "bun";
|
||||
|
||||
const githubToken = await secrets.get({
|
||||
service: "my-cli-tool",
|
||||
name: "github-token",
|
||||
});
|
||||
|
||||
if (!githubToken) {
  await secrets.set({
    service: "my-cli-tool",
    name: "github-token",
    value: prompt("Please enter your GitHub token"),
  });
  console.log("GitHub token stored");
} else {
  const response = await fetch("https://api.github.com/user", {
    headers: { "Authorization": `token ${githubToken}` },
  });
}
|
||||
```
|
||||
|
||||
## Overview
|
||||
|
||||
`Bun.secrets` provides a cross-platform API for managing sensitive credentials that CLI tools and development applications typically store in plaintext files like `~/.npmrc`, `~/.aws/credentials`, or `.env` files. It uses:
|
||||
|
||||
- **macOS**: Keychain Services
|
||||
- **Linux**: libsecret (GNOME Keyring, KWallet, etc.)
|
||||
- **Windows**: Windows Credential Manager
|
||||
|
||||
All operations are asynchronous and non-blocking, running on Bun's threadpool.
|
||||
|
||||
Note: in the future, we may add an additional `provider` option to make this better for production deployment secrets, but today this API is mostly useful for local development tools.
|
||||
|
||||
## API
|
||||
|
||||
### `Bun.secrets.get(options)`
|
||||
|
||||
Retrieve a stored credential.
|
||||
|
||||
```typescript
|
||||
import { secrets } from "bun";
|
||||
|
||||
const password = await Bun.secrets.get({
|
||||
service: "my-app",
|
||||
name: "alice@example.com",
|
||||
});
|
||||
// Returns: string | null
|
||||
|
||||
// Or if you prefer without an object
|
||||
const password = await Bun.secrets.get("my-app", "alice@example.com");
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
|
||||
- `options.service` (string, required) - The service or application name
|
||||
- `options.name` (string, required) - The username or account identifier
|
||||
|
||||
**Returns:**
|
||||
|
||||
- `Promise<string | null>` - The stored password, or `null` if not found
|
||||
|
||||
### `Bun.secrets.set(options, value)`
|
||||
|
||||
Store or update a credential.
|
||||
|
||||
```typescript
|
||||
import { secrets } from "bun";
|
||||
|
||||
await secrets.set({
|
||||
service: "my-app",
|
||||
name: "alice@example.com",
|
||||
value: "super-secret-password",
|
||||
});
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
|
||||
- `options.service` (string, required) - The service or application name
|
||||
- `options.name` (string, required) - The username or account identifier
|
||||
- `value` (string, required) - The password or secret to store
|
||||
|
||||
**Notes:**
|
||||
|
||||
- If a credential already exists for the given service/name combination, it will be replaced
|
||||
- The stored value is encrypted by the operating system
|
||||
|
||||
### `Bun.secrets.delete(options)`
|
||||
|
||||
Delete a stored credential.
|
||||
|
||||
```typescript
|
||||
const deleted = await Bun.secrets.delete({
|
||||
service: "my-app",
|
||||
name: "alice@example.com",
|
||||
value: "super-secret-password",
|
||||
});
|
||||
// Returns: boolean
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
|
||||
- `options.service` (string, required) - The service or application name
|
||||
- `options.name` (string, required) - The username or account identifier
|
||||
|
||||
**Returns:**
|
||||
|
||||
- `Promise<boolean>` - `true` if a credential was deleted, `false` if not found
|
||||
|
||||
## Examples
|
||||
|
||||
### Storing CLI Tool Credentials
|
||||
|
||||
```javascript
|
||||
// Store GitHub CLI token (instead of ~/.config/gh/hosts.yml)
|
||||
await Bun.secrets.set({
|
||||
service: "my-app.com",
|
||||
name: "github-token",
|
||||
value: "ghp_xxxxxxxxxxxxxxxxxxxx",
|
||||
});
|
||||
|
||||
// Or if you prefer without an object
|
||||
await Bun.secrets.set("my-app.com", "github-token", "ghp_xxxxxxxxxxxxxxxxxxxx");
|
||||
|
||||
// Store npm registry token (instead of ~/.npmrc)
|
||||
await Bun.secrets.set({
|
||||
service: "npm-registry",
|
||||
name: "https://registry.npmjs.org",
|
||||
value: "npm_xxxxxxxxxxxxxxxxxxxx",
|
||||
});
|
||||
|
||||
// Retrieve for API calls
|
||||
const token = await Bun.secrets.get({
|
||||
service: "gh-cli",
|
||||
name: "github.com",
|
||||
});
|
||||
|
||||
if (token) {
|
||||
const response = await fetch("https://api.github.com/name", {
|
||||
headers: {
|
||||
"Authorization": `token ${token}`,
|
||||
},
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Migrating from Plaintext Config Files
|
||||
|
||||
```javascript
|
||||
// Instead of storing in ~/.aws/credentials
|
||||
await Bun.secrets.set({
|
||||
service: "aws-cli",
|
||||
name: "AWS_SECRET_ACCESS_KEY",
|
||||
value: process.env.AWS_SECRET_ACCESS_KEY,
|
||||
});
|
||||
|
||||
// Instead of .env files with sensitive data
|
||||
await Bun.secrets.set({
|
||||
service: "my-app",
|
||||
name: "api-key",
|
||||
value: "sk_live_xxxxxxxxxxxxxxxxxxxx",
|
||||
});
|
||||
|
||||
// Load at runtime
|
||||
const apiKey =
|
||||
(await Bun.secrets.get({
|
||||
service: "my-app",
|
||||
name: "api-key",
|
||||
})) || process.env.API_KEY; // Fallback for CI/production
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
```javascript
|
||||
try {
|
||||
await Bun.secrets.set({
|
||||
service: "my-app",
|
||||
name: "alice",
|
||||
value: "password123",
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Failed to store credential:", error.message);
|
||||
}
|
||||
|
||||
// Check if a credential exists
|
||||
const password = await Bun.secrets.get({
|
||||
service: "my-app",
|
||||
name: "alice",
|
||||
});
|
||||
|
||||
if (password === null) {
|
||||
console.log("No credential found");
|
||||
}
|
||||
```
|
||||
|
||||
### Updating Credentials
|
||||
|
||||
```javascript
|
||||
// Initial password
|
||||
await Bun.secrets.set({
|
||||
service: "email-server",
|
||||
name: "admin@example.com",
|
||||
value: "old-password",
|
||||
});
|
||||
|
||||
// Update to new password
|
||||
await Bun.secrets.set({
|
||||
service: "email-server",
|
||||
name: "admin@example.com",
|
||||
value: "new-password",
|
||||
});
|
||||
|
||||
// The old password is replaced
|
||||
```
|
||||
|
||||
## Platform Behavior
|
||||
|
||||
### macOS (Keychain)
|
||||
|
||||
- Credentials are stored in the user's login keychain
|
||||
- The keychain may prompt for access permission on first use
|
||||
- Credentials persist across system restarts
|
||||
- Accessible by the user who stored them
|
||||
|
||||
### Linux (libsecret)
|
||||
|
||||
- Requires a secret service daemon (GNOME Keyring, KWallet, etc.)
|
||||
- Credentials are stored in the default collection
|
||||
- May prompt for unlock if the keyring is locked
|
||||
- The secret service must be running
|
||||
|
||||
### Windows (Credential Manager)
|
||||
|
||||
- Credentials are stored in Windows Credential Manager
|
||||
- Visible in Control Panel → Credential Manager → Windows Credentials
|
||||
- Persist with `CRED_PERSIST_ENTERPRISE` flag so it's scoped per user
|
||||
- Encrypted using Windows Data Protection API
|
||||
|
||||
## Security Considerations
|
||||
|
||||
1. **Encryption**: Credentials are encrypted by the operating system's credential manager
|
||||
2. **Access Control**: Only the user who stored the credential can retrieve it
|
||||
3. **No Plain Text**: Passwords are never stored in plain text
|
||||
4. **Memory Safety**: Bun zeros out password memory after use
|
||||
5. **Process Isolation**: Credentials are isolated per user account
|
||||
|
||||
## Limitations
|
||||
|
||||
- Maximum password length varies by platform (typically 2048-4096 bytes)
|
||||
- Service and account names should be reasonable lengths (< 256 characters)
|
||||
- Some special characters may need escaping depending on the platform
|
||||
- Requires appropriate system services:
|
||||
- Linux: Secret service daemon must be running
|
||||
- macOS: Keychain Access must be available
|
||||
- Windows: Credential Manager service must be enabled
|
||||
|
||||
## Comparison with Environment Variables
|
||||
|
||||
Unlike environment variables, `Bun.secrets`:
|
||||
|
||||
- ✅ Encrypts credentials at rest (thanks to the operating system)
|
||||
- ✅ Avoids exposing secrets in process memory dumps (memory is zeroed after it's no longer needed)
|
||||
- ✅ Survives application restarts
|
||||
- ✅ Can be updated without restarting the application
|
||||
- ✅ Provides user-level access control
|
||||
- ❌ Requires OS credential service
|
||||
- ❌ Not very useful for deployment secrets (use environment variables in production)
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Use descriptive service names**: Match the tool or application name
|
||||
If you're building a CLI for external use, you probably should use a UTI (Uniform Type Identifier) for the service name.
|
||||
|
||||
```javascript
|
||||
// Good - matches the actual tool
|
||||
{ service: "com.docker.hub", name: "username" }
|
||||
{ service: "com.vercel.cli", name: "team-name" }
|
||||
|
||||
// Avoid - too generic
|
||||
{ service: "api", name: "key" }
|
||||
```
|
||||
|
||||
2. **Credentials-only**: Don't store application configuration in this API
|
||||
This API is slow; you probably still need to use a config file for some things.
|
||||
|
||||
3. **Use for local development tools**:
|
||||
- ✅ CLI tools (gh, npm, docker, kubectl)
|
||||
- ✅ Local development servers
|
||||
- ✅ Personal API keys for testing
|
||||
- ❌ Production servers (use proper secret management)
|
||||
|
||||
## TypeScript
|
||||
|
||||
```typescript
|
||||
namespace Bun {
|
||||
interface SecretsOptions {
|
||||
service: string;
|
||||
name: string;
|
||||
}
|
||||
|
||||
interface Secrets {
|
||||
get(options: SecretsOptions): Promise<string | null>;
|
||||
set(options: SecretsOptions, value: string): Promise<void>;
|
||||
delete(options: SecretsOptions): Promise<boolean>;
|
||||
}
|
||||
|
||||
const secrets: Secrets;
|
||||
}
|
||||
```
|
||||
|
||||
## See Also
|
||||
|
||||
- [Environment Variables](./env.md) - For deployment configuration
|
||||
- [Bun.password](./password.md) - For password hashing and verification
|
||||
docs/api/sql.md
@@ -1,27 +1,20 @@
|
||||
Bun provides native bindings for working with SQL databases through a unified Promise-based API that supports PostgreSQL, MySQL, and SQLite. The interface is designed to be simple and performant, using tagged template literals for queries and offering features like connection pooling, transactions, and prepared statements.
|
||||
Bun provides native bindings for working with PostgreSQL databases with a modern, Promise-based API. The interface is designed to be simple and performant, using tagged template literals for queries and offering features like connection pooling, transactions, and prepared statements.
|
||||
|
||||
```ts
|
||||
import { sql, SQL } from "bun";
|
||||
import { sql } from "bun";
|
||||
|
||||
// PostgreSQL (default)
|
||||
const users = await sql`
|
||||
SELECT * FROM users
|
||||
WHERE active = ${true}
|
||||
LIMIT ${10}
|
||||
`;
|
||||
|
||||
// With MySQL
|
||||
const mysql = new SQL("mysql://user:pass@localhost:3306/mydb");
|
||||
const mysqlResults = await mysql`
|
||||
SELECT * FROM users
|
||||
WHERE active = ${true}
|
||||
`;
|
||||
|
||||
// With SQLite
|
||||
const sqlite = new SQL("sqlite://myapp.db");
|
||||
const sqliteResults = await sqlite`
|
||||
SELECT * FROM users
|
||||
WHERE active = ${1}
|
||||
// Select with multiple conditions
|
||||
const activeUsers = await sql`
|
||||
SELECT *
|
||||
FROM users
|
||||
WHERE active = ${true}
|
||||
AND age >= ${18}
|
||||
`;
|
||||
```
|
||||
|
||||
@@ -51,186 +44,6 @@ const sqliteResults = await sqlite`
|
||||
|
||||
{% /features %}
|
||||
|
||||
## Database Support
|
||||
|
||||
Bun.SQL provides a unified API for multiple database systems:
|
||||
|
||||
### PostgreSQL
|
||||
|
||||
PostgreSQL is used when:
|
||||
|
||||
- The connection string doesn't match SQLite or MySQL patterns (it's the fallback adapter)
|
||||
- The connection string explicitly uses `postgres://` or `postgresql://` protocols
|
||||
- No connection string is provided and environment variables point to PostgreSQL
|
||||
|
||||
```ts
|
||||
import { sql } from "bun";
|
||||
// Uses PostgreSQL if DATABASE_URL is not set or is a PostgreSQL URL
|
||||
await sql`SELECT ...`;
|
||||
|
||||
import { SQL } from "bun";
|
||||
const pg = new SQL("postgres://user:pass@localhost:5432/mydb");
|
||||
await pg`SELECT ...`;
|
||||
```
|
||||
|
||||
### MySQL
|
||||
|
||||
MySQL support is built into Bun.SQL, providing the same tagged template literal interface with full compatibility for MySQL 5.7+ and MySQL 8.0+:
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
// MySQL connection
|
||||
const mysql = new SQL("mysql://user:password@localhost:3306/database");
|
||||
const mysql2 = new SQL("mysql2://user:password@localhost:3306/database"); // mysql2 protocol also works
|
||||
|
||||
// Using options object
|
||||
const mysql3 = new SQL({
|
||||
adapter: "mysql",
|
||||
hostname: "localhost",
|
||||
port: 3306,
|
||||
database: "myapp",
|
||||
username: "dbuser",
|
||||
password: "secretpass",
|
||||
});
|
||||
|
||||
// Works with parameters - automatically uses prepared statements
|
||||
const users = await mysql`SELECT * FROM users WHERE id = ${userId}`;
|
||||
|
||||
// Transactions work the same as PostgreSQL
|
||||
await mysql.begin(async tx => {
|
||||
await tx`INSERT INTO users (name) VALUES (${"Alice"})`;
|
||||
await tx`UPDATE accounts SET balance = balance - 100 WHERE user_id = ${userId}`;
|
||||
});
|
||||
|
||||
// Bulk inserts
|
||||
const newUsers = [
|
||||
{ name: "Alice", email: "alice@example.com" },
|
||||
{ name: "Bob", email: "bob@example.com" },
|
||||
];
|
||||
await mysql`INSERT INTO users ${mysql(newUsers)}`;
|
||||
```
|
||||
|
||||
{% details summary="MySQL Connection String Formats" %}
|
||||
|
||||
MySQL accepts various URL formats for connection strings:
|
||||
|
||||
```ts
|
||||
// Standard mysql:// protocol
|
||||
new SQL("mysql://user:pass@localhost:3306/database");
|
||||
new SQL("mysql://user:pass@localhost/database"); // Default port 3306
|
||||
|
||||
// mysql2:// protocol (compatibility with mysql2 npm package)
|
||||
new SQL("mysql2://user:pass@localhost:3306/database");
|
||||
|
||||
// With query parameters
|
||||
new SQL("mysql://user:pass@localhost/db?ssl=true");
|
||||
|
||||
// Unix socket connection
|
||||
new SQL("mysql://user:pass@/database?socket=/var/run/mysqld/mysqld.sock");
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
{% details summary="MySQL-Specific Features" %}
|
||||
|
||||
MySQL databases support:
|
||||
|
||||
- **Prepared statements**: Automatically created for parameterized queries with statement caching
|
||||
- **Binary protocol**: For better performance with prepared statements and accurate type handling
|
||||
- **Multiple result sets**: Support for stored procedures returning multiple result sets
|
||||
- **Authentication plugins**: Support for mysql_native_password, caching_sha2_password (MySQL 8.0 default), and sha256_password
|
||||
- **SSL/TLS connections**: Configurable SSL modes similar to PostgreSQL
|
||||
- **Connection attributes**: Client information sent to server for monitoring
|
||||
- **Query pipelining**: Execute multiple prepared statements without waiting for responses
|
||||
|
||||
{% /details %}
|
||||
|
||||
### SQLite
|
||||
|
||||
SQLite support is built into Bun.SQL, providing the same tagged template literal interface:
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
// In-memory database
|
||||
const memory = new SQL(":memory:");
|
||||
const memory2 = new SQL("sqlite://:memory:");
|
||||
|
||||
// File-based database
|
||||
const db = new SQL("sqlite://myapp.db");
|
||||
|
||||
// Using options object
|
||||
const db2 = new SQL({
|
||||
adapter: "sqlite",
|
||||
filename: "./data/app.db",
|
||||
});
|
||||
|
||||
// For simple filenames, specify adapter explicitly
|
||||
const db3 = new SQL("myapp.db", { adapter: "sqlite" });
|
||||
```
|
||||
|
||||
{% details summary="SQLite Connection String Formats" %}
|
||||
|
||||
SQLite accepts various URL formats for connection strings:
|
||||
|
||||
```ts
|
||||
// Standard sqlite:// protocol
|
||||
new SQL("sqlite://path/to/database.db");
|
||||
new SQL("sqlite:path/to/database.db"); // Without slashes
|
||||
|
||||
// file:// protocol (also recognized as SQLite)
|
||||
new SQL("file://path/to/database.db");
|
||||
new SQL("file:path/to/database.db");
|
||||
|
||||
// Special :memory: database
|
||||
new SQL(":memory:");
|
||||
new SQL("sqlite://:memory:");
|
||||
new SQL("file://:memory:");
|
||||
|
||||
// Relative and absolute paths
|
||||
new SQL("sqlite://./local.db"); // Relative to current directory
|
||||
new SQL("sqlite://../parent/db.db"); // Parent directory
|
||||
new SQL("sqlite:///absolute/path.db"); // Absolute path
|
||||
|
||||
// With query parameters
|
||||
new SQL("sqlite://data.db?mode=ro"); // Read-only mode
|
||||
new SQL("sqlite://data.db?mode=rw"); // Read-write mode (no create)
|
||||
new SQL("sqlite://data.db?mode=rwc"); // Read-write-create mode (default)
|
||||
```
|
||||
|
||||
**Note:** Simple filenames without a protocol (like `"myapp.db"`) require explicitly specifying `{ adapter: "sqlite" }` to avoid ambiguity with PostgreSQL.
|
||||
|
||||
{% /details %}
|
||||
|
||||
{% details summary="SQLite-Specific Options" %}
|
||||
|
||||
SQLite databases support additional configuration options:
|
||||
|
||||
```ts
|
||||
const db = new SQL({
|
||||
adapter: "sqlite",
|
||||
filename: "app.db",
|
||||
|
||||
// SQLite-specific options
|
||||
readonly: false, // Open in read-only mode
|
||||
create: true, // Create database if it doesn't exist
|
||||
readwrite: true, // Open for reading and writing
|
||||
|
||||
// Additional Bun:sqlite options
|
||||
strict: true, // Enable strict mode
|
||||
safeIntegers: false, // Use JavaScript numbers for integers
|
||||
});
|
||||
```
|
||||
|
||||
Query parameters in the URL are parsed to set these options:
|
||||
|
||||
- `?mode=ro` → `readonly: true`
|
||||
- `?mode=rw` → `readonly: false, create: false`
|
||||
- `?mode=rwc` → `readonly: false, create: true` (default)
|
||||
|
||||
{% /details %}
|
||||
|
||||
### Inserting data
|
||||
|
||||
You can pass JavaScript values directly to the SQL template literal and escaping will be handled for you.
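For instance (the table and column names here are hypothetical):

```ts
import { sql } from "bun";

const name = "Alice";
const email = "alice@example.com";

// Values are sent as bound parameters, never spliced into the SQL string
await sql`INSERT INTO users (name, email) VALUES (${name}, ${email})`;
```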
|
||||
@@ -377,22 +190,6 @@ const users = [
|
||||
await sql`SELECT * FROM users WHERE id IN ${sql(users, "id")}`;
|
||||
```
|
||||
|
||||
### `sql.array` helper
|
||||
|
||||
The `sql.array` helper creates PostgreSQL array literals from JavaScript arrays:
|
||||
|
||||
```ts
|
||||
// Create array literals for PostgreSQL
|
||||
await sql`INSERT INTO tags (items) VALUES (${sql.array(["red", "blue", "green"])})`;
|
||||
// Generates: INSERT INTO tags (items) VALUES (ARRAY['red', 'blue', 'green'])
|
||||
|
||||
// Works with numeric arrays too
|
||||
await sql`SELECT * FROM products WHERE ids = ANY(${sql.array([1, 2, 3])})`;
|
||||
// Generates: SELECT * FROM products WHERE ids = ANY(ARRAY[1, 2, 3])
|
||||
```
|
||||
|
||||
**Note**: `sql.array` is PostgreSQL-only. Multi-dimensional arrays and NULL elements may not be supported yet.
|
||||
|
||||
## `sql``.simple()`
|
||||
|
||||
The PostgreSQL wire protocol supports two types of queries: "simple" and "extended". Simple queries can contain multiple statements but don't support parameters, while extended queries (the default) support parameters but only allow one statement.
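A minimal sketch of the difference (the statements are placeholders):

```ts
import { sql } from "bun";

// Extended protocol (default): parameters are allowed, one statement per query
await sql`SELECT * FROM users WHERE id = ${1}`;

// Simple protocol: several statements in one call, but no parameters
await sql`
  SELECT 1;
  SELECT 2;
`.simple();
```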
|
||||
@@ -454,97 +251,14 @@ await query;
|
||||
|
||||
## Database Environment Variables
|
||||
|
||||
`sql` connection parameters can be configured using environment variables. The client checks these variables in a specific order of precedence and automatically detects the database type based on the connection string format.
|
||||
`sql` connection parameters can be configured using environment variables. The client checks these variables in a specific order of precedence.
|
||||
|
||||
### Automatic Database Detection
|
||||
|
||||
When using `Bun.sql()` without arguments or `new SQL()` with a connection string, the adapter is automatically detected based on the URL format:
|
||||
|
||||
#### MySQL Auto-Detection
|
||||
|
||||
MySQL is automatically selected when the connection string matches these patterns:
|
||||
|
||||
- `mysql://...` - MySQL protocol URLs
|
||||
- `mysql2://...` - MySQL2 protocol URLs (compatibility alias)
|
||||
|
||||
```ts
|
||||
// These all use MySQL automatically (no adapter needed)
|
||||
const sql1 = new SQL("mysql://user:pass@localhost/mydb");
|
||||
const sql2 = new SQL("mysql2://user:pass@localhost:3306/mydb");
|
||||
|
||||
// Works with DATABASE_URL environment variable
|
||||
DATABASE_URL="mysql://user:pass@localhost/mydb" bun run app.js
|
||||
DATABASE_URL="mysql2://user:pass@localhost:3306/mydb" bun run app.js
|
||||
```
|
||||
|
||||
#### SQLite Auto-Detection
|
||||
|
||||
SQLite is automatically selected when the connection string matches these patterns:
|
||||
|
||||
- `:memory:` - In-memory database
|
||||
- `sqlite://...` - SQLite protocol URLs
|
||||
- `sqlite:...` - SQLite protocol without slashes
|
||||
- `file://...` - File protocol URLs
|
||||
- `file:...` - File protocol without slashes
|
||||
|
||||
```ts
|
||||
// These all use SQLite automatically (no adapter needed)
|
||||
const sql1 = new SQL(":memory:");
|
||||
const sql2 = new SQL("sqlite://app.db");
|
||||
const sql3 = new SQL("file://./database.db");
|
||||
|
||||
// Works with DATABASE_URL environment variable
|
||||
DATABASE_URL=":memory:" bun run app.js
|
||||
DATABASE_URL="sqlite://myapp.db" bun run app.js
|
||||
DATABASE_URL="file://./data/app.db" bun run app.js
|
||||
```
|
||||
|
||||
#### PostgreSQL Auto-Detection
|
||||
|
||||
PostgreSQL is the default for connection strings that don't match MySQL or SQLite patterns:
|
||||
|
||||
```bash
|
||||
# PostgreSQL is detected for these patterns
|
||||
DATABASE_URL="postgres://user:pass@localhost:5432/mydb" bun run app.js
|
||||
DATABASE_URL="postgresql://user:pass@localhost:5432/mydb" bun run app.js
|
||||
|
||||
# Or any URL that doesn't match MySQL or SQLite patterns
|
||||
DATABASE_URL="localhost:5432/mydb" bun run app.js
|
||||
```
|
||||
|
||||
### MySQL Environment Variables
|
||||
|
||||
MySQL connections can be configured via environment variables:
|
||||
|
||||
```bash
|
||||
# Primary connection URL (checked first)
|
||||
MYSQL_URL="mysql://user:pass@localhost:3306/mydb"
|
||||
|
||||
# Alternative: DATABASE_URL with MySQL protocol
|
||||
DATABASE_URL="mysql://user:pass@localhost:3306/mydb"
|
||||
DATABASE_URL="mysql2://user:pass@localhost:3306/mydb"
|
||||
```
|
||||
|
||||
If no connection URL is provided, MySQL checks these individual parameters:
|
||||
|
||||
| Environment Variable | Default Value | Description |
|
||||
| ------------------------ | ------------- | -------------------------------- |
|
||||
| `MYSQL_HOST` | `localhost` | Database host |
|
||||
| `MYSQL_PORT` | `3306` | Database port |
|
||||
| `MYSQL_USER` | `root` | Database user |
|
||||
| `MYSQL_PASSWORD` | (empty) | Database password |
|
||||
| `MYSQL_DATABASE` | `mysql` | Database name |
|
||||
| `MYSQL_URL` | (empty) | Primary connection URL for MySQL |
|
||||
| `TLS_MYSQL_DATABASE_URL` | (empty) | SSL/TLS-enabled connection URL |
|
||||
|
||||
### PostgreSQL Environment Variables
|
||||
|
||||
The following environment variables can be used to define the PostgreSQL connection:
|
||||
The following environment variables can be used to define the connection URL:
|
||||
|
||||
| Environment Variable | Description |
|
||||
| --------------------------- | ------------------------------------------ |
|
||||
| `POSTGRES_URL` | Primary connection URL for PostgreSQL |
|
||||
| `DATABASE_URL` | Alternative connection URL (auto-detected) |
|
||||
| `DATABASE_URL` | Alternative connection URL |
|
||||
| `PGURL` | Alternative connection URL |
|
||||
| `PG_URL` | Alternative connection URL |
|
||||
| `TLS_POSTGRES_DATABASE_URL` | SSL/TLS-enabled connection URL |
|
||||
@@ -560,19 +274,6 @@ If no connection URL is provided, the system checks for the following individual
|
||||
| `PGPASSWORD` | - | (empty) | Database password |
|
||||
| `PGDATABASE` | - | username | Database name |
|
||||
|
||||
### SQLite Environment Variables
|
||||
|
||||
SQLite connections can be configured via `DATABASE_URL` when it contains a SQLite-compatible URL:
|
||||
|
||||
```bash
|
||||
# These are all recognized as SQLite
|
||||
DATABASE_URL=":memory:"
|
||||
DATABASE_URL="sqlite://./app.db"
|
||||
DATABASE_URL="file:///absolute/path/to/db.sqlite"
|
||||
```
|
||||
|
||||
**Note:** PostgreSQL-specific environment variables (`POSTGRES_URL`, `PGHOST`, etc.) are ignored when using SQLite.
|
||||
|
||||
## Runtime Preconnection
|
||||
|
||||
Bun can preconnect to PostgreSQL at startup to improve performance by establishing database connections before your application code runs. This is useful for reducing connection latency on the first database query.
|
||||
@@ -592,65 +293,16 @@ The `--sql-preconnect` flag will automatically establish a PostgreSQL connection
|
||||
|
||||
## Connection Options
|
||||
|
||||
You can configure your database connection manually by passing options to the SQL constructor. Options vary depending on the database adapter:
|
||||
|
||||
### MySQL Options
|
||||
You can configure your database connection manually by passing options to the SQL constructor:
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
const db = new SQL({
|
||||
// Required for MySQL when using options object
|
||||
adapter: "mysql",
|
||||
|
||||
// Connection details
|
||||
hostname: "localhost",
|
||||
port: 3306,
|
||||
database: "myapp",
|
||||
username: "dbuser",
|
||||
password: "secretpass",
|
||||
|
||||
// Unix socket connection (alternative to hostname/port)
|
||||
// socket: "/var/run/mysqld/mysqld.sock",
|
||||
|
||||
// Connection pool settings
|
||||
max: 20, // Maximum connections in pool (default: 10)
|
||||
idleTimeout: 30, // Close idle connections after 30s
|
||||
maxLifetime: 0, // Connection lifetime in seconds (0 = forever)
|
||||
connectionTimeout: 30, // Timeout when establishing new connections
|
||||
|
||||
// SSL/TLS options
|
||||
tls: {
|
||||
rejectUnauthorized: true,
|
||||
ca: "path/to/ca.pem",
|
||||
key: "path/to/key.pem",
|
||||
cert: "path/to/cert.pem",
|
||||
},
|
||||
|
||||
// Callbacks
|
||||
onconnect: client => {
|
||||
console.log("Connected to MySQL");
|
||||
},
|
||||
onclose: (client, err) => {
|
||||
if (err) {
|
||||
console.error("MySQL connection error:", err);
|
||||
} else {
|
||||
console.log("MySQL connection closed");
|
||||
}
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### PostgreSQL Options
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
const db = new SQL({
|
||||
// Connection details (adapter is auto-detected as PostgreSQL)
|
||||
// Required
|
||||
url: "postgres://user:pass@localhost:5432/dbname",
|
||||
|
||||
// Alternative connection parameters
|
||||
// Optional configuration
|
||||
hostname: "localhost",
|
||||
port: 5432,
|
||||
database: "myapp",
|
||||
@@ -678,52 +330,14 @@ const db = new SQL({
|
||||
|
||||
// Callbacks
|
||||
onconnect: client => {
|
||||
console.log("Connected to PostgreSQL");
|
||||
console.log("Connected to database");
|
||||
},
|
||||
onclose: client => {
|
||||
console.log("PostgreSQL connection closed");
|
||||
console.log("Connection closed");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### SQLite Options
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
const db = new SQL({
|
||||
// Required for SQLite
|
||||
adapter: "sqlite",
|
||||
filename: "./data/app.db", // or ":memory:" for in-memory database
|
||||
|
||||
// SQLite-specific access modes
|
||||
readonly: false, // Open in read-only mode
|
||||
create: true, // Create database if it doesn't exist
|
||||
readwrite: true, // Allow read and write operations
|
||||
|
||||
// SQLite data handling
|
||||
strict: true, // Enable strict mode for better type safety
|
||||
safeIntegers: false, // Use BigInt for integers exceeding JS number range
|
||||
|
||||
// Callbacks
|
||||
onconnect: client => {
|
||||
console.log("SQLite database opened");
|
||||
},
|
||||
onclose: client => {
|
||||
console.log("SQLite database closed");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
{% details summary="SQLite Connection Notes" %}
|
||||
|
||||
- **Connection Pooling**: SQLite doesn't use connection pooling as it's a file-based database. Each `SQL` instance represents a single connection.
|
||||
- **Transactions**: SQLite supports nested transactions through savepoints, similar to PostgreSQL.
|
||||
- **Concurrent Access**: SQLite handles concurrent access through file locking. Use WAL mode for better concurrency.
|
||||
- **Memory Databases**: Using `:memory:` creates a temporary database that exists only for the connection lifetime.
|
||||
|
||||
{% /details %}
|
||||
|
||||
## Dynamic passwords
|
||||
|
||||
When clients need to use alternative authentication schemes such as access tokens or connections to databases with rotating passwords, provide either a synchronous or asynchronous function that will resolve the dynamic password value at connection time.
|
||||
@@ -739,66 +353,11 @@ const sql = new SQL(url, {
|
||||
});
|
||||
```
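A minimal sketch, where `fetchAuthToken()` stands in for whatever mechanism issues your short-lived credential:

```ts
import { SQL } from "bun";

declare function fetchAuthToken(): Promise<string>; // placeholder for your own token source

const db = new SQL("postgres://app_user@localhost:5432/mydb", {
  // Resolved at connection time, so rotated credentials are picked up automatically
  password: async () => await fetchAuthToken(),
});
```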
|
||||
|
||||
## SQLite-Specific Features
|
||||
|
||||
### Query Execution
|
||||
|
||||
SQLite executes queries synchronously, unlike PostgreSQL which uses asynchronous I/O. However, the API remains consistent using Promises:
|
||||
|
||||
```ts
|
||||
const sqlite = new SQL("sqlite://app.db");
|
||||
|
||||
// Works the same as PostgreSQL, but executes synchronously under the hood
|
||||
const users = await sqlite`SELECT * FROM users`;
|
||||
|
||||
// Parameters work identically
|
||||
const user = await sqlite`SELECT * FROM users WHERE id = ${userId}`;
|
||||
```
|
||||
|
||||
### SQLite Pragmas
|
||||
|
||||
You can use PRAGMA statements to configure SQLite behavior:
|
||||
|
||||
```ts
|
||||
const sqlite = new SQL("sqlite://app.db");
|
||||
|
||||
// Enable foreign keys
|
||||
await sqlite`PRAGMA foreign_keys = ON`;
|
||||
|
||||
// Set journal mode to WAL for better concurrency
|
||||
await sqlite`PRAGMA journal_mode = WAL`;
|
||||
|
||||
// Check integrity
|
||||
const integrity = await sqlite`PRAGMA integrity_check`;
|
||||
```
|
||||
|
||||
### Data Type Differences
|
||||
|
||||
SQLite has a more flexible type system than PostgreSQL:
|
||||
|
||||
```ts
|
||||
// SQLite stores data in 5 storage classes: NULL, INTEGER, REAL, TEXT, BLOB
|
||||
const sqlite = new SQL("sqlite://app.db");
|
||||
|
||||
// SQLite is more lenient with types
|
||||
await sqlite`
|
||||
CREATE TABLE flexible (
|
||||
id INTEGER PRIMARY KEY,
|
||||
data TEXT, -- Can store numbers as strings
|
||||
value NUMERIC, -- Can store integers, reals, or text
|
||||
blob BLOB -- Binary data
|
||||
)
|
||||
`;
|
||||
|
||||
// JavaScript values are automatically converted
|
||||
await sqlite`INSERT INTO flexible VALUES (${1}, ${"text"}, ${123.45}, ${Buffer.from("binary")})`;
|
||||
```
|
||||
|
||||
## Transactions
|
||||
|
||||
To start a new transaction, use `sql.begin`. This method works for both PostgreSQL and SQLite. For PostgreSQL, it reserves a dedicated connection from the pool. For SQLite, it begins a transaction on the single connection.
|
||||
To start a new transaction, use `sql.begin`. This method reserves a dedicated connection for the duration of the transaction and provides a scoped `sql` instance to use within the callback function. Once the callback completes, `sql.begin` resolves with the return value of the callback.
|
||||
|
||||
The `BEGIN` command is sent automatically, including any optional configurations you specify. If an error occurs during the transaction, a `ROLLBACK` is triggered to ensure the process continues smoothly.
|
||||
The `BEGIN` command is sent automatically, including any optional configurations you specify. If an error occurs during the transaction, a `ROLLBACK` is triggered to release the reserved connection and ensure the process continues smoothly.
|
||||
|
||||
### Basic Transactions
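A minimal sketch of the callback form described above (table and column names are illustrative; `RETURNING` as shown is a PostgreSQL feature):

```ts
const user = await sql.begin(async tx => {
  // Every query here runs on the same reserved connection, inside one transaction
  const [user] = await tx`INSERT INTO users (name) VALUES (${"Alice"}) RETURNING *`;
  await tx`INSERT INTO accounts (user_id, balance) VALUES (${user.id}, ${0})`;
  return user; // becomes the resolved value of sql.begin
});
```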
|
||||
|
||||
@@ -993,36 +552,9 @@ Note that disabling prepared statements may impact performance for queries that
|
||||
|
||||
## Error Handling
|
||||
|
||||
The client provides typed errors for different failure scenarios. Errors are database-specific and extend from base error classes:
|
||||
The client provides typed errors for different failure scenarios:
|
||||
|
||||
### Error Classes
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
try {
|
||||
await sql`SELECT * FROM users`;
|
||||
} catch (error) {
|
||||
if (error instanceof SQL.PostgresError) {
|
||||
// PostgreSQL-specific error
|
||||
console.log(error.code); // PostgreSQL error code
|
||||
console.log(error.detail); // Detailed error message
|
||||
console.log(error.hint); // Helpful hint from PostgreSQL
|
||||
} else if (error instanceof SQL.SQLiteError) {
|
||||
// SQLite-specific error
|
||||
console.log(error.code); // SQLite error code (e.g., "SQLITE_CONSTRAINT")
|
||||
console.log(error.errno); // SQLite error number
|
||||
console.log(error.byteOffset); // Byte offset in SQL statement (if available)
|
||||
} else if (error instanceof SQL.SQLError) {
|
||||
// Generic SQL error (base class)
|
||||
console.log(error.message);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{% details summary="PostgreSQL-Specific Error Codes" %}
|
||||
|
||||
### PostgreSQL Connection Errors
|
||||
### Connection Errors
|
||||
|
||||
| Connection Errors | Description |
|
||||
| --------------------------------- | ---------------------------------------------------- |
|
||||
@@ -1087,51 +619,6 @@ try {
|
||||
| `ERR_POSTGRES_UNSAFE_TRANSACTION` | Unsafe transaction operation detected |
|
||||
| `ERR_POSTGRES_INVALID_TRANSACTION_STATE` | Invalid transaction state |
|
||||
|
||||
{% /details %}
|
||||
|
||||
### SQLite-Specific Errors
|
||||
|
||||
SQLite errors provide error codes and numbers that correspond to SQLite's standard error codes:
|
||||
|
||||
{% details summary="Common SQLite Error Codes" %}
|
||||
|
||||
| Error Code | errno | Description |
|
||||
| ------------------- | ----- | ---------------------------------------------------- |
|
||||
| `SQLITE_CONSTRAINT` | 19 | Constraint violation (UNIQUE, CHECK, NOT NULL, etc.) |
|
||||
| `SQLITE_BUSY` | 5 | Database is locked |
|
||||
| `SQLITE_LOCKED` | 6 | Table in the database is locked |
|
||||
| `SQLITE_READONLY` | 8 | Attempt to write to a readonly database |
|
||||
| `SQLITE_IOERR` | 10 | Disk I/O error |
|
||||
| `SQLITE_CORRUPT` | 11 | Database disk image is malformed |
|
||||
| `SQLITE_FULL` | 13 | Database or disk is full |
|
||||
| `SQLITE_CANTOPEN` | 14 | Unable to open database file |
|
||||
| `SQLITE_PROTOCOL` | 15 | Database lock protocol error |
|
||||
| `SQLITE_SCHEMA` | 17 | Database schema has changed |
|
||||
| `SQLITE_TOOBIG` | 18 | String or BLOB exceeds size limit |
|
||||
| `SQLITE_MISMATCH` | 20 | Data type mismatch |
|
||||
| `SQLITE_MISUSE` | 21 | Library used incorrectly |
|
||||
| `SQLITE_AUTH` | 23 | Authorization denied |
|
||||
|
||||
Example error handling:
|
||||
|
||||
```ts
|
||||
const sqlite = new SQL("sqlite://app.db");
|
||||
|
||||
try {
|
||||
await sqlite`INSERT INTO users (id, name) VALUES (1, 'Alice')`;
|
||||
await sqlite`INSERT INTO users (id, name) VALUES (1, 'Bob')`; // Duplicate ID
|
||||
} catch (error) {
|
||||
if (error instanceof SQL.SQLiteError) {
|
||||
if (error.code === "SQLITE_CONSTRAINT") {
|
||||
console.log("Constraint violation:", error.message);
|
||||
// Handle unique constraint violation
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
## Numbers and BigInt
|
||||
|
||||
Bun's SQL client includes special handling for large numbers that exceed the range of a 53-bit integer. Here's how it works:
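As a sketch, assuming the `bigint` option (referenced in the MySQL type table later on this page) opts in to BigInt for integers outside the safe range:

```ts
const sql = new SQL("postgres://localhost:5432/mydb", { bigint: true });

const [{ x }] = await sql`SELECT 9223372036854777 AS x`;
console.log(typeof x, x); // "bigint" 9223372036854777n
```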
|
||||
@@ -1164,106 +651,12 @@ console.log(typeof x, x); // "bigint" 9223372036854777n
|
||||
There are still some things we haven't finished yet.
|
||||
|
||||
- Connection preloading via `--db-preconnect` Bun CLI flag
|
||||
- MySQL support: [we're working on it](https://github.com/oven-sh/bun/pull/15274)
|
||||
- SQLite support: planned, but not started. Ideally, we implement it natively instead of wrapping `bun:sqlite`.
|
||||
- Column name transforms (e.g. `snake_case` to `camelCase`). This is mostly blocked on a unicode-aware implementation of changing the case in C++ using WebKit's `WTF::String`.
|
||||
- Column type transforms
|
||||
|
||||
## Database-Specific Features
|
||||
|
||||
#### Authentication Methods
|
||||
|
||||
MySQL supports multiple authentication plugins that are automatically negotiated:
|
||||
|
||||
- **`mysql_native_password`** - Traditional MySQL authentication, widely compatible
|
||||
- **`caching_sha2_password`** - Default in MySQL 8.0+, more secure with RSA key exchange
|
||||
- **`sha256_password`** - SHA-256 based authentication
|
||||
|
||||
The client automatically handles authentication plugin switching when requested by the server, including secure password exchange over non-SSL connections.
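In practice no extra configuration is needed; a plain connection negotiates whichever plugin the server requests (credentials here are placeholders):

```ts
const mysql = new SQL("mysql://app_user:secret@localhost:3306/mydb");
await mysql`SELECT 1`;
```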
|
||||
|
||||
#### Prepared Statements & Performance
|
||||
|
||||
MySQL uses server-side prepared statements for all parameterized queries:
|
||||
|
||||
```ts
|
||||
// This automatically creates a prepared statement on the server
|
||||
const user = await mysql`SELECT * FROM users WHERE id = ${userId}`;
|
||||
|
||||
// Prepared statements are cached and reused for identical queries
|
||||
for (const id of userIds) {
|
||||
// Same prepared statement is reused
|
||||
await mysql`SELECT * FROM users WHERE id = ${id}`;
|
||||
}
|
||||
|
||||
// Query pipelining - multiple statements sent without waiting
|
||||
const [users, orders, products] = await Promise.all([
|
||||
mysql`SELECT * FROM users WHERE active = ${true}`,
|
||||
mysql`SELECT * FROM orders WHERE status = ${"pending"}`,
|
||||
mysql`SELECT * FROM products WHERE in_stock = ${true}`,
|
||||
]);
|
||||
```
|
||||
|
||||
#### Multiple Result Sets
|
||||
|
||||
MySQL can return multiple result sets from multi-statement queries:
|
||||
|
||||
```ts
|
||||
const mysql = new SQL("mysql://user:pass@localhost/mydb");
|
||||
|
||||
// Multi-statement queries with simple() method
|
||||
const multiResults = await mysql`
|
||||
SELECT * FROM users WHERE id = 1;
|
||||
SELECT * FROM orders WHERE user_id = 1;
|
||||
`.simple();
|
||||
```
|
||||
|
||||
#### Character Sets & Collations
|
||||
|
||||
Bun.SQL automatically uses `utf8mb4` character set for MySQL connections, ensuring full Unicode support including emojis. This is the recommended character set for modern MySQL applications.
|
||||
|
||||
#### Connection Attributes
|
||||
|
||||
Bun automatically sends client information to MySQL for better monitoring:
|
||||
|
||||
```ts
|
||||
// These attributes are sent automatically:
|
||||
// _client_name: "Bun"
|
||||
// _client_version: <bun version>
|
||||
// You can see these in MySQL's performance_schema.session_connect_attrs
|
||||
```
|
||||
|
||||
#### Type Handling
|
||||
|
||||
MySQL types are automatically converted to JavaScript types:
|
||||
|
||||
| MySQL Type | JavaScript Type | Notes |
|
||||
| --------------------------------------- | ------------------------ | ---------------------------------------------------------------------------------------------------- |
|
||||
| INT, TINYINT, MEDIUMINT | number | Within safe integer range |
|
||||
| BIGINT | number, string, or BigInt | Returned as a number when the value fits in i32/u32; otherwise a string or BigInt, depending on the `bigint` option |
|
||||
| DECIMAL, NUMERIC | string | To preserve precision |
|
||||
| FLOAT, DOUBLE | number | |
|
||||
| DATE | Date | JavaScript Date object |
|
||||
| DATETIME, TIMESTAMP | Date | With timezone handling |
|
||||
| TIME | number | Total of microseconds |
|
||||
| YEAR | number | |
|
||||
| CHAR, VARCHAR, VARSTRING, STRING | string | |
|
||||
| TINY TEXT, MEDIUM TEXT, TEXT, LONG TEXT | string | |
|
||||
| TINY BLOB, MEDIUM BLOB, BLOB, LONG BLOB | string | BLOB types are aliases for TEXT types |
|
||||
| JSON | object/array | Automatically parsed |
|
||||
| BIT(1) | boolean | BIT(1) in MySQL |
|
||||
| GEOMETRY | string | Geometry data |
|
||||
|
||||
#### Differences from PostgreSQL
|
||||
|
||||
While the API is unified, there are some behavioral differences:
|
||||
|
||||
1. **Parameter placeholders**: MySQL uses `?` internally but Bun converts `$1, $2` style automatically
|
||||
2. **RETURNING clause**: MySQL doesn't support RETURNING; use `result.lastInsertRowid` or a separate SELECT
|
||||
3. **Array types**: MySQL doesn't have native array types like PostgreSQL
|
||||
|
||||
### MySQL-Specific Features
|
||||
|
||||
We haven't implemented `LOAD DATA INFILE` support yet.
|
||||
|
||||
### PostgreSQL-Specific Features
|
||||
### Postgres-specific features
|
||||
|
||||
We haven't implemented these yet:
|
||||
|
||||
@@ -1278,89 +671,13 @@ We also haven't implemented some of the more uncommon features like:
|
||||
- Point & PostGIS types
|
||||
- All the multi-dimensional integer array types (only a couple of the types are supported)
|
||||
|
||||
## Common Patterns & Best Practices
|
||||
|
||||
### Working with MySQL Result Sets
|
||||
|
||||
```ts
|
||||
// Getting insert ID after INSERT
|
||||
const result = await mysql`INSERT INTO users (name) VALUES (${"Alice"})`;
|
||||
console.log(result.lastInsertRowid); // MySQL's LAST_INSERT_ID()
|
||||
|
||||
// Handling affected rows
|
||||
const updated =
|
||||
await mysql`UPDATE users SET active = ${false} WHERE age < ${18}`;
|
||||
console.log(updated.affectedRows); // Number of rows updated
|
||||
|
||||
// Using MySQL-specific functions
|
||||
const now = await mysql`SELECT NOW() AS server_time`; // CURRENT_TIME is a reserved word, so use a different alias
|
||||
const uuid = await mysql`SELECT UUID() as id`;
|
||||
```
|
||||
|
||||
### MySQL Error Handling
|
||||
|
||||
```ts
|
||||
try {
|
||||
await mysql`INSERT INTO users (email) VALUES (${"duplicate@email.com"})`;
|
||||
} catch (error) {
|
||||
if (error.code === "ER_DUP_ENTRY") {
|
||||
console.log("Duplicate entry detected");
|
||||
} else if (error.code === "ER_ACCESS_DENIED_ERROR") {
|
||||
console.log("Access denied");
|
||||
} else if (error.code === "ER_BAD_DB_ERROR") {
|
||||
console.log("Database does not exist");
|
||||
}
|
||||
// MySQL error codes are compatible with mysql/mysql2 packages
|
||||
}
|
||||
```
|
||||
|
||||
### Performance Tips for MySQL
|
||||
|
||||
1. **Use connection pooling**: Set appropriate `max` pool size based on your workload
|
||||
2. **Enable prepared statements**: They're enabled by default and improve performance
|
||||
3. **Use transactions for bulk operations**: Group related queries in transactions
|
||||
4. **Index properly**: MySQL relies heavily on indexes for query performance
|
||||
5. **Use `utf8mb4` charset**: It's set by default and handles all Unicode characters
|
||||
|
||||
## Frequently Asked Questions
|
||||
|
||||
> Why is this `Bun.sql` and not `Bun.postgres`?
|
||||
|
||||
The plan was to add more database drivers in the future. Now with MySQL support added, this unified API supports PostgreSQL, MySQL, and SQLite.
|
||||
The plan is to add more database drivers in the future.
|
||||
|
||||
> How do I know which database adapter is being used?
|
||||
|
||||
The adapter is automatically detected from the connection string, as the sketch after this list shows:
|
||||
|
||||
- URLs starting with `mysql://` or `mysql2://` use MySQL
|
||||
- URLs matching SQLite patterns (`:memory:`, `sqlite://`, `file://`) use SQLite
|
||||
- Everything else defaults to PostgreSQL
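For example (credentials and paths are placeholders):

```ts
const pg = new SQL("postgres://user:pass@localhost:5432/app"); // PostgreSQL adapter
const mysql = new SQL("mysql://user:pass@localhost:3306/app"); // MySQL adapter
const sqlite = new SQL("sqlite://app.db"); // SQLite adapter
```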
|
||||
|
||||
> Are MySQL stored procedures supported?
|
||||
|
||||
Yes, stored procedures are fully supported including OUT parameters and multiple result sets:
|
||||
|
||||
```ts
|
||||
// Call stored procedure
|
||||
const results = await mysql`CALL GetUserStats(${userId}, @total_orders)`;
|
||||
|
||||
// Get OUT parameter
|
||||
const outParam = await mysql`SELECT @total_orders as total`;
|
||||
```
|
||||
|
||||
> Can I use MySQL-specific SQL syntax?
|
||||
|
||||
Yes, you can use any MySQL-specific syntax:
|
||||
|
||||
```ts
|
||||
// MySQL-specific syntax works fine
|
||||
await mysql`SET @user_id = ${userId}`;
|
||||
await mysql`SHOW TABLES`;
|
||||
await mysql`DESCRIBE users`;
|
||||
await mysql`EXPLAIN SELECT * FROM users WHERE id = ${id}`;
|
||||
```
|
||||
|
||||
## Why not just use an existing library?
|
||||
> Why not just use an existing library?
|
||||
|
||||
npm packages like postgres.js, pg, and node-postgres can be used in Bun too. They're great options.
|
||||
|
||||
|
||||
@@ -663,8 +663,6 @@ class Statement<Params, ReturnType> {
|
||||
toString(): string; // serialize to SQL
|
||||
|
||||
columnNames: string[]; // the column names of the result set
|
||||
columnTypes: string[]; // types based on actual values in first row (call .get()/.all() first)
|
||||
declaredTypes: (string | null)[]; // types from CREATE TABLE schema (call .get()/.all() first)
|
||||
paramsCount: number; // the number of parameters expected by the statement
|
||||
native: any; // the native object representing the statement
|
||||
|
||||
|
||||
@@ -28,20 +28,6 @@ for await (const chunk of stream) {
|
||||
}
|
||||
```
|
||||
|
||||
`ReadableStream` also provides convenience methods for consuming the entire stream:
|
||||
|
||||
```ts
|
||||
const stream = new ReadableStream({
|
||||
start(controller) {
|
||||
controller.enqueue("hello world");
|
||||
controller.close();
|
||||
},
|
||||
});
|
||||
|
||||
const data = await stream.text(); // => "hello world"
|
||||
// Also available: .json(), .bytes(), .blob()
|
||||
```
|
||||
|
||||
## Direct `ReadableStream`
|
||||
|
||||
Bun implements an optimized version of `ReadableStream` that avoids unnecessary data copying & queue management logic. With a traditional `ReadableStream`, chunks of data are _enqueued_. Each chunk is copied into a queue, where it sits until the stream is ready to send more data.
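For comparison, a minimal sketch of the direct form, assuming Bun's `type: "direct"` option with `controller.write()`:

```ts
const direct = new ReadableStream({
  type: "direct", // Bun-specific: chunks go straight to the consumer, no intermediate queue
  pull(controller) {
    controller.write("hello");
    controller.write(" world");
    controller.close();
  },
});

console.log(await direct.text()); // => "hello world"
```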
|
||||
@@ -222,8 +208,8 @@ export class ArrayBufferSink {
|
||||
*
|
||||
* This API might change later to separate Uint8ArraySink and ArrayBufferSink
|
||||
*/
|
||||
flush(): number | Uint8Array<ArrayBuffer> | ArrayBuffer;
|
||||
end(): ArrayBuffer | Uint8Array<ArrayBuffer>;
|
||||
flush(): number | Uint8Array | ArrayBuffer;
|
||||
end(): ArrayBuffer | Uint8Array;
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
@@ -602,40 +602,6 @@ dec.decode(decompressed);
|
||||
// => "hellohellohello..."
|
||||
```
|
||||
|
||||
## `Bun.zstdCompress()` / `Bun.zstdCompressSync()`
|
||||
|
||||
Compresses a `Uint8Array` using the Zstandard algorithm.
|
||||
|
||||
```ts
|
||||
const buf = Buffer.from("hello".repeat(100));
|
||||
|
||||
// Synchronous
|
||||
const compressedSync = Bun.zstdCompressSync(buf);
|
||||
// Asynchronous
|
||||
const compressedAsync = await Bun.zstdCompress(buf);
|
||||
|
||||
// With compression level (1-22, default: 3)
|
||||
const compressedLevel = Bun.zstdCompressSync(buf, { level: 6 });
|
||||
```
|
||||
|
||||
## `Bun.zstdDecompress()` / `Bun.zstdDecompressSync()`
|
||||
|
||||
Decompresses a `Uint8Array` using the Zstandard algorithm.
|
||||
|
||||
```ts
|
||||
const buf = Buffer.from("hello".repeat(100));
|
||||
const compressed = Bun.zstdCompressSync(buf);
|
||||
|
||||
// Synchronous
|
||||
const decompressedSync = Bun.zstdDecompressSync(compressed);
|
||||
// Asynchronous
|
||||
const decompressedAsync = await Bun.zstdDecompress(compressed);
|
||||
|
||||
const dec = new TextDecoder();
|
||||
dec.decode(decompressedSync);
|
||||
// => "hellohellohello..."
|
||||
```
|
||||
|
||||
## `Bun.inspect()`
|
||||
|
||||
Serializes an object to a `string` exactly as it would be printed by `console.log`.
|
||||
@@ -806,65 +772,6 @@ console.log(obj); // => { foo: "bar" }
|
||||
|
||||
Internally, [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone) and [`postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage) serialize and deserialize the same way. This exposes the underlying [HTML Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm) to JavaScript as an ArrayBuffer.
|
||||
|
||||
## `Bun.stripANSI()` ~6-57x faster `strip-ansi` alternative
|
||||
|
||||
`Bun.stripANSI(text: string): string`
|
||||
|
||||
Strip ANSI escape codes from a string. This is useful for removing colors and formatting from terminal output.
|
||||
|
||||
```ts
|
||||
const coloredText = "\u001b[31mHello\u001b[0m \u001b[32mWorld\u001b[0m";
|
||||
const plainText = Bun.stripANSI(coloredText);
|
||||
console.log(plainText); // => "Hello World"
|
||||
|
||||
// Works with various ANSI codes
|
||||
const formatted = "\u001b[1m\u001b[4mBold and underlined\u001b[0m";
|
||||
console.log(Bun.stripANSI(formatted)); // => "Bold and underlined"
|
||||
```
|
||||
|
||||
`Bun.stripANSI` is significantly faster than the popular [`strip-ansi`](https://www.npmjs.com/package/strip-ansi) npm package:
|
||||
|
||||
```js
|
||||
> bun bench/snippets/strip-ansi.mjs
|
||||
cpu: Apple M3 Max
|
||||
runtime: bun 1.2.21 (arm64-darwin)
|
||||
|
||||
benchmark avg (min … max) p75 / p99
|
||||
------------------------------------------------------- ----------
|
||||
Bun.stripANSI 11 chars no-ansi 8.13 ns/iter 8.27 ns
|
||||
(7.45 ns … 33.59 ns) 10.29 ns
|
||||
|
||||
Bun.stripANSI 13 chars ansi 51.68 ns/iter 52.51 ns
|
||||
(46.16 ns … 113.71 ns) 57.71 ns
|
||||
|
||||
Bun.stripANSI 16,384 chars long-no-ansi 298.39 ns/iter 305.44 ns
|
||||
(281.50 ns … 331.65 ns) 320.70 ns
|
||||
|
||||
Bun.stripANSI 212,992 chars long-ansi 227.65 µs/iter 234.50 µs
|
||||
(216.46 µs … 401.92 µs) 262.25 µs
|
||||
```
|
||||
|
||||
```js
|
||||
> node bench/snippets/strip-ansi.mjs
|
||||
cpu: Apple M3 Max
|
||||
runtime: node 24.6.0 (arm64-darwin)
|
||||
|
||||
benchmark avg (min … max) p75 / p99
|
||||
-------------------------------------------------------- ---------
|
||||
npm/strip-ansi 11 chars no-ansi 466.79 ns/iter 468.67 ns
|
||||
(454.08 ns … 570.67 ns) 543.67 ns
|
||||
|
||||
npm/strip-ansi 13 chars ansi 546.77 ns/iter 550.23 ns
|
||||
(532.74 ns … 651.08 ns) 590.35 ns
|
||||
|
||||
npm/strip-ansi 16,384 chars long-no-ansi 4.85 µs/iter 4.89 µs
|
||||
(4.71 µs … 5.00 µs) 4.98 µs
|
||||
|
||||
npm/strip-ansi 212,992 chars long-ansi 1.36 ms/iter 1.38 ms
|
||||
(1.27 ms … 1.73 ms) 1.49 ms
|
||||
|
||||
```
|
||||
|
||||
## `estimateShallowMemoryUsageOf` in `bun:jsc`
|
||||
|
||||
The `estimateShallowMemoryUsageOf` function returns a best-effort estimate of the memory usage of an object in bytes, excluding the memory usage of properties or other objects it references. For accurate per-object memory usage, use `Bun.generateHeapSnapshot`.
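A short sketch (the byte count in the comment is illustrative):

```ts
import { estimateShallowMemoryUsageOf } from "bun:jsc";

const payload = { name: "bun", tags: ["fast", "small"] };

// Shallow estimate only: the array referenced by `tags` is not counted
console.log(estimateShallowMemoryUsageOf(payload)); // e.g. 64
```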
|
||||
|
||||
@@ -279,9 +279,6 @@ Bun implements the `WebSocket` class. To create a WebSocket client that connects
|
||||
|
||||
```ts
|
||||
const socket = new WebSocket("ws://localhost:3000");
|
||||
|
||||
// With subprotocol negotiation
|
||||
const socket2 = new WebSocket("ws://localhost:3000", ["soap", "wamp"]);
|
||||
```
|
||||
|
||||
In browsers, the cookies that are currently set on the page will be sent with the WebSocket upgrade request. This is a standard feature of the `WebSocket` API.
|
||||
@@ -296,17 +293,6 @@ const socket = new WebSocket("ws://localhost:3000", {
|
||||
});
|
||||
```
|
||||
|
||||
### Client compression
|
||||
|
||||
WebSocket clients support permessage-deflate compression. The `extensions` property shows negotiated compression:
|
||||
|
||||
```ts
|
||||
const socket = new WebSocket("wss://echo.websocket.org");
|
||||
socket.addEventListener("open", () => {
|
||||
console.log(socket.extensions); // => "permessage-deflate"
|
||||
});
|
||||
```
|
||||
|
||||
To add event listeners to the socket:
|
||||
|
||||
```ts
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{% callout %}
|
||||
**🚧** — The `Worker` API is still experimental (particularly for terminating workers). We are actively working on improving this.
|
||||
**🚧** — The `Worker` API is still experimental and should not be considered ready for production.
|
||||
{% /callout %}
|
||||
|
||||
[`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker) lets you start and communicate with a new JavaScript instance running on a separate thread while sharing I/O resources with the main thread.
|
||||
@@ -122,59 +122,6 @@ Messages are automatically enqueued until the worker is ready, so there is no ne
|
||||
|
||||
To send messages, use [`worker.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Worker/postMessage) and [`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage). This leverages the [HTML Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm).
|
||||
|
||||
### Performance optimizations
|
||||
|
||||
Bun includes optimized fast paths for `postMessage` to dramatically improve performance for common data types:
|
||||
|
||||
**String fast path** - When posting pure string values, Bun bypasses the structured clone algorithm entirely, achieving significant performance gains with no serialization overhead.
|
||||
|
||||
**Simple object fast path** - For plain objects containing only primitive values (strings, numbers, booleans, null, undefined), Bun uses an optimized serialization path that stores properties directly without full structured cloning.
|
||||
|
||||
The simple object fast path activates when the object:
|
||||
|
||||
- Is a plain object with no prototype chain modifications
|
||||
- Contains only enumerable, configurable data properties
|
||||
- Has no indexed properties or getter/setter methods
|
||||
- All property values are primitives or strings
|
||||
|
||||
With these fast paths, Bun's `postMessage` performs **2-241x faster** because the message length no longer has a meaningful impact on performance.
|
||||
|
||||
**Bun (with fast paths):**
|
||||
|
||||
```
|
||||
postMessage({ prop: 11 chars string, ...9 more props }) - 648ns
|
||||
postMessage({ prop: 14 KB string, ...9 more props }) - 719ns
|
||||
postMessage({ prop: 3 MB string, ...9 more props }) - 1.26µs
|
||||
```
|
||||
|
||||
**Node.js v24.6.0 (for comparison):**
|
||||
|
||||
```
|
||||
postMessage({ prop: 11 chars string, ...9 more props }) - 1.19µs
|
||||
postMessage({ prop: 14 KB string, ...9 more props }) - 2.69µs
|
||||
postMessage({ prop: 3 MB string, ...9 more props }) - 304µs
|
||||
```
|
||||
|
||||
```js
|
||||
// String fast path - optimized
|
||||
postMessage("Hello, worker!");
|
||||
|
||||
// Simple object fast path - optimized
|
||||
postMessage({
|
||||
message: "Hello",
|
||||
count: 42,
|
||||
enabled: true,
|
||||
data: null,
|
||||
});
|
||||
|
||||
// Complex objects still work but use standard structured clone
|
||||
postMessage({
|
||||
nested: { deep: { object: true } },
|
||||
date: new Date(),
|
||||
buffer: new ArrayBuffer(8),
|
||||
});
|
||||
```
|
||||
|
||||
```js
|
||||
// On the worker thread, `postMessage` is automatically "routed" to the parent thread.
|
||||
postMessage({ hello: "world" });
|
||||
@@ -282,31 +229,6 @@ const worker = new Worker("./i-am-smol.ts", {
|
||||
Setting `smol: true` sets `JSC::HeapSize` to be `Small` instead of the default `Large`.
|
||||
{% /details %}
|
||||
|
||||
## Environment Data
|
||||
|
||||
Share data between the main thread and workers using `setEnvironmentData()` and `getEnvironmentData()`.
|
||||
|
||||
```js
|
||||
import { setEnvironmentData, getEnvironmentData } from "worker_threads";
|
||||
|
||||
// In main thread
|
||||
setEnvironmentData("config", { apiUrl: "https://api.example.com" });
|
||||
|
||||
// In worker
|
||||
const config = getEnvironmentData("config");
|
||||
console.log(config); // => { apiUrl: "https://api.example.com" }
|
||||
```
|
||||
|
||||
## Worker Events
|
||||
|
||||
Listen for worker creation events using `process.emit()`:
|
||||
|
||||
```js
|
||||
process.on("worker", worker => {
|
||||
console.log("New worker created:", worker.threadId);
|
||||
});
|
||||
```
|
||||
|
||||
## `Bun.isMainThread`
|
||||
|
||||
You can check if you're in the main thread by checking `Bun.isMainThread`.
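For example:

```ts
if (Bun.isMainThread) {
  console.log("Running on the main thread");
} else {
  console.log("Running inside a worker");
}
```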
|
||||
|
||||
668
docs/api/yaml.md
@@ -1,668 +0,0 @@
|
||||
In Bun, YAML is a first-class citizen alongside JSON and TOML.
|
||||
|
||||
Bun provides built-in support for YAML files through both runtime APIs and bundler integration. You can:
|
||||
|
||||
- Parse YAML strings with `Bun.YAML.parse`
|
||||
- Stringify JavaScript objects to YAML with `Bun.YAML.stringify`
|
||||
- import & require YAML files as modules at runtime (including hot reloading & watch mode support)
|
||||
- import & require YAML files in frontend apps via bun's bundler
|
||||
|
||||
## Conformance
|
||||
|
||||
Bun's YAML parser currently passes over 90% of the official YAML test suite. While we're actively working on reaching 100% conformance, the current implementation covers the vast majority of real-world use cases. The parser is written in Zig for optimal performance and is continuously being improved.
|
||||
|
||||
## Runtime API
|
||||
|
||||
### `Bun.YAML.parse()`
|
||||
|
||||
Parse a YAML string into a JavaScript object.
|
||||
|
||||
```ts
|
||||
import { YAML } from "bun";
|
||||
const text = `
|
||||
name: John Doe
|
||||
age: 30
|
||||
email: john@example.com
|
||||
hobbies:
|
||||
- reading
|
||||
- coding
|
||||
- hiking
|
||||
`;
|
||||
|
||||
const data = YAML.parse(text);
|
||||
console.log(data);
|
||||
// {
|
||||
// name: "John Doe",
|
||||
// age: 30,
|
||||
// email: "john@example.com",
|
||||
// hobbies: ["reading", "coding", "hiking"]
|
||||
// }
|
||||
```
|
||||
|
||||
#### Multi-document YAML
|
||||
|
||||
When parsing YAML with multiple documents (separated by `---`), `Bun.YAML.parse()` returns an array:
|
||||
|
||||
```ts
|
||||
const multiDoc = `
|
||||
---
|
||||
name: Document 1
|
||||
---
|
||||
name: Document 2
|
||||
---
|
||||
name: Document 3
|
||||
`;
|
||||
|
||||
const docs = Bun.YAML.parse(multiDoc);
|
||||
console.log(docs);
|
||||
// [
|
||||
// { name: "Document 1" },
|
||||
// { name: "Document 2" },
|
||||
// { name: "Document 3" }
|
||||
// ]
|
||||
```
|
||||
|
||||
#### Supported YAML Features
|
||||
|
||||
Bun's YAML parser supports the full YAML 1.2 specification, including:
|
||||
|
||||
- **Scalars**: strings, numbers, booleans, null values
|
||||
- **Collections**: sequences (arrays) and mappings (objects)
|
||||
- **Anchors and Aliases**: reusable nodes with `&` and `*`
|
||||
- **Tags**: type hints like `!!str`, `!!int`, `!!float`, `!!bool`, `!!null`
|
||||
- **Multi-line strings**: literal (`|`) and folded (`>`) scalars
|
||||
- **Comments**: using `#`
|
||||
- **Directives**: `%YAML` and `%TAG`
|
||||
|
||||
```ts
|
||||
const yaml = `
|
||||
# Employee record
|
||||
employee: &emp
|
||||
name: Jane Smith
|
||||
department: Engineering
|
||||
skills:
|
||||
- JavaScript
|
||||
- TypeScript
|
||||
- React
|
||||
|
||||
manager: *emp # Reference to employee
|
||||
|
||||
config: !!str 123 # Explicit string type
|
||||
|
||||
description: |
|
||||
This is a multi-line
|
||||
literal string that preserves
|
||||
line breaks and spacing.
|
||||
|
||||
summary: >
|
||||
This is a folded string
|
||||
that joins lines with spaces
|
||||
unless there are blank lines.
|
||||
`;
|
||||
|
||||
const data = Bun.YAML.parse(yaml);
|
||||
```
|
||||
|
||||
#### Error Handling
|
||||
|
||||
`Bun.YAML.parse()` throws an error if the YAML is invalid:
|
||||
|
||||
```ts
|
||||
try {
|
||||
Bun.YAML.parse("invalid: yaml: content:");
|
||||
} catch (error) {
|
||||
console.error("Failed to parse YAML:", error.message);
|
||||
}
|
||||
```
|
||||
|
||||
### `Bun.YAML.stringify()`
|
||||
|
||||
Convert a JavaScript value into a YAML string. The API signature matches `JSON.stringify`:
|
||||
|
||||
```ts
|
||||
YAML.stringify(value, replacer?, space?)
|
||||
```
|
||||
|
||||
- `value`: The value to convert to YAML
|
||||
- `replacer`: Currently only `null` or `undefined` (function replacers not yet supported)
|
||||
- `space`: Number of spaces for indentation (e.g., `2`) or a string to use for indentation. **Without this parameter, outputs flow-style (single-line) YAML**
|
||||
|
||||
#### Basic Usage
|
||||
|
||||
```ts
|
||||
import { YAML } from "bun";
|
||||
|
||||
const data = {
|
||||
name: "John Doe",
|
||||
age: 30,
|
||||
hobbies: ["reading", "coding"],
|
||||
};
|
||||
|
||||
// Without space - outputs flow-style (single-line) YAML
|
||||
console.log(YAML.stringify(data));
|
||||
// {name: John Doe,age: 30,hobbies: [reading,coding]}
|
||||
|
||||
// With space=2 - outputs block-style (multi-line) YAML
|
||||
console.log(YAML.stringify(data, null, 2));
|
||||
// name: John Doe
|
||||
// age: 30
|
||||
// hobbies:
|
||||
// - reading
|
||||
// - coding
|
||||
```
|
||||
|
||||
#### Output Styles
|
||||
|
||||
```ts
|
||||
const arr = [1, 2, 3];
|
||||
|
||||
// Flow style (single-line) - default
|
||||
console.log(YAML.stringify(arr));
|
||||
// [1,2,3]
|
||||
|
||||
// Block style (multi-line) - with indentation
|
||||
console.log(YAML.stringify(arr, null, 2));
|
||||
// - 1
|
||||
// - 2
|
||||
// - 3
|
||||
```
|
||||
|
||||
#### String Quoting
|
||||
|
||||
`YAML.stringify()` automatically quotes strings when necessary:
|
||||
|
||||
- Strings that would be parsed as YAML keywords (`true`, `false`, `null`, `yes`, `no`, etc.)
|
||||
- Strings that would be parsed as numbers
|
||||
- Strings containing special characters or escape sequences
|
||||
|
||||
```ts
|
||||
const examples = {
|
||||
keyword: "true", // Will be quoted: "true"
|
||||
number: "123", // Will be quoted: "123"
|
||||
text: "hello world", // Won't be quoted: hello world
|
||||
empty: "", // Will be quoted: ""
|
||||
};
|
||||
|
||||
console.log(YAML.stringify(examples, null, 2));
|
||||
// keyword: "true"
|
||||
// number: "123"
|
||||
// text: hello world
|
||||
// empty: ""
|
||||
```
|
||||
|
||||
#### Cycles and References
|
||||
|
||||
`YAML.stringify()` automatically detects and handles circular references using YAML anchors and aliases:
|
||||
|
||||
```ts
|
||||
const obj = { name: "root" };
|
||||
obj.self = obj; // Circular reference
|
||||
|
||||
const yamlString = YAML.stringify(obj, null, 2);
|
||||
console.log(yamlString);
|
||||
// &root
|
||||
// name: root
|
||||
// self:
|
||||
// *root
|
||||
|
||||
// Objects with shared references
|
||||
const shared = { id: 1 };
|
||||
const data = {
|
||||
first: shared,
|
||||
second: shared,
|
||||
};
|
||||
|
||||
console.log(YAML.stringify(data, null, 2));
|
||||
// first:
|
||||
// &first
|
||||
// id: 1
|
||||
// second:
|
||||
// *first
|
||||
```
|
||||
|
||||
#### Special Values
|
||||
|
||||
```ts
|
||||
// Special numeric values
|
||||
console.log(YAML.stringify(Infinity)); // .inf
|
||||
console.log(YAML.stringify(-Infinity)); // -.inf
|
||||
console.log(YAML.stringify(NaN)); // .nan
|
||||
console.log(YAML.stringify(0)); // 0
|
||||
console.log(YAML.stringify(-0)); // -0
|
||||
|
||||
// null and undefined
|
||||
console.log(YAML.stringify(null)); // null
|
||||
console.log(YAML.stringify(undefined)); // undefined (returns undefined, not a string)
|
||||
|
||||
// Booleans
|
||||
console.log(YAML.stringify(true)); // true
|
||||
console.log(YAML.stringify(false)); // false
|
||||
```
|
||||
|
||||
#### Complex Objects
|
||||
|
||||
```ts
|
||||
const config = {
|
||||
server: {
|
||||
port: 3000,
|
||||
host: "localhost",
|
||||
ssl: {
|
||||
enabled: true,
|
||||
cert: "/path/to/cert.pem",
|
||||
key: "/path/to/key.pem",
|
||||
},
|
||||
},
|
||||
database: {
|
||||
connections: [
|
||||
{ name: "primary", host: "db1.example.com" },
|
||||
{ name: "replica", host: "db2.example.com" },
|
||||
],
|
||||
},
|
||||
features: {
|
||||
auth: true,
|
||||
"rate-limit": 100, // Keys with special characters are preserved
|
||||
},
|
||||
};
|
||||
|
||||
const yamlString = YAML.stringify(config, null, 2);
|
||||
console.log(yamlString);
|
||||
// server:
|
||||
// port: 3000
|
||||
// host: localhost
|
||||
// ssl:
|
||||
// enabled: true
|
||||
// cert: /path/to/cert.pem
|
||||
// key: /path/to/key.pem
|
||||
// database:
|
||||
// connections:
|
||||
// - name: primary
|
||||
// host: db1.example.com
|
||||
// - name: replica
|
||||
// host: db2.example.com
|
||||
// features:
|
||||
// auth: true
|
||||
// rate-limit: 100
|
||||
```
|
||||
|
||||
## Module Import
|
||||
|
||||
### ES Modules
|
||||
|
||||
You can import YAML files directly as ES modules. The YAML content is parsed and made available as both default and named exports:
|
||||
|
||||
```yaml#config.yaml
|
||||
database:
|
||||
host: localhost
|
||||
port: 5432
|
||||
name: myapp
|
||||
|
||||
redis:
|
||||
host: localhost
|
||||
port: 6379
|
||||
|
||||
features:
|
||||
auth: true
|
||||
rateLimit: true
|
||||
analytics: false
|
||||
```
|
||||
|
||||
#### Default Import
|
||||
|
||||
```ts#app.ts
|
||||
import config from "./config.yaml";
|
||||
|
||||
console.log(config.database.host); // "localhost"
|
||||
console.log(config.redis.port); // 6379
|
||||
```
|
||||
|
||||
#### Named Imports
|
||||
|
||||
You can destructure top-level YAML properties as named imports:
|
||||
|
||||
```ts
|
||||
import { database, redis, features } from "./config.yaml";
|
||||
|
||||
console.log(database.host); // "localhost"
|
||||
console.log(redis.port); // 6379
|
||||
console.log(features.auth); // true
|
||||
```
|
||||
|
||||
Or combine both:
|
||||
|
||||
```ts
|
||||
import config, { database, features } from "./config.yaml";
|
||||
|
||||
// Use the full config object
|
||||
console.log(config);
|
||||
|
||||
// Or use specific parts
|
||||
if (features.rateLimit) {
|
||||
setupRateLimiting(database);
|
||||
}
|
||||
```
|
||||
|
||||
### CommonJS
|
||||
|
||||
YAML files can also be required in CommonJS:
|
||||
|
||||
```js
|
||||
const config = require("./config.yaml");
|
||||
console.log(config.database.name); // "myapp"
|
||||
|
||||
// Destructuring also works
|
||||
const { database, redis } = require("./config.yaml");
|
||||
console.log(database.port); // 5432
|
||||
```
|
||||
|
||||
### TypeScript Support
|
||||
|
||||
While Bun can import YAML files directly, TypeScript doesn't know the types of your YAML files by default. To add TypeScript support for your YAML imports, create a declaration file with `.d.ts` appended to the YAML filename (e.g., `config.yaml` → `config.yaml.d.ts`):
|
||||
|
||||
```yaml#config.yaml
|
||||
features: "advanced"
|
||||
server:
|
||||
host: localhost
|
||||
port: 3000
|
||||
```
|
||||
|
||||
```ts#config.yaml.d.ts
|
||||
const contents: {
|
||||
features: string;
|
||||
server: {
|
||||
host: string;
|
||||
port: number;
|
||||
};
|
||||
};
|
||||
|
||||
export = contents;
|
||||
```
|
||||
|
||||
Now TypeScript will provide proper type checking and auto-completion:
|
||||
|
||||
```ts#app.ts
|
||||
import config from "./config.yaml";
|
||||
|
||||
// TypeScript knows the types!
|
||||
config.server.port; // number
|
||||
config.server.host; // string
|
||||
config.features; // string
|
||||
|
||||
// TypeScript will catch errors
|
||||
config.server.unknown; // Error: Property 'unknown' does not exist
|
||||
```
|
||||
|
||||
This approach works for both ES modules and CommonJS, giving you full type safety while Bun continues to handle the actual YAML parsing at runtime.
|
||||
|
||||
## Hot Reloading with YAML
|
||||
|
||||
One of the most powerful features of Bun's YAML support is hot reloading. When you run your application with `bun --hot`, changes to YAML files are automatically detected and reloaded without closing connections.
|
||||
|
||||
### Configuration Hot Reloading
|
||||
|
||||
```yaml#config.yaml
|
||||
server:
|
||||
port: 3000
|
||||
host: localhost
|
||||
|
||||
features:
|
||||
debug: true
|
||||
verbose: false
|
||||
```
|
||||
|
||||
```ts#server.ts
|
||||
import { server, features } from "./config.yaml";
|
||||
|
||||
console.log(`Starting server on ${server.host}:${server.port}`);
|
||||
|
||||
if (features.debug) {
|
||||
console.log("Debug mode enabled");
|
||||
}
|
||||
|
||||
// Your server code here
|
||||
Bun.serve({
|
||||
port: server.port,
|
||||
hostname: server.host,
|
||||
fetch(req) {
|
||||
if (features.verbose) {
|
||||
console.log(`${req.method} ${req.url}`);
|
||||
}
|
||||
return new Response("Hello World");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Run with hot reloading:
|
||||
|
||||
```bash
|
||||
bun --hot server.ts
|
||||
```
|
||||
|
||||
Now when you modify `config.yaml`, the changes are immediately reflected in your running application. This is perfect for:
|
||||
|
||||
- Adjusting configuration during development
|
||||
- Testing different settings without restarts
|
||||
- Live debugging with configuration changes
|
||||
- Feature flag toggling
|
||||
|
||||
## Configuration Management
|
||||
|
||||
### Environment-Based Configuration
|
||||
|
||||
YAML excels at managing configuration across different environments:
|
||||
|
||||
```yaml#config.yaml
|
||||
defaults: &defaults
|
||||
timeout: 5000
|
||||
retries: 3
|
||||
cache:
|
||||
enabled: true
|
||||
ttl: 3600
|
||||
|
||||
development:
|
||||
<<: *defaults
|
||||
api:
|
||||
url: http://localhost:4000
|
||||
key: dev_key_12345
|
||||
logging:
|
||||
level: debug
|
||||
pretty: true
|
||||
|
||||
staging:
|
||||
<<: *defaults
|
||||
api:
|
||||
url: https://staging-api.example.com
|
||||
key: ${STAGING_API_KEY}
|
||||
logging:
|
||||
level: info
|
||||
pretty: false
|
||||
|
||||
production:
|
||||
<<: *defaults
|
||||
api:
|
||||
url: https://api.example.com
|
||||
key: ${PROD_API_KEY}
|
||||
cache:
|
||||
enabled: true
|
||||
ttl: 86400
|
||||
logging:
|
||||
level: error
|
||||
pretty: false
|
||||
```
|
||||
|
||||
```ts#app.ts
|
||||
import configs from "./config.yaml";
|
||||
|
||||
const env = process.env.NODE_ENV || "development";
|
||||
const config = configs[env];
|
||||
|
||||
// Environment variables in YAML values can be interpolated
|
||||
function interpolateEnvVars(obj: any): any {
|
||||
if (typeof obj === "string") {
|
||||
return obj.replace(/\${(\w+)}/g, (_, key) => process.env[key] || "");
|
||||
}
|
||||
if (typeof obj === "object") {
|
||||
for (const key in obj) {
|
||||
obj[key] = interpolateEnvVars(obj[key]);
|
||||
}
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
export default interpolateEnvVars(config);
|
||||
```
|
||||
|
||||
### Feature Flags Configuration
|
||||
|
||||
```yaml#features.yaml
|
||||
features:
|
||||
newDashboard:
|
||||
enabled: true
|
||||
rolloutPercentage: 50
|
||||
allowedUsers:
|
||||
- admin@example.com
|
||||
- beta@example.com
|
||||
|
||||
experimentalAPI:
|
||||
enabled: false
|
||||
endpoints:
|
||||
- /api/v2/experimental
|
||||
- /api/v2/beta
|
||||
|
||||
darkMode:
|
||||
enabled: true
|
||||
default: auto # auto, light, dark
|
||||
```
|
||||
|
||||
```ts#feature-flags.ts
|
||||
import { features } from "./features.yaml";
|
||||
|
||||
export function isFeatureEnabled(
|
||||
featureName: string,
|
||||
userEmail?: string,
|
||||
): boolean {
|
||||
const feature = features[featureName];
|
||||
|
||||
if (!feature?.enabled) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check rollout percentage
|
||||
if (feature.rolloutPercentage < 100) {
|
||||
const hash = hashCode(userEmail || "anonymous"); // hashCode: any stable string-hash helper you define elsewhere
|
||||
if (hash % 100 >= feature.rolloutPercentage) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Check allowed users
|
||||
if (feature.allowedUsers && userEmail) {
|
||||
return feature.allowedUsers.includes(userEmail);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
// Use with hot reloading to toggle features in real-time
|
||||
if (isFeatureEnabled("newDashboard", user.email)) {
|
||||
renderNewDashboard();
|
||||
} else {
|
||||
renderLegacyDashboard();
|
||||
}
|
||||
```
|
||||
|
||||
### Database Configuration
|
||||
|
||||
```yaml#database.yaml
|
||||
connections:
|
||||
primary:
|
||||
type: postgres
|
||||
host: ${DB_HOST:-localhost}
|
||||
port: ${DB_PORT:-5432}
|
||||
database: ${DB_NAME:-myapp}
|
||||
username: ${DB_USER:-postgres}
|
||||
password: ${DB_PASS}
|
||||
pool:
|
||||
min: 2
|
||||
max: 10
|
||||
idleTimeout: 30000
|
||||
|
||||
cache:
|
||||
type: redis
|
||||
host: ${REDIS_HOST:-localhost}
|
||||
port: ${REDIS_PORT:-6379}
|
||||
password: ${REDIS_PASS}
|
||||
db: 0
|
||||
|
||||
analytics:
|
||||
type: clickhouse
|
||||
host: ${ANALYTICS_HOST:-localhost}
|
||||
port: 8123
|
||||
database: analytics
|
||||
|
||||
migrations:
|
||||
autoRun: ${AUTO_MIGRATE:-false}
|
||||
directory: ./migrations
|
||||
|
||||
seeds:
|
||||
enabled: ${SEED_DB:-false}
|
||||
directory: ./seeds
|
||||
```
|
||||
|
||||
```ts#db.ts
|
||||
import { connections, migrations } from "./database.yaml";
|
||||
import { createConnection } from "./database-driver";
|
||||
|
||||
// Parse environment variables with defaults
|
||||
function parseConfig(config: any) {
|
||||
return JSON.parse(
|
||||
JSON.stringify(config).replace(
|
||||
/\${([^:-]+)(?::([^}]+))?}/g,
|
||||
(_, key, defaultValue) => process.env[key] || defaultValue || "",
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
const dbConfig = parseConfig(connections);
|
||||
|
||||
export const db = await createConnection(dbConfig.primary);
|
||||
export const cache = await createConnection(dbConfig.cache);
|
||||
export const analytics = await createConnection(dbConfig.analytics);
|
||||
|
||||
// Auto-run migrations if configured
|
||||
if (parseConfig(migrations).autoRun === "true") {
|
||||
await runMigrations(db, migrations.directory);
|
||||
}
|
||||
```
|
||||
|
||||
### Bundler Integration
|
||||
|
||||
When you import YAML files in your application and bundle it with Bun, the YAML is parsed at build time and included as a JavaScript module:
|
||||
|
||||
```bash
|
||||
bun build app.ts --outdir=dist
|
||||
```
|
||||
|
||||
This means:
|
||||
|
||||
- Zero runtime YAML parsing overhead in production
|
||||
- Smaller bundle sizes
|
||||
- Tree-shaking support for unused configuration (named imports)
|
||||
|
||||
### Dynamic Imports
|
||||
|
||||
YAML files can be dynamically imported, useful for loading configuration on demand:
|
||||
|
||||
```ts#Load configuration based on environment
|
||||
const env = process.env.NODE_ENV || "development";
|
||||
const config = await import(`./configs/${env}.yaml`);
|
||||
|
||||
// Load user-specific settings
|
||||
async function loadUserSettings(userId: string) {
|
||||
try {
|
||||
const settings = await import(`./users/${userId}/settings.yaml`);
|
||||
return settings.default;
|
||||
} catch {
|
||||
return await import("./users/default-settings.yaml");
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -140,19 +140,6 @@ The `--sourcemap` argument embeds a sourcemap compressed with zstd, so that erro
|
||||
|
||||
The `--bytecode` argument enables bytecode compilation. Every time you run JavaScript code in Bun, JavaScriptCore (the engine) will compile your source code into bytecode. We can move this parsing work from runtime to bundle time, saving you startup time.
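A typical invocation combines it with `--compile` (paths are illustrative):

```bash
bun build --compile --bytecode ./app.ts --outfile myapp
```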
|
||||
|
||||
## Embedding runtime arguments
|
||||
|
||||
**`--compile-exec-argv="args"`** - Embed runtime arguments that are available via `process.execArgv`:
|
||||
|
||||
```bash
|
||||
bun build --compile --compile-exec-argv="--smol --user-agent=MyBot" ./app.ts --outfile myapp
|
||||
```
|
||||
|
||||
```js
|
||||
// In the compiled app
|
||||
console.log(process.execArgv); // ["--smol", "--user-agent=MyBot"]
|
||||
```
|
||||
|
||||
## Act as the Bun CLI
|
||||
|
||||
{% note %}
|
||||
@@ -421,119 +408,16 @@ $ bun build --compile --asset-naming="[name].[ext]" ./index.ts
|
||||
|
||||
To trim down the size of the executable a little, pass `--minify` to `bun build --compile`. This uses Bun's minifier to reduce the code size. Overall though, Bun's binary is still way too big and we need to make it smaller.
|
||||
|
||||
## Using Bun.build() API
|
||||
|
||||
You can also generate standalone executables using the `Bun.build()` JavaScript API. This is useful when you need programmatic control over the build process.
|
||||
|
||||
### Basic usage
|
||||
|
||||
```js
|
||||
await Bun.build({
|
||||
entrypoints: ["./app.ts"],
|
||||
outdir: "./dist",
|
||||
compile: {
|
||||
target: "bun-windows-x64",
|
||||
outfile: "myapp.exe",
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Windows metadata with Bun.build()
|
||||
|
||||
When targeting Windows, you can specify metadata through the `windows` object:
|
||||
|
||||
```js
|
||||
await Bun.build({
|
||||
entrypoints: ["./app.ts"],
|
||||
outdir: "./dist",
|
||||
compile: {
|
||||
target: "bun-windows-x64",
|
||||
outfile: "myapp.exe",
|
||||
windows: {
|
||||
title: "My Application",
|
||||
publisher: "My Company Inc",
|
||||
version: "1.2.3.4",
|
||||
description: "A powerful application built with Bun",
|
||||
copyright: "© 2024 My Company Inc",
|
||||
hideConsole: false, // Set to true for GUI applications
|
||||
icon: "./icon.ico", // Path to icon file
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Cross-compilation with Bun.build()
|
||||
|
||||
You can cross-compile for different platforms:
|
||||
|
||||
```js
|
||||
// Build for multiple platforms
|
||||
const platforms = [
|
||||
{ target: "bun-windows-x64", outfile: "app-windows.exe" },
|
||||
{ target: "bun-linux-x64", outfile: "app-linux" },
|
||||
{ target: "bun-darwin-arm64", outfile: "app-macos" },
|
||||
];
|
||||
|
||||
for (const platform of platforms) {
|
||||
await Bun.build({
|
||||
entrypoints: ["./app.ts"],
|
||||
outdir: "./dist",
|
||||
compile: platform,
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
## Windows-specific flags
|
||||
|
||||
When compiling a standalone executable for Windows, there are several platform-specific options that can be used to customize the generated `.exe` file:
|
||||
When compiling a standalone executable on Windows, there are two platform-specific options that can be used to customize metadata on the generated `.exe` file:
|
||||
|
||||
### Visual customization
|
||||
|
||||
- `--windows-icon=path/to/icon.ico` - Set the executable file icon
|
||||
- `--windows-hide-console` - Disable the background terminal window (useful for GUI applications)
|
||||
|
||||
### Metadata customization
|
||||
|
||||
You can embed version information and other metadata into your Windows executable:
|
||||
|
||||
- `--windows-title <STR>` - Set the product name (appears in file properties)
|
||||
- `--windows-publisher <STR>` - Set the company name
|
||||
- `--windows-version <STR>` - Set the version number (e.g. "1.2.3.4")
|
||||
- `--windows-description <STR>` - Set the file description
|
||||
- `--windows-copyright <STR>` - Set the copyright information
|
||||
|
||||
#### Example with all metadata flags:
|
||||
|
||||
```sh
|
||||
bun build --compile ./app.ts \
|
||||
--outfile myapp.exe \
|
||||
--windows-title "My Application" \
|
||||
--windows-publisher "My Company Inc" \
|
||||
--windows-version "1.2.3.4" \
|
||||
--windows-description "A powerful application built with Bun" \
|
||||
--windows-copyright "© 2024 My Company Inc"
|
||||
```
|
||||
|
||||
This metadata will be visible in Windows Explorer when viewing the file properties:
|
||||
|
||||
1. Right-click the executable in Windows Explorer
|
||||
2. Select "Properties"
|
||||
3. Go to the "Details" tab
|
||||
|
||||
#### Version string format
|
||||
|
||||
The `--windows-version` flag accepts version strings in the following formats:
|
||||
|
||||
- `"1"` - Will be normalized to "1.0.0.0"
|
||||
- `"1.2"` - Will be normalized to "1.2.0.0"
|
||||
- `"1.2.3"` - Will be normalized to "1.2.3.0"
|
||||
- `"1.2.3.4"` - Full version format
|
||||
|
||||
Each version component must be a number between 0 and 65535.
|
||||
- `--windows-icon=path/to/icon.ico` to customize the executable file icon.
|
||||
- `--windows-hide-console` to disable the background terminal, which can be used for applications that do not need a TTY.
|
||||
|
||||
{% callout %}
|
||||
|
||||
These flags currently cannot be used when cross-compiling because they depend on Windows APIs. They are only available when building on Windows itself.
|
||||
These flags currently cannot be used when cross-compiling because they depend on Windows APIs.
|
||||
|
||||
{% /callout %}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
Bun's fast native bundler can be used via the `bun build` CLI command or the `Bun.build()` JavaScript API.
|
||||
Bun's fast native bundler is now in beta. It can be used via the `bun build` CLI command or the `Bun.build()` JavaScript API.
|
||||
|
||||
{% codetabs group="a" %}
|
||||
|
||||
@@ -313,14 +313,6 @@ $ bun build --entrypoints ./index.ts --outdir ./out --target browser
|
||||
|
||||
Depending on the target, Bun will apply different module resolution rules and optimizations.
|
||||
|
||||
### Module resolution
|
||||
|
||||
Bun supports the `NODE_PATH` environment variable for additional module resolution paths:
|
||||
|
||||
```bash
|
||||
NODE_PATH=./src bun build ./entry.js --outdir ./dist
|
||||
```
|
||||
|
||||
<!-- - Module resolution. For example, when bundling for the browser, Bun will prioritize the `"browser"` export condition when resolving imports. An error will be thrown if any Node.js or Bun built-ins are imported or used, e.g. `node:fs` or `Bun.serve`. -->
|
||||
|
||||
{% table %}
|
||||
@@ -400,55 +392,6 @@ $ bun build ./index.tsx --outdir ./out --format cjs
|
||||
|
||||
TODO: document IIFE once we support globalNames.
|
||||
|
||||
### `jsx`
|
||||
|
||||
Configure JSX transform behavior. Allows fine-grained control over how JSX is compiled.
|
||||
|
||||
**Classic runtime example** (uses `factory` and `fragment`):
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```ts#JavaScript
|
||||
await Bun.build({
|
||||
entrypoints: ['./app.tsx'],
|
||||
outdir: './out',
|
||||
jsx: {
|
||||
factory: 'h',
|
||||
fragment: 'Fragment',
|
||||
runtime: 'classic',
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
# JSX configuration is handled via bunfig.toml or tsconfig.json
|
||||
$ bun build ./app.tsx --outdir ./out
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
**Automatic runtime example** (uses `importSource`):
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```ts#JavaScript
|
||||
await Bun.build({
|
||||
entrypoints: ['./app.tsx'],
|
||||
outdir: './out',
|
||||
jsx: {
|
||||
importSource: 'preact',
|
||||
runtime: 'automatic',
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
# JSX configuration is handled via bunfig.toml or tsconfig.json
|
||||
$ bun build ./app.tsx --outdir ./out
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### `splitting`
|
||||
|
||||
Whether to enable code splitting.
|
||||
@@ -790,10 +733,6 @@ Whether to enable minification. Default `false`.
|
||||
When targeting `bun`, identifiers will be minified by default.
|
||||
{% /callout %}
|
||||
|
||||
{% callout %}
|
||||
When `minify.syntax` is enabled, unused function and class expression names are removed unless `minify.keepNames` is set to `true` or `--keep-names` flag is used.
|
||||
{% /callout %}
|
||||
|
||||
To enable all minification options:
|
||||
|
||||
{% codetabs group="a" %}
|
||||
@@ -824,16 +763,12 @@ await Bun.build({
|
||||
whitespace: true,
|
||||
identifiers: true,
|
||||
syntax: true,
|
||||
keepNames: false, // default
|
||||
},
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
$ bun build ./index.tsx --outdir ./out --minify-whitespace --minify-identifiers --minify-syntax
|
||||
|
||||
# To preserve function and class names during minification:
|
||||
$ bun build ./index.tsx --outdir ./out --minify --keep-names
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
@@ -1324,33 +1259,6 @@ $ bun build ./index.tsx --outdir ./out --drop=console --drop=debugger --drop=any
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### `throw`
|
||||
|
||||
Controls error handling behavior when the build fails. When set to `true` (default), the returned promise rejects with an `AggregateError`. When set to `false`, the promise resolves with a `BuildOutput` object where `success` is `false`.
|
||||
|
||||
```ts#JavaScript
|
||||
// Default behavior: throws on error
|
||||
try {
|
||||
await Bun.build({
|
||||
entrypoints: ['./index.tsx'],
|
||||
throw: true, // default
|
||||
});
|
||||
} catch (error) {
|
||||
// Handle AggregateError
|
||||
console.error("Build failed:", error);
|
||||
}
|
||||
|
||||
// Alternative: handle errors via success property
|
||||
const result = await Bun.build({
|
||||
entrypoints: ['./index.tsx'],
|
||||
throw: false,
|
||||
});
|
||||
|
||||
if (!result.success) {
|
||||
console.error("Build failed with errors:", result.logs);
|
||||
}
|
||||
```
|
||||
|
||||
## Outputs
|
||||
|
||||
The `Bun.build` function returns a `Promise<BuildOutput>`, defined as:
|
||||
@@ -1576,15 +1484,6 @@ interface BuildConfig {
|
||||
* @default "esm"
|
||||
*/
|
||||
format?: "esm" | "cjs" | "iife";
|
||||
/**
|
||||
* JSX configuration object for controlling JSX transform behavior
|
||||
*/
|
||||
jsx?: {
|
||||
factory?: string;
|
||||
fragment?: string;
|
||||
importSource?: string;
|
||||
runtime?: "automatic" | "classic";
|
||||
};
|
||||
naming?:
|
||||
| string
|
||||
| {
|
||||
@@ -1627,7 +1526,6 @@ interface BuildConfig {
|
||||
whitespace?: boolean;
|
||||
syntax?: boolean;
|
||||
identifiers?: boolean;
|
||||
keepNames?: boolean;
|
||||
};
|
||||
/**
|
||||
* Ignore dead code elimination/tree-shaking annotations such as @__PURE__ and package.json
|
||||
@@ -1671,7 +1569,8 @@ interface BuildConfig {
|
||||
* When set to `true`, the returned promise rejects with an AggregateError when a build failure happens.
|
||||
* When set to `false`, the `success` property of the returned object will be `false` when a build failure happens.
|
||||
*
|
||||
* This defaults to `true`.
|
||||
* This defaults to `false` in Bun 1.1 and will change to `true` in Bun 1.2
|
||||
* as most usage of `Bun.build` forgets to check for errors.
|
||||
*/
|
||||
throw?: boolean;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
The Bun bundler implements a set of default loaders out of the box. As a rule of thumb, the bundler and the runtime both support the same set of file types.

`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.toml` `.json` `.yaml` `.yml` `.txt` `.wasm` `.node` `.html`
`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.toml` `.json` `.txt` `.wasm` `.node` `.html`

Bun uses the file extension to determine which built-in _loader_ should be used to parse the file. Every loader has a name, such as `js`, `tsx`, or `json`. These names are used when building [plugins](https://bun.com/docs/bundler/plugins) that extend Bun with custom loaders.

@@ -121,55 +121,6 @@ export default {

{% /codetabs %}

### `yaml`

**YAML loader**. Default for `.yaml` and `.yml`.

YAML files can be directly imported. Bun will parse them with its fast native YAML parser.

```ts
import config from "./config.yaml";
config.database.host; // => "localhost"

// via import attribute:
// import myCustomYAML from './my.config' with {type: "yaml"};
```

During bundling, the parsed YAML is inlined into the bundle as a JavaScript object.

```ts
var config = {
  database: {
    host: "localhost",
    port: 5432,
  },
  // ...other fields
};
config.database.host;
```

If a `.yaml` or `.yml` file is passed as an entrypoint, it will be converted to a `.js` module that `export default`s the parsed object.

{% codetabs %}

```yaml#Input
name: John Doe
age: 35
email: johndoe@example.com
```

```js#Output
export default {
  name: "John Doe",
  age: 35,
  email: "johndoe@example.com"
}
```

{% /codetabs %}

For more details on YAML support, including the runtime API `Bun.YAML.parse()`, see the [YAML API documentation](/docs/api/yaml).

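As a quick runtime-side sketch of the `Bun.YAML.parse()` API mentioned above (the file path here is a placeholder and error handling is omitted):

```ts
// Read a YAML file at runtime and parse it with Bun's built-in YAML parser.
const text = await Bun.file("./config.yaml").text(); // placeholder path
const config = Bun.YAML.parse(text);
console.log(config);
```
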
### `text`

**Text loader**. Default for `.txt`.

@@ -9,7 +9,6 @@ Plugins can register callbacks to be run at various points in the lifecycle of a
- [`onStart()`](#onstart): Run once the bundler has started a bundle
- [`onResolve()`](#onresolve): Run before a module is resolved
- [`onLoad()`](#onload): Run before a module is loaded.
- [`onEnd()`](#onend): Run after the bundle has completed
- [`onBeforeParse()`](#onbeforeparse): Run zero-copy native addons in the parser thread before a file is parsed.

### Reference
@@ -19,7 +18,6 @@ A rough overview of the types (please refer to Bun's `bun.d.ts` for the full typ
```ts
type PluginBuilder = {
  onStart(callback: () => void): void;
  onEnd(callback: (result: BuildOutput) => void | Promise<void>): void;
  onResolve: (
    args: { filter: RegExp; namespace?: string },
    callback: (args: { path: string; importer: string }) => {
@@ -287,53 +285,6 @@ plugin({

Note that the `.defer()` function currently has the limitation that it can only be called once per `onLoad` callback.

### `onEnd`

```ts
onEnd(callback: (result: BuildOutput) => void | Promise<void>): void;
```

Registers a callback to be run when the bundler completes a bundle (whether successful or not).

The callback receives the `BuildOutput` object containing:

- `success`: boolean indicating if the build succeeded
- `outputs`: array of generated build artifacts
- `logs`: array of build messages (warnings, errors, etc.)

This is useful for post-processing, cleanup, notifications, or custom error handling.

```ts
await Bun.build({
  entrypoints: ["./index.ts"],
  outdir: "./out",
  plugins: [
    {
      name: "onEnd example",
      setup(build) {
        build.onEnd(result => {
          if (result.success) {
            console.log(
              `✅ Build succeeded with ${result.outputs.length} outputs`,
            );
          } else {
            console.error(`❌ Build failed with ${result.logs.length} errors`);
          }
        });
      },
    },
  ],
});
```

The `onEnd` callbacks are called:

- **Before** the build promise resolves or rejects
- **After** all bundling is complete
- **In the order** they were registered

Multiple plugins can register `onEnd` callbacks, and they will all be called sequentially. If an `onEnd` callback returns a promise, the build will wait for it to resolve before continuing.

## Native plugins

One of the reasons why Bun's bundler is so fast is that it is written in native code and leverages multi-threading to load and parse modules in parallel.

@@ -245,8 +245,8 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
---

- `--jsx-side-effects`
- `--jsx-side-effects`
- Controls whether JSX expressions are marked as `/* @__PURE__ */` for dead code elimination. Default is `false` (JSX marked as pure).
- n/a
- JSX is always assumed to be side-effect-free

---

@@ -617,7 +617,7 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot

- `jsxSideEffects`
- `jsxSideEffects`
- Controls whether JSX expressions are marked as pure for dead code elimination
- Not supported in JS API, configure in `tsconfig.json`

---

@@ -176,21 +176,7 @@ When a `bun.lock` exists and `package.json` hasn’t changed, Bun downloads miss

## Platform-specific dependencies?

bun stores normalized `cpu` and `os` values from npm in the lockfile, along with the resolved packages. It skips downloading, extracting, and installing packages disabled for the current target at runtime. This means the lockfile won't change between platforms/architectures even if the packages ultimately installed do change.

### `--cpu` and `--os` flags

You can override the target platform for package selection:

```bash
bun install --cpu=x64 --os=linux
```

This installs packages for the specified platform instead of the current system. Useful for cross-platform builds or when preparing deployments for different environments.

**Accepted values for `--cpu`**: `arm64`, `x64`, `ia32`, `ppc64`, `s390x`

**Accepted values for `--os`**: `linux`, `darwin`, `win32`, `freebsd`, `openbsd`, `sunos`, `aix`
bun stores normalized `cpu` and `os` values from npm in the lockfile, along with the resolved packages. It skips downloading, extracting, and installing packages disabled for the current target at runtime. This means the lockfile won’t change between platforms/architectures even if the packages ultimately installed do change.

## Peer dependencies?

@@ -244,106 +230,19 @@ $ bun install --backend copyfile

**`symlink`** is typically only used for `file:` dependencies (and eventually `link:`) internally. To prevent infinite loops, it skips symlinking the `node_modules` folder.

If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own node_modules folder or you pass `--preserve-symlinks` to `node` or `bun`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own node_modules folder or you pass `--preserve-symlinks` to `node`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).

```bash
$ rm -rf node_modules
$ bun install --backend symlink
$ bun --preserve-symlinks ./my-file.js
$ node --preserve-symlinks ./my-file.js # https://nodejs.org/api/cli.html#--preserve-symlinks
```

Bun's runtime does not currently expose an equivalent of `--preserve-symlinks`, though the code for it does exist.

## npm registry metadata

bun uses a binary format for caching NPM registry responses. This loads much faster than JSON and tends to be smaller on disk.
You will see these files in `~/.bun/install/cache/*.npm`. The filename pattern is `${hash(packageName)}.npm`. It’s a hash so that extra directories don’t need to be created for scoped packages.
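
For example, to peek at the cache files described above (the command is illustrative; output will vary per machine):

```bash
$ ls ~/.bun/install/cache/*.npm
```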

Bun's usage of `Cache-Control` ignores `Age`. This improves performance, but means bun may be up to about 5 minutes behind the latest package version metadata from npm.

## pnpm migration

Bun migrates projects from pnpm automatically. When a `pnpm-lock.yaml` file is detected and no `bun.lock` file exists, Bun migrates the lockfile to `bun.lock` during installation. The original `pnpm-lock.yaml` file remains unmodified.

```bash
bun install
```

**Note**: Migration only runs when `bun.lock` is absent. There is currently no opt-out flag for pnpm migration.

The migration process handles:

### Lockfile Migration

- Converts `pnpm-lock.yaml` to `bun.lock` format
- Preserves package versions and resolution information
- Maintains dependency relationships and peer dependencies
- Handles patched dependencies with integrity hashes

### Workspace Configuration

When a `pnpm-workspace.yaml` file exists, Bun migrates workspace settings to your root `package.json`:

```yaml
# pnpm-workspace.yaml
packages:
  - "apps/*"
  - "packages/*"

catalog:
  react: ^18.0.0
  typescript: ^5.0.0

catalogs:
  build:
    webpack: ^5.0.0
    babel: ^7.0.0
```

The workspace packages list and catalogs are moved to the `workspaces` field in `package.json`:

```json
{
  "workspaces": {
    "packages": ["apps/*", "packages/*"],
    "catalog": {
      "react": "^18.0.0",
      "typescript": "^5.0.0"
    },
    "catalogs": {
      "build": {
        "webpack": "^5.0.0",
        "babel": "^7.0.0"
      }
    }
  }
}
```

### Catalog Dependencies

Dependencies using pnpm's `catalog:` protocol are preserved:

```json
{
  "dependencies": {
    "react": "catalog:",
    "webpack": "catalog:build"
  }
}
```

### Configuration Migration

The following pnpm configuration is migrated from both `pnpm-lock.yaml` and `pnpm-workspace.yaml` (a sketch of the result follows this list):

- **Overrides**: Moved from `pnpm.overrides` to root-level `overrides` in `package.json`
- **Patched Dependencies**: Moved from `pnpm.patchedDependencies` to root-level `patchedDependencies` in `package.json`
- **Workspace Overrides**: Applied from `pnpm-workspace.yaml` to root `package.json`
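
A rough sketch of that result (the package names, versions, and patch path below are invented for illustration): fields that previously lived under `pnpm.overrides` and `pnpm.patchedDependencies` end up at the root of `package.json`:

```json
{
  "overrides": {
    "foo": "1.0.0"
  },
  "patchedDependencies": {
    "bar@2.0.0": "patches/bar@2.0.0.patch"
  }
}
```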

### Requirements

- Requires pnpm lockfile version 7 or higher
- Workspace packages must have a `name` field in their `package.json`
- All catalog entries referenced by dependencies must exist in the catalogs definition

After migration, you can safely remove the `pnpm-lock.yaml` and `pnpm-workspace.yaml` files.

@@ -63,15 +63,6 @@ $ bunx --bun my-cli # good
$ bunx my-cli --bun # bad
```

## Package flag

**`--package <pkg>` or `-p <pkg>`** - Run a binary from a specific package. Useful when the binary name differs from the package name:

```bash
bunx -p renovate renovate-config-validator
bunx --package @angular/cli ng
```

To force bun to always be used with a script, use a shebang.

```

@@ -33,11 +33,6 @@ It creates:
- an entry point which defaults to `index.ts` unless any of `index.{tsx, jsx, js, mts, mjs}` exist or the `package.json` specifies a `module` or `main` field
- a `README.md` file

AI Agent rules (disable with `$BUN_AGENT_RULE_DISABLED=1`):

- a `CLAUDE.md` file when Claude CLI is detected (disable with `CLAUDE_CODE_AGENT_RULE_DISABLED` env var)
- a `.cursor/rules/*.mdc` file to guide [Cursor AI](https://cursor.sh) to use Bun instead of Node.js and npm when Cursor is detected

If you pass `-y` or `--yes`, it will assume you want to continue without asking questions.
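
For example, to scaffold a project non-interactively with the agent-rule files disabled (using the environment variable named above; the exact invocation is a sketch):

```bash
$ BUN_AGENT_RULE_DISABLED=1 bun init -y
```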

At the end, it runs `bun install` to install `@types/bun`.

@@ -8,14 +8,6 @@ The `bun` CLI contains a Node.js-compatible package manager designed to be a dra

{% /callout %}

{% callout %}

**💾 Disk efficient** — Bun install stores all packages in a global cache (`~/.bun/install/cache/`) and creates hardlinks (Linux) or copy-on-write clones (macOS) to `node_modules`. This means duplicate packages across projects point to the same underlying data, taking up virtually no extra disk space.

For more details, see [Package manager > Global cache](https://bun.com/docs/install/cache).

{% /callout %}

{% details summary="For Linux users" %}
The recommended minimum Linux Kernel version is 5.6. If you're on Linux kernel 5.1 - 5.5, `bun install` will work, but HTTP requests will be slow due to a lack of support for io_uring's `connect()` operation.

@@ -215,12 +207,6 @@ Isolated installs create a central package store in `node_modules/.bun/` with sy

For complete documentation on isolated installs, refer to [Package manager > Isolated installs](https://bun.com/docs/install/isolated).

## Disk efficiency

Bun uses a global cache at `~/.bun/install/cache/` to minimize disk usage. Packages are stored once and linked to `node_modules` using hardlinks (Linux/Windows) or copy-on-write (macOS), so duplicate packages across projects don't consume additional disk space.

For complete documentation refer to [Package manager > Global cache](https://bun.com/docs/install/cache).

## Configuration

The default behavior of `bun install` can be configured in `bunfig.toml`. The default values are shown below.

@@ -44,47 +44,4 @@ You can also pass glob patterns to filter by workspace names:

{% bunOutdatedTerminal glob="{e,t}*" displayGlob="--filter='@monorepo/{types,cli}'" /%}

### Catalog Dependencies

`bun outdated` supports checking catalog dependencies defined in `package.json`:

```sh
$ bun outdated -r
┌────────────────────┬─────────┬─────────┬─────────┬────────────────────────────────┐
│ Package │ Current │ Update │ Latest │ Workspace │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ body-parser │ 1.19.0 │ 1.19.0 │ 2.2.0 │ @test/shared │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ cors │ 2.8.0 │ 2.8.0 │ 2.8.5 │ @test/shared │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ chalk │ 4.0.0 │ 4.0.0 │ 5.6.2 │ @test/utils │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ uuid │ 8.0.0 │ 8.0.0 │ 13.0.0 │ @test/utils │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ axios │ 0.21.0 │ 0.21.0 │ 1.12.2 │ catalog (@test/app) │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ lodash │ 4.17.15 │ 4.17.15 │ 4.17.21 │ catalog (@test/app, @test/app) │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ react │ 17.0.0 │ 17.0.0 │ 19.1.1 │ catalog (@test/app) │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ react-dom │ 17.0.0 │ 17.0.0 │ 19.1.1 │ catalog (@test/app) │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ express │ 4.17.0 │ 4.17.0 │ 5.1.0 │ catalog (@test/shared) │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ moment │ 2.24.0 │ 2.24.0 │ 2.30.1 │ catalog (@test/utils) │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ @types/node (dev) │ 14.0.0 │ 14.0.0 │ 24.5.2 │ @test/shared │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ @types/react (dev) │ 17.0.0 │ 17.0.0 │ 19.1.15 │ catalog:testing (@test/app) │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ eslint (dev) │ 7.0.0 │ 7.0.0 │ 9.36.0 │ catalog:testing (@test/app) │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ typescript (dev) │ 4.9.5 │ 4.9.5 │ 5.9.2 │ catalog:build (@test/app) │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ jest (dev) │ 26.0.0 │ 26.0.0 │ 30.2.0 │ catalog:testing (@test/shared) │
├────────────────────┼─────────┼─────────┼─────────┼────────────────────────────────┤
│ prettier (dev) │ 2.0.0 │ 2.0.0 │ 3.6.2 │ catalog:build (@test/utils) │
└────────────────────┴─────────┴─────────┴─────────┴────────────────────────────────┘
```

{% bunCLIUsage command="outdated" /%}

@@ -82,16 +82,6 @@ The `--dry-run` flag can be used to simulate the publish process without actuall
$ bun publish --dry-run
```

### `--tolerate-republish`

The `--tolerate-republish` flag makes `bun publish` exit with code 0 instead of code 1 when attempting to republish over an existing version number. This is useful in automated workflows where republishing the same version might occur and should not be treated as an error.

```sh
$ bun publish --tolerate-republish
```

Without this flag, attempting to publish a version that already exists will result in an error and exit code 1. With this flag, the command will exit successfully even when trying to republish an existing version.

### `--gzip-level`

Specify the level of gzip compression to use when packing the package. Only applies to `bun publish` without a tarball path argument. Values range from `0` to `9` (default is `9`).

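A sketch of usage (the `--flag=value` syntax is an assumption based on other flags in these docs):

```sh
$ bun publish --gzip-level=6
```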