Mirror of https://github.com/oven-sh/bun
synced 2026-02-03 15:38:46 +00:00
Compare commits
182 Commits
jarred/spa...dylan/byte
Commit SHA1s:
7ad6973fe4, caef7fceea, d919a76dd6, 973fa98796, b7a6087d71, 55230c16e6, e2161e7e13,
d5431fcfe6, b04f98885f, 1779ee807c, 42cec2f0e2, 5b7fd9ed0e, ed9353f95e, 4573b5b844,
5a75bcde13, afc5f50237, ca8d8065ec, 0bcb3137d3, b79bbfe289, 72490281e5, 60ab798991,
e1de7563e1, 3d361c8b49, 0759da233f, 9978424177, d42f536a74, f78d197523, 80fb7c7375,
e2bfeefc9d, cff2c2690b, d0272d4a98, 48ebc15e63, 2e8e7a000c, c1584b8a35, a0f13ea5bb,
c2bd4095eb, 0a7313e66c, 83293ea50c, de7c947161, 033c977fea, d957a81c0a, 0b98086c3d,
f6c5318560, ad1fa514ed, 24c43c8f4d, d69eb3ca00, 3f53add5f1, fcaff77ed7, 25c61fcd5a,
d0b5f9b587, 1400e05e11, c8e3a91602, 2e5f7f10ae, 559c95ee2c, 262f8863cb, 05f5ea0070,
46ce975175, fa4822f8b8, 8881e671d4, 97d55411de, f247277375, b93468ca48, 35e9f3d4a2,
9142cdcb1a, 822445d922, a34e10db53, 684f7ecd09, d189759576, e395dec309, ff6af0e2f7,
1085908386, a56488f221, fe8f8242fd, c69ed120e9, edea077947, 669b34ff6c, eb7727819a,
84604888e9, 6286824e28, dcb51bda60, 36e2870fc8, 5ac0a9a95c, 448fad8213, 0315c97e7b,
3545cca8cc, b199333f17, c0ba7e9e34, d4e614da8e, b96980a95d, 1dd5761daa, 196182f8ec,
a3fcfd3963, 54b90213eb, fd69af7356, 3ed06e3ddf, 437e15bae5, 300f486125, fe7dfbb615,
26c0f324f8, 00722626fa, 2d6c67ffc0, e577a965ac, 654d33620a, b99bbe7ee4, ec2e2993f5,
d3abdc489e, 7c45ed97de, a7586212eb, 8c3278b50d, 8bc2959a52, d2b37a575f, fe3cbce1f0,
f718f4a312, c0eebd7523, 85770596ca, b6613beaa2, 404ac7fe9d, 707fc4c3a2, 8fad98ffdb,
75f0ac4395, c342453065, 7717693c70, 790e5d4a7e, f99efe398d, b2351bbb4e, d7bf8210eb,
92b38fdf80, e3e8d15263, 73fe9a4484, 0e37dc4e78, cca10d4530, ecbf103bf5, efdbe3b54f,
97495a86fe, 5b972fa2b4, 87a5fac697, ede4ba567b, b1417f494d, d354714791, e7672b2d04,
7110dc10a4, 784271f85e, 9b363e4ef6, e6f6a487a1, 2d86f46e07, f5ef9cda3c, 9ad5d3c6c3,
decf84c416, 2d36588609, 67e44e25e8, d1562a7670, 287c7adf76, 0a13653707, a2c252256c,
45c34beb65, a2da900e40, 883453391f, 0dc136149d, 8526b2512e, 29068c2118, b47d0bf960,
e7aa548c42, cf868fd4c6, 5fd3a42fe6, fecfe8082b, 57f799b6c2, f5077d6f7b, 2112ef5801,
e020d2d953, 586805ddb6, a25d7a8450, e5e9734c02, 151cc59d53, dd7a639a6f, 99c3824b31,
3cb1b5c7dd, ecd74ac14c, 599947de28, 53a3a67a0f, 50eaa755c7, 0e13449e60, 255a3dbd04
@@ -434,11 +434,17 @@ function getBuildEnv(target, options) {
  * @param {PipelineOptions} options
  * @returns {string}
  */
-function getBuildCommand(target, options) {
+function getBuildCommand(target, options, label) {
   const { profile } = target;
+  const buildProfile = profile || "release";

-  const label = profile || "release";
-  return `bun run build:${label}`;
+  if (target.os === "windows" && label === "build-bun") {
+    // Only sign release builds, not canary builds (DigiCert charges per signature)
+    const enableSigning = !options.canary ? " -DENABLE_WINDOWS_CODESIGNING=ON" : "";
+    return `bun run build:${buildProfile}${enableSigning}`;
+  }
+
+  return `bun run build:${buildProfile}`;
 }

 /**
@@ -534,7 +540,7 @@ function getLinkBunStep(platform, options) {
       BUN_LINK_ONLY: "ON",
       ...getBuildEnv(platform, options),
     },
-    command: `${getBuildCommand(platform, options)} --target bun`,
+    command: `${getBuildCommand(platform, options, "build-bun")} --target bun`,
   };
 }
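For reference, a minimal runnable sketch of how the new signing switch behaves. The function body is copied from the hunk above; the `target`, `options`, and `label` values are made-up examples, not real `PipelineOptions` inputs.

```js
// Copied from the diff above, inlined so the example runs standalone.
function getBuildCommand(target, options, label) {
  const { profile } = target;
  const buildProfile = profile || "release";

  if (target.os === "windows" && label === "build-bun") {
    // Only sign release builds, not canary builds (DigiCert charges per signature)
    const enableSigning = !options.canary ? " -DENABLE_WINDOWS_CODESIGNING=ON" : "";
    return `bun run build:${buildProfile}${enableSigning}`;
  }

  return `bun run build:${buildProfile}`;
}

const windowsTarget = { os: "windows", profile: undefined }; // profile falls back to "release"

console.log(getBuildCommand(windowsTarget, { canary: false }, "build-bun"));
// -> bun run build:release -DENABLE_WINDOWS_CODESIGNING=ON

console.log(getBuildCommand(windowsTarget, { canary: true }, "build-bun"));
// -> bun run build:release   (canary builds skip signing)

console.log(getBuildCommand({ os: "linux" }, { canary: false }, "some-other-step")); // hypothetical label
// -> bun run build:release   (non-Windows or non-"build-bun" labels take the plain path)
```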
464 .buildkite/scripts/sign-windows.ps1 Normal file
@@ -0,0 +1,464 @@
|
||||
# Windows Code Signing Script for Bun
|
||||
# Uses DigiCert KeyLocker for Authenticode signing
|
||||
# Native PowerShell implementation - no path translation issues
|
||||
|
||||
param(
|
||||
[Parameter(Mandatory=$true)]
|
||||
[string]$BunProfileExe,
|
||||
|
||||
[Parameter(Mandatory=$true)]
|
||||
[string]$BunExe
|
||||
)
|
||||
|
||||
$ErrorActionPreference = "Stop"
|
||||
$ProgressPreference = "SilentlyContinue"
|
||||
|
||||
# Logging functions
|
||||
function Log-Info {
|
||||
param([string]$Message)
|
||||
Write-Host "[INFO] $Message" -ForegroundColor Cyan
|
||||
}
|
||||
|
||||
function Log-Success {
|
||||
param([string]$Message)
|
||||
Write-Host "[SUCCESS] $Message" -ForegroundColor Green
|
||||
}
|
||||
|
||||
function Log-Error {
|
||||
param([string]$Message)
|
||||
Write-Host "[ERROR] $Message" -ForegroundColor Red
|
||||
}
|
||||
|
||||
function Log-Debug {
|
||||
param([string]$Message)
|
||||
if ($env:DEBUG -eq "true" -or $env:DEBUG -eq "1") {
|
||||
Write-Host "[DEBUG] $Message" -ForegroundColor Gray
|
||||
}
|
||||
}
|
||||
|
||||
# Load Visual Studio environment if not already loaded
|
||||
function Ensure-VSEnvironment {
|
||||
if ($null -eq $env:VSINSTALLDIR) {
|
||||
Log-Info "Loading Visual Studio environment..."
|
||||
|
||||
$vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe"
|
||||
if (!(Test-Path $vswhere)) {
|
||||
throw "Command not found: vswhere (did you install Visual Studio?)"
|
||||
}
|
||||
|
||||
$vsDir = & $vswhere -prerelease -latest -property installationPath
|
||||
if ($null -eq $vsDir) {
|
||||
$vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory -ErrorAction SilentlyContinue
|
||||
if ($null -eq $vsDir) {
|
||||
throw "Visual Studio directory not found."
|
||||
}
|
||||
$vsDir = $vsDir.FullName
|
||||
}
|
||||
|
||||
Push-Location $vsDir
|
||||
try {
|
||||
$vsShell = Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1"
|
||||
. $vsShell -Arch amd64 -HostArch amd64
|
||||
} finally {
|
||||
Pop-Location
|
||||
}
|
||||
|
||||
Log-Success "Visual Studio environment loaded"
|
||||
}
|
||||
|
||||
if ($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
|
||||
throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported."
|
||||
}
|
||||
}
|
||||
|
||||
# Check for required environment variables
|
||||
function Check-Environment {
|
||||
Log-Info "Checking environment variables..."
|
||||
|
||||
$required = @{
|
||||
"SM_API_KEY" = $env:SM_API_KEY
|
||||
"SM_CLIENT_CERT_PASSWORD" = $env:SM_CLIENT_CERT_PASSWORD
|
||||
"SM_KEYPAIR_ALIAS" = $env:SM_KEYPAIR_ALIAS
|
||||
"SM_HOST" = $env:SM_HOST
|
||||
"SM_CLIENT_CERT_FILE" = $env:SM_CLIENT_CERT_FILE
|
||||
}
|
||||
|
||||
$missing = @()
|
||||
foreach ($key in $required.Keys) {
|
||||
if ([string]::IsNullOrEmpty($required[$key])) {
|
||||
$missing += $key
|
||||
} else {
|
||||
Log-Debug "$key is set (length: $($required[$key].Length))"
|
||||
}
|
||||
}
|
||||
|
||||
if ($missing.Count -gt 0) {
|
||||
throw "Missing required environment variables: $($missing -join ', ')"
|
||||
}
|
||||
|
||||
Log-Success "All required environment variables are present"
|
||||
}
|
||||
|
||||
# Setup certificate file
|
||||
function Setup-Certificate {
|
||||
Log-Info "Setting up certificate..."
|
||||
|
||||
# Always try to decode as base64 first
|
||||
# If it fails, then treat as file path
|
||||
try {
|
||||
Log-Info "Attempting to decode certificate as base64..."
|
||||
Log-Debug "Input string length: $($env:SM_CLIENT_CERT_FILE.Length) characters"
|
||||
|
||||
$tempCertPath = Join-Path $env:TEMP "digicert_cert_$(Get-Random).p12"
|
||||
|
||||
# Try to decode as base64
|
||||
$certBytes = [System.Convert]::FromBase64String($env:SM_CLIENT_CERT_FILE)
|
||||
[System.IO.File]::WriteAllBytes($tempCertPath, $certBytes)
|
||||
|
||||
# Validate the decoded certificate size
|
||||
$fileSize = (Get-Item $tempCertPath).Length
|
||||
if ($fileSize -lt 100) {
|
||||
throw "Decoded certificate too small: $fileSize bytes (expected >100 bytes)"
|
||||
}
|
||||
|
||||
# Update environment to point to file
|
||||
$env:SM_CLIENT_CERT_FILE = $tempCertPath
|
||||
|
||||
Log-Success "Certificate decoded and written to: $tempCertPath"
|
||||
Log-Debug "Decoded certificate file size: $fileSize bytes"
|
||||
|
||||
# Register cleanup
|
||||
$global:TEMP_CERT_PATH = $tempCertPath
|
||||
|
||||
} catch {
|
||||
# If base64 decode fails, check if it's a file path
|
||||
Log-Info "Base64 decode failed, checking if it's a file path..."
|
||||
Log-Debug "Decode error: $_"
|
||||
|
||||
if (Test-Path $env:SM_CLIENT_CERT_FILE) {
|
||||
$fileSize = (Get-Item $env:SM_CLIENT_CERT_FILE).Length
|
||||
|
||||
# Validate file size
|
||||
if ($fileSize -lt 100) {
|
||||
throw "Certificate file too small: $fileSize bytes at $env:SM_CLIENT_CERT_FILE (possibly corrupted)"
|
||||
}
|
||||
|
||||
Log-Info "Using certificate file: $env:SM_CLIENT_CERT_FILE"
|
||||
Log-Debug "Certificate file size: $fileSize bytes"
|
||||
} else {
|
||||
throw "SM_CLIENT_CERT_FILE is neither valid base64 nor an existing file: $env:SM_CLIENT_CERT_FILE"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Install DigiCert KeyLocker tools
|
||||
function Install-KeyLocker {
|
||||
Log-Info "Setting up DigiCert KeyLocker tools..."
|
||||
|
||||
# Define our controlled installation directory
|
||||
$installDir = "C:\BuildTools\DigiCert"
|
||||
$smctlPath = Join-Path $installDir "smctl.exe"
|
||||
|
||||
# Check if already installed in our controlled location
|
||||
if (Test-Path $smctlPath) {
|
||||
Log-Success "KeyLocker tools already installed at: $smctlPath"
|
||||
|
||||
# Add to PATH if not already there
|
||||
if ($env:PATH -notlike "*$installDir*") {
|
||||
$env:PATH = "$installDir;$env:PATH"
|
||||
Log-Info "Added to PATH: $installDir"
|
||||
}
|
||||
|
||||
return $smctlPath
|
||||
}
|
||||
|
||||
Log-Info "Installing KeyLocker tools to: $installDir"
|
||||
|
||||
# Create the installation directory if it doesn't exist
|
||||
if (!(Test-Path $installDir)) {
|
||||
Log-Info "Creating installation directory: $installDir"
|
||||
try {
|
||||
New-Item -ItemType Directory -Path $installDir -Force | Out-Null
|
||||
Log-Success "Created directory: $installDir"
|
||||
} catch {
|
||||
throw "Failed to create directory $installDir : $_"
|
||||
}
|
||||
}
|
||||
|
||||
# Download MSI installer
|
||||
$msiUrl = "https://bun-ci-assets.bun.sh/Keylockertools-windows-x64.msi"
|
||||
$msiPath = Join-Path $env:TEMP "Keylockertools-windows-x64.msi"
|
||||
|
||||
Log-Info "Downloading MSI from: $msiUrl"
|
||||
Log-Info "Downloading to: $msiPath"
|
||||
|
||||
try {
|
||||
# Remove existing MSI if present
|
||||
if (Test-Path $msiPath) {
|
||||
Remove-Item $msiPath -Force
|
||||
Log-Debug "Removed existing MSI file"
|
||||
}
|
||||
|
||||
# Download with progress tracking
|
||||
$webClient = New-Object System.Net.WebClient
|
||||
$webClient.DownloadFile($msiUrl, $msiPath)
|
||||
|
||||
if (!(Test-Path $msiPath)) {
|
||||
throw "MSI download failed - file not found"
|
||||
}
|
||||
|
||||
$fileSize = (Get-Item $msiPath).Length
|
||||
Log-Success "MSI downloaded successfully (size: $fileSize bytes)"
|
||||
|
||||
} catch {
|
||||
throw "Failed to download MSI: $_"
|
||||
}
|
||||
|
||||
# Install MSI
|
||||
Log-Info "Installing MSI..."
|
||||
Log-Debug "MSI path: $msiPath"
|
||||
Log-Debug "File exists: $(Test-Path $msiPath)"
|
||||
Log-Debug "File size: $((Get-Item $msiPath).Length) bytes"
|
||||
|
||||
# Check if running as administrator
|
||||
$isAdmin = ([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole]::Administrator)
|
||||
Log-Info "Running as administrator: $isAdmin"
|
||||
|
||||
# Install MSI silently to our controlled directory
|
||||
$arguments = @(
|
||||
"/i", "`"$msiPath`"",
|
||||
"/quiet",
|
||||
"/norestart",
|
||||
"TARGETDIR=`"$installDir`"",
|
||||
"INSTALLDIR=`"$installDir`"",
|
||||
"ACCEPT_EULA=1",
|
||||
"ADDLOCAL=ALL"
|
||||
)
|
||||
|
||||
Log-Debug "Running: msiexec.exe $($arguments -join ' ')"
|
||||
Log-Info "Installing to: $installDir"
|
||||
|
||||
$process = Start-Process -FilePath "msiexec.exe" -ArgumentList $arguments -Wait -PassThru -NoNewWindow
|
||||
|
||||
if ($process.ExitCode -ne 0) {
|
||||
Log-Error "MSI installation failed with exit code: $($process.ExitCode)"
|
||||
|
||||
# Try to get error details from event log
|
||||
try {
|
||||
$events = Get-WinEvent -LogName "Application" -MaxEvents 10 |
|
||||
Where-Object { $_.ProviderName -eq "MsiInstaller" -and $_.TimeCreated -gt (Get-Date).AddMinutes(-1) }
|
||||
|
||||
foreach ($event in $events) {
|
||||
Log-Debug "MSI Event: $($event.Message)"
|
||||
}
|
||||
} catch {
|
||||
Log-Debug "Could not retrieve MSI installation events"
|
||||
}
|
||||
|
||||
throw "MSI installation failed with exit code: $($process.ExitCode)"
|
||||
}
|
||||
|
||||
Log-Success "MSI installation completed"
|
||||
|
||||
# Wait for installation to complete
|
||||
Start-Sleep -Seconds 2
|
||||
|
||||
# Verify smctl.exe exists in our controlled location
|
||||
if (Test-Path $smctlPath) {
|
||||
Log-Success "KeyLocker tools installed successfully at: $smctlPath"
|
||||
|
||||
# Add to PATH
|
||||
$env:PATH = "$installDir;$env:PATH"
|
||||
Log-Info "Added to PATH: $installDir"
|
||||
|
||||
return $smctlPath
|
||||
}
|
||||
|
||||
# If not in our expected location, check if it installed somewhere in the directory
|
||||
$found = Get-ChildItem -Path $installDir -Filter "smctl.exe" -Recurse -ErrorAction SilentlyContinue |
|
||||
Select-Object -First 1
|
||||
|
||||
if ($found) {
|
||||
Log-Success "Found smctl.exe at: $($found.FullName)"
|
||||
$smctlDir = $found.DirectoryName
|
||||
$env:PATH = "$smctlDir;$env:PATH"
|
||||
return $found.FullName
|
||||
}
|
||||
|
||||
throw "KeyLocker tools installation succeeded but smctl.exe not found in $installDir"
|
||||
}
|
||||
|
||||
# Configure KeyLocker
|
||||
function Configure-KeyLocker {
|
||||
param([string]$SmctlPath)
|
||||
|
||||
Log-Info "Configuring KeyLocker..."
|
||||
|
||||
# Verify smctl is accessible
|
||||
try {
|
||||
$version = & $SmctlPath --version 2>&1
|
||||
Log-Debug "smctl version: $version"
|
||||
} catch {
|
||||
throw "Failed to run smctl: $_"
|
||||
}
|
||||
|
||||
# Configure KeyLocker credentials and environment
|
||||
Log-Info "Configuring KeyLocker credentials..."
|
||||
|
||||
try {
|
||||
# Save credentials (API key and password)
|
||||
Log-Info "Saving credentials to OS store..."
|
||||
$saveOutput = & $SmctlPath credentials save $env:SM_API_KEY $env:SM_CLIENT_CERT_PASSWORD 2>&1 | Out-String
|
||||
Log-Debug "Credentials save output: $saveOutput"
|
||||
|
||||
if ($saveOutput -like "*Credentials saved*") {
|
||||
Log-Success "Credentials saved successfully"
|
||||
}
|
||||
|
||||
# Set environment variables for smctl
|
||||
Log-Info "Setting KeyLocker environment variables..."
|
||||
$env:SM_HOST = $env:SM_HOST # Already set, but ensure it's available
|
||||
$env:SM_API_KEY = $env:SM_API_KEY # Already set
|
||||
$env:SM_CLIENT_CERT_FILE = $env:SM_CLIENT_CERT_FILE # Path to decoded cert file
|
||||
Log-Debug "SM_HOST: $env:SM_HOST"
|
||||
Log-Debug "SM_CLIENT_CERT_FILE: $env:SM_CLIENT_CERT_FILE"
|
||||
|
||||
# Run health check
|
||||
Log-Info "Running KeyLocker health check..."
|
||||
$healthOutput = & $SmctlPath healthcheck 2>&1 | Out-String
|
||||
Log-Debug "Health check output: $healthOutput"
|
||||
|
||||
if ($healthOutput -like "*Healthy*" -or $healthOutput -like "*SUCCESS*" -or $LASTEXITCODE -eq 0) {
|
||||
Log-Success "KeyLocker health check passed"
|
||||
} else {
|
||||
Log-Error "Health check failed: $healthOutput"
|
||||
# Don't throw here, sometimes healthcheck is flaky but signing still works
|
||||
}
|
||||
|
||||
# Sync certificates to Windows certificate store
|
||||
Log-Info "Syncing certificates to Windows store..."
|
||||
$syncOutput = & $SmctlPath windows certsync 2>&1 | Out-String
|
||||
Log-Debug "Certificate sync output: $syncOutput"
|
||||
|
||||
if ($syncOutput -like "*success*" -or $syncOutput -like "*synced*" -or $LASTEXITCODE -eq 0) {
|
||||
Log-Success "Certificates synced to Windows store"
|
||||
} else {
|
||||
Log-Info "Certificate sync output: $syncOutput"
|
||||
}
|
||||
|
||||
} catch {
|
||||
throw "Failed to configure KeyLocker: $_"
|
||||
}
|
||||
}
|
||||
|
||||
# Sign an executable
|
||||
function Sign-Executable {
|
||||
param(
|
||||
[string]$ExePath,
|
||||
[string]$SmctlPath
|
||||
)
|
||||
|
||||
if (!(Test-Path $ExePath)) {
|
||||
throw "Executable not found: $ExePath"
|
||||
}
|
||||
|
||||
$fileName = Split-Path $ExePath -Leaf
|
||||
Log-Info "Signing $fileName..."
|
||||
Log-Debug "Full path: $ExePath"
|
||||
Log-Debug "File size: $((Get-Item $ExePath).Length) bytes"
|
||||
|
||||
# Check if already signed
|
||||
$existingSig = Get-AuthenticodeSignature $ExePath
|
||||
if ($existingSig.Status -eq "Valid") {
|
||||
Log-Info "$fileName is already signed by: $($existingSig.SignerCertificate.Subject)"
|
||||
Log-Info "Skipping re-signing"
|
||||
return
|
||||
}
|
||||
|
||||
# Sign the executable using smctl
|
||||
try {
|
||||
# smctl sign command with keypair-alias
|
||||
$signArgs = @(
|
||||
"sign",
|
||||
"--keypair-alias", $env:SM_KEYPAIR_ALIAS,
|
||||
"--input", $ExePath,
|
||||
"--verbose"
|
||||
)
|
||||
|
||||
Log-Debug "Running: $SmctlPath $($signArgs -join ' ')"
|
||||
|
||||
$signOutput = & $SmctlPath $signArgs 2>&1 | Out-String
|
||||
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Log-Error "Signing output: $signOutput"
|
||||
throw "Signing failed with exit code: $LASTEXITCODE"
|
||||
}
|
||||
|
||||
Log-Debug "Signing output: $signOutput"
|
||||
Log-Success "Signing command completed"
|
||||
|
||||
} catch {
|
||||
throw "Failed to sign $fileName : $_"
|
||||
}
|
||||
|
||||
# Verify signature
|
||||
$newSig = Get-AuthenticodeSignature $ExePath
|
||||
|
||||
if ($newSig.Status -eq "Valid") {
|
||||
Log-Success "$fileName signed successfully"
|
||||
Log-Info "Signed by: $($newSig.SignerCertificate.Subject)"
|
||||
Log-Info "Thumbprint: $($newSig.SignerCertificate.Thumbprint)"
|
||||
Log-Info "Valid from: $($newSig.SignerCertificate.NotBefore) to $($newSig.SignerCertificate.NotAfter)"
|
||||
} else {
|
||||
throw "$fileName signature verification failed: $($newSig.Status) - $($newSig.StatusMessage)"
|
||||
}
|
||||
}
|
||||
|
||||
# Cleanup function
|
||||
function Cleanup {
|
||||
if ($global:TEMP_CERT_PATH -and (Test-Path $global:TEMP_CERT_PATH)) {
|
||||
try {
|
||||
Remove-Item $global:TEMP_CERT_PATH -Force
|
||||
Log-Info "Cleaned up temporary certificate"
|
||||
} catch {
|
||||
Log-Error "Failed to cleanup temporary certificate: $_"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Main execution
|
||||
try {
|
||||
Write-Host "========================================" -ForegroundColor Cyan
|
||||
Write-Host " Windows Code Signing for Bun" -ForegroundColor Cyan
|
||||
Write-Host "========================================" -ForegroundColor Cyan
|
||||
|
||||
# Ensure we're in a VS environment
|
||||
Ensure-VSEnvironment
|
||||
|
||||
# Check environment variables
|
||||
Check-Environment
|
||||
|
||||
# Setup certificate
|
||||
Setup-Certificate
|
||||
|
||||
# Install and configure KeyLocker
|
||||
$smctlPath = Install-KeyLocker
|
||||
Configure-KeyLocker -SmctlPath $smctlPath
|
||||
|
||||
# Sign both executables
|
||||
Sign-Executable -ExePath $BunProfileExe -SmctlPath $smctlPath
|
||||
Sign-Executable -ExePath $BunExe -SmctlPath $smctlPath
|
||||
|
||||
Write-Host "========================================" -ForegroundColor Green
|
||||
Write-Host " Code signing completed successfully!" -ForegroundColor Green
|
||||
Write-Host "========================================" -ForegroundColor Green
|
||||
|
||||
exit 0
|
||||
|
||||
} catch {
|
||||
Log-Error "Code signing failed: $_"
|
||||
exit 1
|
||||
|
||||
} finally {
|
||||
Cleanup
|
||||
}
|
||||
4 .github/workflows/auto-label-claude-prs.yml vendored
@@ -6,7 +6,7 @@ on:
|
||||
|
||||
jobs:
|
||||
auto-label:
|
||||
if: github.event.pull_request.user.login == 'robobun'
|
||||
if: github.event.pull_request.user.login == 'robobun' || contains(github.event.pull_request.body, '🤖 Generated with')
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
@@ -21,4 +21,4 @@ jobs:
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
labels: ['claude']
|
||||
});
|
||||
});
|
||||
|
||||
48 .github/workflows/claude.yml vendored
@@ -13,23 +13,55 @@ on:
|
||||
jobs:
|
||||
claude:
|
||||
if: |
|
||||
github.repository == 'oven-sh/bun' &&
|
||||
(
|
||||
(github.event_name == 'issue_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) ||
|
||||
(github.event_name == 'pull_request_review' && (github.event.review.author_association == 'MEMBER' || github.event.review.author_association == 'OWNER' || github.event.review.author_association == 'COLLABORATOR')) ||
|
||||
(github.event_name == 'issues' && (github.event.issue.author_association == 'MEMBER' || github.event.issue.author_association == 'OWNER' || github.event.issue.author_association == 'COLLABORATOR'))
|
||||
) &&
|
||||
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
|
||||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
|
||||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: claude
|
||||
env:
|
||||
IS_SANDBOX: 1
|
||||
container:
|
||||
image: localhost:5000/claude-bun:latest
|
||||
options: --privileged --user 1000:1000
|
||||
permissions:
|
||||
contents: read
|
||||
id-token: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
working-directory: /workspace/bun
|
||||
run: |
|
||||
git config --global user.email "claude-bot@bun.sh" && \
|
||||
git config --global user.name "Claude Bot" && \
|
||||
git config --global url."git@github.com:".insteadOf "https://github.com/" && \
|
||||
git config --global url."git@github.com:".insteadOf "http://github.com/" && \
|
||||
git config --global --add safe.directory /workspace/bun && \
|
||||
git config --global push.default current && \
|
||||
git config --global pull.rebase true && \
|
||||
git config --global init.defaultBranch main && \
|
||||
git config --global core.editor "vim" && \
|
||||
git config --global color.ui auto && \
|
||||
git config --global fetch.prune true && \
|
||||
git config --global diff.colorMoved zebra && \
|
||||
git config --global merge.conflictStyle diff3 && \
|
||||
git config --global rerere.enabled true && \
|
||||
git config --global core.autocrlf input
|
||||
git fetch origin ${{ github.event.pull_request.head.sha }}
|
||||
git checkout ${{ github.event.pull_request.head.ref }}
|
||||
git reset --hard origin/${{ github.event.pull_request.head.ref }}
|
||||
- name: Run Claude Code
|
||||
id: claude
|
||||
uses: anthropics/claude-code-action@beta
|
||||
# TODO: switch this out once they merge their v1
|
||||
uses: km-anthropic/claude-code-action@v1-dev
|
||||
with:
|
||||
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
|
||||
timeout_minutes: "180"
|
||||
claude_args: |
|
||||
--dangerously-skip-permissions
|
||||
--system-prompt "You are working on the Bun codebase"
|
||||
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
|
||||
|
||||
24 .github/workflows/format.yml vendored
@@ -8,10 +8,8 @@ on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
merge_group:
|
||||
push:
|
||||
branches: ["main"]
|
||||
env:
|
||||
BUN_VERSION: "1.2.11"
|
||||
BUN_VERSION: "1.2.20"
|
||||
LLVM_VERSION: "19.1.7"
|
||||
LLVM_VERSION_MAJOR: "19"
|
||||
|
||||
@@ -37,13 +35,14 @@ jobs:
|
||||
- name: Setup Dependencies
|
||||
run: |
|
||||
bun install
|
||||
bun scripts/glob-sources.mjs
|
||||
- name: Format Code
|
||||
run: |
|
||||
# Start prettier in background with prefixed output
|
||||
echo "::group::Prettier"
|
||||
(bun run prettier 2>&1 | sed 's/^/[prettier] /' || echo "[prettier] Failed with exit code $?") &
|
||||
PRETTIER_PID=$!
|
||||
|
||||
|
||||
# Start clang-format installation and formatting in background with prefixed output
|
||||
echo "::group::Clang-format"
|
||||
(
|
||||
@@ -56,13 +55,13 @@ jobs:
|
||||
LLVM_VERSION_MAJOR=${{ env.LLVM_VERSION_MAJOR }} ./scripts/run-clang-format.sh format 2>&1 | sed 's/^/[clang-format] /'
|
||||
) &
|
||||
CLANG_PID=$!
|
||||
|
||||
|
||||
# Setup Zig in temp directory and run zig fmt in background with prefixed output
|
||||
echo "::group::Zig fmt"
|
||||
(
|
||||
ZIG_TEMP=$(mktemp -d)
|
||||
echo "[zig] Downloading Zig (musl build)..."
|
||||
wget -q -O "$ZIG_TEMP/zig.zip" https://github.com/oven-sh/zig/releases/download/autobuild-d1a4e0b0ddc75f37c6a090b97eef0cbb6335556e/bootstrap-x86_64-linux-musl.zip
|
||||
wget -q -O "$ZIG_TEMP/zig.zip" https://github.com/oven-sh/zig/releases/download/autobuild-e0b7c318f318196c5f81fdf3423816a7b5bb3112/bootstrap-x86_64-linux-musl.zip
|
||||
unzip -q -d "$ZIG_TEMP" "$ZIG_TEMP/zig.zip"
|
||||
export PATH="$ZIG_TEMP/bootstrap-x86_64-linux-musl:$PATH"
|
||||
echo "[zig] Running zig fmt..."
|
||||
@@ -72,38 +71,39 @@ jobs:
|
||||
rm -rf "$ZIG_TEMP"
|
||||
) &
|
||||
ZIG_PID=$!
|
||||
|
||||
|
||||
# Wait for all formatting tasks to complete
|
||||
echo ""
|
||||
echo "Running formatters in parallel..."
|
||||
FAILED=0
|
||||
|
||||
|
||||
if ! wait $PRETTIER_PID; then
|
||||
echo "::error::Prettier failed"
|
||||
FAILED=1
|
||||
fi
|
||||
echo "::endgroup::"
|
||||
|
||||
|
||||
if ! wait $CLANG_PID; then
|
||||
echo "::error::Clang-format failed"
|
||||
FAILED=1
|
||||
fi
|
||||
echo "::endgroup::"
|
||||
|
||||
|
||||
if ! wait $ZIG_PID; then
|
||||
echo "::error::Zig fmt failed"
|
||||
FAILED=1
|
||||
fi
|
||||
echo "::endgroup::"
|
||||
|
||||
|
||||
# Exit with error if any formatter failed
|
||||
if [ $FAILED -eq 1 ]; then
|
||||
echo "::error::One or more formatters failed"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
||||
echo "✅ All formatters completed successfully"
|
||||
- name: Ban Words
|
||||
run: |
|
||||
bun ./test/internal/ban-words.test.ts
|
||||
git rm -f cmake/sources/*.txt || true
|
||||
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
|
||||
|
||||
41 .github/workflows/glob-sources.yml vendored
@@ -1,41 +0,0 @@
|
||||
name: Glob Sources
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.2.11"
|
||||
|
||||
jobs:
|
||||
glob-sources:
|
||||
name: Glob Sources
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Configure Git
|
||||
run: |
|
||||
git config --global core.autocrlf true
|
||||
git config --global core.ignorecase true
|
||||
git config --global core.precomposeUnicode true
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Setup Dependencies
|
||||
run: |
|
||||
bun install
|
||||
- name: Glob sources
|
||||
run: bun scripts/glob-sources.mjs
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "`bun scripts/glob-sources.mjs`"
|
||||
|
||||
41 .github/workflows/labeled.yml vendored
@@ -5,6 +5,8 @@ env:
|
||||
on:
|
||||
issues:
|
||||
types: [labeled]
|
||||
pull_request_target:
|
||||
types: [labeled, opened, reopened, synchronize, unlabeled]
|
||||
|
||||
jobs:
|
||||
# on-bug:
|
||||
@@ -43,9 +45,46 @@ jobs:
|
||||
# token: ${{ secrets.GITHUB_TOKEN }}
|
||||
# issue-number: ${{ github.event.issue.number }}
|
||||
# labels: ${{ steps.add-labels.outputs.labels }}
|
||||
on-slop:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event_name == 'pull_request_target' && contains(github.event.pull_request.labels.*.name, 'slop')
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
contents: write
|
||||
steps:
|
||||
- name: Update PR title and body for slop and close
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const pr = await github.rest.pulls.get({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number
|
||||
});
|
||||
|
||||
await github.rest.pulls.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number,
|
||||
title: 'ai slop',
|
||||
body: 'This PR has been marked as AI slop and the description has been updated to avoid confusion or misleading reviewers.\n\nMany AI PRs are fine, but sometimes they submit a PR too early, fail to test if the problem is real, fail to reproduce the problem, or fail to test that the problem is fixed. If you think this PR is not AI slop, please leave a comment.',
|
||||
state: 'closed'
|
||||
});
|
||||
|
||||
// Delete the branch if it's from a fork or if it's not a protected branch
|
||||
try {
|
||||
await github.rest.git.deleteRef({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
ref: `heads/${pr.data.head.ref}`
|
||||
});
|
||||
} catch (error) {
|
||||
console.log('Could not delete branch:', error.message);
|
||||
}
|
||||
on-labeled:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.label.name == 'crash' || github.event.label.name == 'needs repro'
|
||||
if: github.event_name == 'issues' && (github.event.label.name == 'crash' || github.event.label.name == 'needs repro')
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
|
||||
5 .gitignore vendored
@@ -186,4 +186,7 @@ scratch*.{js,ts,tsx,cjs,mjs}
|
||||
|
||||
*.bun-build
|
||||
|
||||
scripts/lldb-inline
|
||||
scripts/lldb-inline
|
||||
|
||||
# We regenerate these in all the build scripts
|
||||
cmake/sources/*.txt
|
||||
3 .vscode/launch.json generated vendored
@@ -22,6 +22,9 @@
|
||||
"BUN_DEBUG_QUIET_LOGS": "1",
|
||||
"BUN_DEBUG_jest": "1",
|
||||
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
|
||||
// "BUN_JSC_validateExceptionChecks": "1",
|
||||
// "BUN_JSC_dumpSimulatedThrows": "1",
|
||||
// "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
|
||||
},
|
||||
"console": "internalConsole",
|
||||
"sourceMap": {
|
||||
|
||||
2 .vscode/settings.json vendored
@@ -168,5 +168,5 @@
|
||||
"WebKit/WebInspectorUI": true,
|
||||
},
|
||||
"git.detectSubmodules": false,
|
||||
"bun.test.customScript": "./build/debug/bun-debug test"
|
||||
"bun.test.customScript": "./build/debug/bun-debug test",
|
||||
}
|
||||
|
||||
12 CLAUDE.md
@@ -43,11 +43,11 @@ Tests use Bun's Jest-compatible test runner with proper test fixtures:

 ```typescript
 import { test, expect } from "bun:test";
-import { bunEnv, bunExe, tempDirWithFiles } from "harness";
+import { bunEnv, bunExe, normalizeBunSnapshot, tempDir } from "harness";

 test("my feature", async () => {
   // Create temp directory with test files
-  const dir = tempDirWithFiles("test-prefix", {
+  using dir = tempDir("test-prefix", {
     "index.js": `console.log("hello");`,
   });

@@ -55,7 +55,8 @@ test("my feature", async () => {
   await using proc = Bun.spawn({
     cmd: [bunExe(), "index.js"],
     env: bunEnv,
-    cwd: dir,
+    cwd: String(dir),
+    stderr: "pipe",
   });

   const [stdout, stderr, exitCode] = await Promise.all([
@@ -65,11 +66,14 @@ test("my feature", async () => {
   ]);

   expect(exitCode).toBe(0);
-  expect(stdout).toBe("hello\n");
+  // Prefer snapshot tests over expect(stdout).toBe("hello\n");
+  expect(normalizeBunSnapshot(stdout, dir)).toMatchInlineSnapshot(`"hello"`);
 });
 ```

 - Always use `port: 0`. Do not hardcode ports. Do not use your own random port number function.
+- Use `normalizeBunSnapshot` to normalize snapshot output of the test.
+- NEVER write tests that check for no "panic" or "uncaught exception" or similar in the test output. That is NOT a valid test.
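As an illustration of the `port: 0` guideline above, a minimal hypothetical test (not part of this diff; it assumes only the standard `Bun.serve` API) might look like:

```typescript
import { test, expect } from "bun:test";

test("server picks an ephemeral port", async () => {
  // port: 0 lets the OS choose a free port, so parallel tests never collide.
  const server = Bun.serve({
    port: 0,
    fetch() {
      return new Response("hello");
    },
  });

  try {
    // Read the real port/URL back from the server instead of hardcoding it.
    const res = await fetch(server.url);
    expect(await res.text()).toBe("hello");
  } finally {
    server.stop();
  }
});
```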
## Code Architecture

@@ -223,8 +223,8 @@ $ git clone https://github.com/oven-sh/WebKit vendor/WebKit
 $ git -C vendor/WebKit checkout <commit_hash>

 # Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
-# Optionally, you can use `make jsc` for a release build
-$ make jsc-debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
+# Optionally, you can use `bun run jsc:build` for a release build
+$ bun run jsc:build:debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h

 # After an initial run of `make jsc-debug`, you can rebuild JSC with:
 $ cmake --build vendor/WebKit/WebKitBuild/Debug --target jsc && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h
116 bench/postMessage/postMessage-object.mjs Normal file
@@ -0,0 +1,116 @@
|
||||
// Benchmark for object fast path optimization in postMessage with Workers
|
||||
|
||||
import { bench, run } from "mitata";
|
||||
import { Worker } from "node:worker_threads";
|
||||
|
||||
const extraProperties = {
|
||||
a: "a!",
|
||||
b: "b!",
|
||||
"second": "c!",
|
||||
bool: true,
|
||||
nully: null,
|
||||
undef: undefined,
|
||||
int: 0,
|
||||
double: 1.234,
|
||||
falsy: false,
|
||||
};
|
||||
|
||||
const objects = {
|
||||
small: { property: "Hello world", ...extraProperties },
|
||||
medium: {
|
||||
property: Buffer.alloc("Hello World!!!".length * 1024, "Hello World!!!").toString(),
|
||||
...extraProperties,
|
||||
},
|
||||
large: {
|
||||
property: Buffer.alloc("Hello World!!!".length * 1024 * 256, "Hello World!!!").toString(),
|
||||
...extraProperties,
|
||||
},
|
||||
};
|
||||
|
||||
let worker;
|
||||
let receivedCount = new Int32Array(new SharedArrayBuffer(4));
|
||||
let sentCount = 0;
|
||||
|
||||
function createWorker() {
|
||||
const workerCode = `
|
||||
import { parentPort, workerData } from "node:worker_threads";
|
||||
|
||||
let int = workerData;
|
||||
|
||||
parentPort?.on("message", data => {
|
||||
switch (data.property.length) {
|
||||
case ${objects.small.property.length}:
|
||||
case ${objects.medium.property.length}:
|
||||
case ${objects.large.property.length}: {
|
||||
if (
|
||||
data.a === "a!" &&
|
||||
data.b === "b!" &&
|
||||
data.second === "c!" &&
|
||||
data.bool === true &&
|
||||
data.nully === null &&
|
||||
data.undef === undefined &&
|
||||
data.int === 0 &&
|
||||
data.double === 1.234 &&
|
||||
data.falsy === false) {
|
||||
Atomics.add(int, 0, 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
default: {
|
||||
throw new Error("Invalid data object: " + JSON.stringify(data));
|
||||
}
|
||||
}
|
||||
|
||||
});
|
||||
`;
|
||||
|
||||
worker = new Worker(workerCode, { eval: true, workerData: receivedCount });
|
||||
|
||||
worker.on("message", confirmationId => {});
|
||||
|
||||
worker.on("error", error => {
|
||||
console.error("Worker error:", error);
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize worker before running benchmarks
|
||||
createWorker();
|
||||
|
||||
function fmt(int) {
|
||||
if (int < 1000) {
|
||||
return `${int} chars`;
|
||||
}
|
||||
|
||||
if (int < 100000) {
|
||||
return `${(int / 1024) | 0} KB`;
|
||||
}
|
||||
|
||||
return `${(int / 1024 / 1024) | 0} MB`;
|
||||
}
|
||||
|
||||
// Benchmark postMessage with pure strings (uses fast path)
|
||||
bench("postMessage({ prop: " + fmt(objects.small.property.length) + " string, ...9 more props })", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(objects.small);
|
||||
});
|
||||
|
||||
bench("postMessage({ prop: " + fmt(objects.medium.property.length) + " string, ...9 more props })", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(objects.medium);
|
||||
});
|
||||
|
||||
bench("postMessage({ prop: " + fmt(objects.large.property.length) + " string, ...9 more props })", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(objects.large);
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000));
|
||||
|
||||
if (receivedCount[0] !== sentCount) {
|
||||
throw new Error("Expected " + receivedCount[0] + " to equal " + sentCount);
|
||||
}
|
||||
|
||||
// Cleanup worker
|
||||
worker?.terminate();
|
||||
77 bench/postMessage/postMessage-string.mjs Normal file
@@ -0,0 +1,77 @@
|
||||
// Benchmark for string fast path optimization in postMessage with Workers
|
||||
|
||||
import { bench, run } from "mitata";
|
||||
import { Worker, isMainThread, parentPort } from "node:worker_threads";
|
||||
|
||||
// Test strings of different sizes
|
||||
const strings = {
|
||||
small: "Hello world",
|
||||
medium: Buffer.alloc("Hello World!!!".length * 1024, "Hello World!!!").toString(),
|
||||
large: Buffer.alloc("Hello World!!!".length * 1024 * 256, "Hello World!!!").toString(),
|
||||
};
|
||||
|
||||
let worker;
|
||||
let receivedCount = new Int32Array(new SharedArrayBuffer(4));
|
||||
let sentCount = 0;
|
||||
|
||||
function createWorker() {
|
||||
const workerCode = `
|
||||
import { parentPort, workerData } from "node:worker_threads";
|
||||
|
||||
let int = workerData;
|
||||
|
||||
parentPort?.on("message", data => {
|
||||
Atomics.add(int, 0, 1);
|
||||
});
|
||||
`;
|
||||
|
||||
worker = new Worker(workerCode, { eval: true, workerData: receivedCount });
|
||||
|
||||
worker.on("message", confirmationId => {});
|
||||
|
||||
worker.on("error", error => {
|
||||
console.error("Worker error:", error);
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize worker before running benchmarks
|
||||
createWorker();
|
||||
|
||||
function fmt(int) {
|
||||
if (int < 1000) {
|
||||
return `${int} chars`;
|
||||
}
|
||||
|
||||
if (int < 100000) {
|
||||
return `${(int / 1024) | 0} KB`;
|
||||
}
|
||||
|
||||
return `${(int / 1024 / 1024) | 0} MB`;
|
||||
}
|
||||
|
||||
// Benchmark postMessage with pure strings (uses fast path)
|
||||
bench("postMessage(" + fmt(strings.small.length) + " string)", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(strings.small);
|
||||
});
|
||||
|
||||
bench("postMessage(" + fmt(strings.medium.length) + " string)", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(strings.medium);
|
||||
});
|
||||
|
||||
bench("postMessage(" + fmt(strings.large.length) + " string)", async () => {
|
||||
sentCount++;
|
||||
worker.postMessage(strings.large);
|
||||
});
|
||||
|
||||
await run();
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 5000));
|
||||
|
||||
if (receivedCount[0] !== sentCount) {
|
||||
throw new Error("Expected " + receivedCount[0] + " to equal " + sentCount);
|
||||
}
|
||||
|
||||
// Cleanup worker
|
||||
worker?.terminate();
|
||||
56 bench/postMessage/structureClone-string.mjs Normal file
@@ -0,0 +1,56 @@
|
||||
// Benchmark for string fast path optimization in postMessage and structuredClone
|
||||
|
||||
import { bench, run } from "mitata";
|
||||
|
||||
// Test strings of different sizes
|
||||
const strings = {
|
||||
small: "Hello world",
|
||||
medium: "Hello World!!!".repeat(1024).split("").join(""),
|
||||
large: "Hello World!!!".repeat(1024).repeat(1024).split("").join(""),
|
||||
};
|
||||
|
||||
console.log("String fast path benchmark");
|
||||
console.log("Comparing pure strings (fast path) vs objects containing strings (traditional)");
|
||||
console.log("For structuredClone, pure strings should have constant time regardless of size.");
|
||||
console.log("");
|
||||
|
||||
// Benchmark structuredClone with pure strings (uses fast path)
|
||||
bench("structuredClone small string (fast path)", () => {
|
||||
structuredClone(strings.small);
|
||||
});
|
||||
|
||||
bench("structuredClone medium string (fast path)", () => {
|
||||
structuredClone(strings.medium);
|
||||
});
|
||||
|
||||
bench("structuredClone large string (fast path)", () => {
|
||||
structuredClone(strings.large);
|
||||
});
|
||||
|
||||
// Benchmark structuredClone with objects containing strings (traditional path)
|
||||
bench("structuredClone object with small string", () => {
|
||||
structuredClone({ str: strings.small });
|
||||
});
|
||||
|
||||
bench("structuredClone object with medium string", () => {
|
||||
structuredClone({ str: strings.medium });
|
||||
});
|
||||
|
||||
bench("structuredClone object with large string", () => {
|
||||
structuredClone({ str: strings.large });
|
||||
});
|
||||
|
||||
// Multiple string cloning benchmark
|
||||
bench("structuredClone 100 small strings", () => {
|
||||
for (let i = 0; i < 100; i++) {
|
||||
structuredClone(strings.small);
|
||||
}
|
||||
});
|
||||
|
||||
bench("structuredClone 100 small objects", () => {
|
||||
for (let i = 0; i < 100; i++) {
|
||||
structuredClone({ str: strings.small });
|
||||
}
|
||||
});
|
||||
|
||||
await run();
|
||||
Binary file not shown.
58 bench/postgres/mysql.mjs Normal file
@@ -0,0 +1,58 @@
|
||||
const isBun = typeof globalThis?.Bun?.sql !== "undefined";
|
||||
let conn;
|
||||
let sql;
|
||||
import * as mariadb from "mariadb";
|
||||
import * as mysql2 from "mysql2/promise";
|
||||
let useMYSQL2 = false;
|
||||
if (process.argv.includes("--mysql2")) {
|
||||
useMYSQL2 = true;
|
||||
}
|
||||
if (isBun) {
|
||||
sql = new Bun.SQL({
|
||||
adapter: "mysql",
|
||||
database: "test",
|
||||
username: "root",
|
||||
});
|
||||
} else {
|
||||
const pool = (useMYSQL2 ? mysql2 : mariadb).createPool({
|
||||
// Add your MariaDB connection details here
|
||||
user: "root",
|
||||
database: "test",
|
||||
});
|
||||
conn = await pool.getConnection();
|
||||
}
|
||||
|
||||
if (isBun) {
|
||||
// Initialize the benchmark table (equivalent to initFct)
|
||||
await sql`DROP TABLE IF EXISTS test100`;
|
||||
await sql`CREATE TABLE test100 (i1 int,i2 int,i3 int,i4 int,i5 int,i6 int,i7 int,i8 int,i9 int,i10 int,i11 int,i12 int,i13 int,i14 int,i15 int,i16 int,i17 int,i18 int,i19 int,i20 int,i21 int,i22 int,i23 int,i24 int,i25 int,i26 int,i27 int,i28 int,i29 int,i30 int,i31 int,i32 int,i33 int,i34 int,i35 int,i36 int,i37 int,i38 int,i39 int,i40 int,i41 int,i42 int,i43 int,i44 int,i45 int,i46 int,i47 int,i48 int,i49 int,i50 int,i51 int,i52 int,i53 int,i54 int,i55 int,i56 int,i57 int,i58 int,i59 int,i60 int,i61 int,i62 int,i63 int,i64 int,i65 int,i66 int,i67 int,i68 int,i69 int,i70 int,i71 int,i72 int,i73 int,i74 int,i75 int,i76 int,i77 int,i78 int,i79 int,i80 int,i81 int,i82 int,i83 int,i84 int,i85 int,i86 int,i87 int,i88 int,i89 int,i90 int,i91 int,i92 int,i93 int,i94 int,i95 int,i96 int,i97 int,i98 int,i99 int,i100 int)`;
|
||||
await sql`INSERT INTO test100 value (1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100)`;
|
||||
} else {
|
||||
// Initialize the benchmark table (equivalent to initFct)
|
||||
await conn.query("DROP TABLE IF EXISTS test100");
|
||||
await conn.query(
|
||||
"CREATE TABLE test100 (i1 int,i2 int,i3 int,i4 int,i5 int,i6 int,i7 int,i8 int,i9 int,i10 int,i11 int,i12 int,i13 int,i14 int,i15 int,i16 int,i17 int,i18 int,i19 int,i20 int,i21 int,i22 int,i23 int,i24 int,i25 int,i26 int,i27 int,i28 int,i29 int,i30 int,i31 int,i32 int,i33 int,i34 int,i35 int,i36 int,i37 int,i38 int,i39 int,i40 int,i41 int,i42 int,i43 int,i44 int,i45 int,i46 int,i47 int,i48 int,i49 int,i50 int,i51 int,i52 int,i53 int,i54 int,i55 int,i56 int,i57 int,i58 int,i59 int,i60 int,i61 int,i62 int,i63 int,i64 int,i65 int,i66 int,i67 int,i68 int,i69 int,i70 int,i71 int,i72 int,i73 int,i74 int,i75 int,i76 int,i77 int,i78 int,i79 int,i80 int,i81 int,i82 int,i83 int,i84 int,i85 int,i86 int,i87 int,i88 int,i89 int,i90 int,i91 int,i92 int,i93 int,i94 int,i95 int,i96 int,i97 int,i98 int,i99 int,i100 int)",
|
||||
);
|
||||
await conn.query(
|
||||
"INSERT INTO test100 value (1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100)",
|
||||
);
|
||||
}
|
||||
// Run the benchmark (equivalent to benchFct)
|
||||
const type = isBun ? "Bun.SQL" : useMYSQL2 ? "mysql2" : "mariadb";
|
||||
console.time(type);
|
||||
let promises = [];
|
||||
|
||||
for (let i = 0; i < 100_000; i++) {
|
||||
if (isBun) {
|
||||
promises.push(sql`select * FROM test100`);
|
||||
} else {
|
||||
promises.push(conn.query("select * FROM test100"));
|
||||
}
|
||||
}
|
||||
await Promise.all(promises);
|
||||
console.timeEnd(type);
|
||||
|
||||
// Clean up connection
|
||||
if (!isBun && conn.release) {
|
||||
conn.release();
|
||||
}
|
||||
@@ -9,6 +9,8 @@
|
||||
"typescript": "^5.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"mariadb": "^3.4.5",
|
||||
"mysql2": "^3.14.3",
|
||||
"postgres": "^3.4.7"
|
||||
}
|
||||
}
|
||||
@@ -12,6 +12,9 @@ const scenarios = [
|
||||
{ alg: "sha1", digest: "base64" },
|
||||
{ alg: "sha256", digest: "hex" },
|
||||
{ alg: "sha256", digest: "base64" },
|
||||
{ alg: "blake2b512", digest: "hex" },
|
||||
{ alg: "sha512-224", digest: "hex" },
|
||||
{ alg: "sha512-256", digest: "hex" },
|
||||
];
|
||||
|
||||
for (const { alg, digest } of scenarios) {
|
||||
@@ -23,6 +26,10 @@ for (const { alg, digest } of scenarios) {
|
||||
bench(`${alg}-${digest} (Bun.CryptoHasher)`, () => {
|
||||
new Bun.CryptoHasher(alg).update(data).digest(digest);
|
||||
});
|
||||
|
||||
bench(`${alg}-${digest} (Bun.CryptoHasher.hash)`, () => {
|
||||
return Bun.CryptoHasher.hash(alg, data, digest);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
19 bench/yaml/bun.lock Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"lockfileVersion": 1,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"name": "yaml-benchmark",
|
||||
"dependencies": {
|
||||
"js-yaml": "^4.1.0",
|
||||
"yaml": "^2.8.1",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="],
|
||||
|
||||
"js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="],
|
||||
|
||||
"yaml": ["yaml@2.8.1", "", { "bin": { "yaml": "bin.mjs" } }, "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw=="],
|
||||
}
|
||||
}
|
||||
8 bench/yaml/package.json Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"name": "yaml-benchmark",
|
||||
"version": "1.0.0",
|
||||
"dependencies": {
|
||||
"js-yaml": "^4.1.0",
|
||||
"yaml": "^2.8.1"
|
||||
}
|
||||
}
|
||||
368 bench/yaml/yaml-parse.mjs Normal file
@@ -0,0 +1,368 @@
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
import jsYaml from "js-yaml";
|
||||
import yaml from "yaml";
|
||||
|
||||
// Small YAML document
|
||||
const smallYaml = `
|
||||
name: John Doe
|
||||
age: 30
|
||||
email: john@example.com
|
||||
active: true
|
||||
`;
|
||||
|
||||
// Medium YAML document with nested structures
|
||||
const mediumYaml = `
|
||||
company: Acme Corp
|
||||
employees:
|
||||
- name: John Doe
|
||||
age: 30
|
||||
position: Developer
|
||||
skills:
|
||||
- JavaScript
|
||||
- TypeScript
|
||||
- Node.js
|
||||
- name: Jane Smith
|
||||
age: 28
|
||||
position: Designer
|
||||
skills:
|
||||
- Figma
|
||||
- Photoshop
|
||||
- Illustrator
|
||||
- name: Bob Johnson
|
||||
age: 35
|
||||
position: Manager
|
||||
skills:
|
||||
- Leadership
|
||||
- Communication
|
||||
- Planning
|
||||
settings:
|
||||
database:
|
||||
host: localhost
|
||||
port: 5432
|
||||
name: mydb
|
||||
cache:
|
||||
enabled: true
|
||||
ttl: 3600
|
||||
`;
|
||||
|
||||
// Large YAML document with complex structures
|
||||
const largeYaml = `
|
||||
apiVersion: apps/v1
|
||||
kind: Deployment
|
||||
metadata:
|
||||
name: nginx-deployment
|
||||
labels:
|
||||
app: nginx
|
||||
spec:
|
||||
replicas: 3
|
||||
selector:
|
||||
matchLabels:
|
||||
app: nginx
|
||||
template:
|
||||
metadata:
|
||||
labels:
|
||||
app: nginx
|
||||
spec:
|
||||
containers:
|
||||
- name: nginx
|
||||
image: nginx:1.14.2
|
||||
ports:
|
||||
- containerPort: 80
|
||||
env:
|
||||
- name: ENV_VAR_1
|
||||
value: "value1"
|
||||
- name: ENV_VAR_2
|
||||
value: "value2"
|
||||
volumeMounts:
|
||||
- name: config
|
||||
mountPath: /etc/nginx
|
||||
resources:
|
||||
limits:
|
||||
cpu: "1"
|
||||
memory: "1Gi"
|
||||
requests:
|
||||
cpu: "0.5"
|
||||
memory: "512Mi"
|
||||
volumes:
|
||||
- name: config
|
||||
configMap:
|
||||
name: nginx-config
|
||||
items:
|
||||
- key: nginx.conf
|
||||
path: nginx.conf
|
||||
- key: mime.types
|
||||
path: mime.types
|
||||
nodeSelector:
|
||||
disktype: ssd
|
||||
tolerations:
|
||||
- key: "key1"
|
||||
operator: "Equal"
|
||||
value: "value1"
|
||||
effect: "NoSchedule"
|
||||
- key: "key2"
|
||||
operator: "Exists"
|
||||
effect: "NoExecute"
|
||||
affinity:
|
||||
nodeAffinity:
|
||||
requiredDuringSchedulingIgnoredDuringExecution:
|
||||
nodeSelectorTerms:
|
||||
- matchExpressions:
|
||||
- key: kubernetes.io/e2e-az-name
|
||||
operator: In
|
||||
values:
|
||||
- e2e-az1
|
||||
- e2e-az2
|
||||
podAntiAffinity:
|
||||
preferredDuringSchedulingIgnoredDuringExecution:
|
||||
- weight: 100
|
||||
podAffinityTerm:
|
||||
labelSelector:
|
||||
matchExpressions:
|
||||
- key: app
|
||||
operator: In
|
||||
values:
|
||||
- web-store
|
||||
topologyKey: kubernetes.io/hostname
|
||||
`;
|
||||
|
||||
// YAML with anchors and references
|
||||
const yamlWithAnchors = `
|
||||
defaults: &defaults
|
||||
adapter: postgresql
|
||||
host: localhost
|
||||
port: 5432
|
||||
|
||||
development:
|
||||
<<: *defaults
|
||||
database: dev_db
|
||||
|
||||
test:
|
||||
<<: *defaults
|
||||
database: test_db
|
||||
|
||||
production:
|
||||
<<: *defaults
|
||||
database: prod_db
|
||||
host: prod.example.com
|
||||
`;
|
||||
|
||||
// Array of items
|
||||
const arrayYaml = `
|
||||
- id: 1
|
||||
name: Item 1
|
||||
price: 10.99
|
||||
tags: [electronics, gadgets]
|
||||
- id: 2
|
||||
name: Item 2
|
||||
price: 25.50
|
||||
tags: [books, education]
|
||||
- id: 3
|
||||
name: Item 3
|
||||
price: 5.00
|
||||
tags: [food, snacks]
|
||||
- id: 4
|
||||
name: Item 4
|
||||
price: 100.00
|
||||
tags: [electronics, computers]
|
||||
- id: 5
|
||||
name: Item 5
|
||||
price: 15.75
|
||||
tags: [clothing, accessories]
|
||||
`;
|
||||
|
||||
// Multiline strings
|
||||
const multilineYaml = `
|
||||
description: |
|
||||
This is a multiline string
|
||||
that preserves line breaks
|
||||
and indentation.
|
||||
|
||||
It can contain multiple paragraphs
|
||||
and special characters: !@#$%^&*()
|
||||
|
||||
folded: >
|
||||
This is a folded string
|
||||
where line breaks are converted
|
||||
to spaces unless there are
|
||||
|
||||
empty lines like above.
|
||||
plain: This is a plain string
|
||||
quoted: "This is a quoted string with \\"escapes\\""
|
||||
literal: 'This is a literal string with ''quotes'''
|
||||
`;
|
||||
|
||||
// Numbers and special values
|
||||
const numbersYaml = `
|
||||
integer: 42
|
||||
negative: -17
|
||||
float: 3.14159
|
||||
scientific: 1.23e-4
|
||||
infinity: .inf
|
||||
negativeInfinity: -.inf
|
||||
notANumber: .nan
|
||||
octal: 0o755
|
||||
hex: 0xFF
|
||||
binary: 0b1010
|
||||
`;
|
||||
|
||||
// Dates and timestamps
|
||||
const datesYaml = `
|
||||
date: 2024-01-15
|
||||
datetime: 2024-01-15T10:30:00Z
|
||||
timestamp: 2024-01-15 10:30:00.123456789 -05:00
|
||||
canonical: 2024-01-15T10:30:00.123456789Z
|
||||
`;
|
||||
|
||||
// Parse benchmarks
|
||||
group("parse small YAML", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(smallYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(smallYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(smallYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse medium YAML", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(mediumYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(mediumYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(mediumYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse large YAML", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(largeYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(largeYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(largeYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML with anchors", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(yamlWithAnchors);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(yamlWithAnchors);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(yamlWithAnchors);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML array", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(arrayYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(arrayYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(arrayYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML with multiline strings", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(multilineYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(multilineYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(multilineYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML with numbers", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(numbersYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(numbersYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(numbersYaml);
|
||||
});
|
||||
});
|
||||
|
||||
group("parse YAML with dates", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.parse", () => {
|
||||
globalThis.result = Bun.YAML.parse(datesYaml);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.load", () => {
|
||||
globalThis.result = jsYaml.load(datesYaml);
|
||||
});
|
||||
|
||||
bench("yaml.parse", () => {
|
||||
globalThis.result = yaml.parse(datesYaml);
|
||||
});
|
||||
});
|
||||
|
||||
// // Stringify benchmarks
|
||||
// const smallObjJs = jsYaml.load(smallYaml);
|
||||
// const mediumObjJs = jsYaml.load(mediumYaml);
|
||||
// const largeObjJs = jsYaml.load(largeYaml);
|
||||
|
||||
// group("stringify small object", () => {
|
||||
// bench("js-yaml.dump", () => {
|
||||
// globalThis.result = jsYaml.dump(smallObjJs);
|
||||
// });
|
||||
// });
|
||||
|
||||
// group("stringify medium object", () => {
|
||||
// bench("js-yaml.dump", () => {
|
||||
// globalThis.result = jsYaml.dump(mediumObjJs);
|
||||
// });
|
||||
// });
|
||||
|
||||
// group("stringify large object", () => {
|
||||
// bench("js-yaml.dump", () => {
|
||||
// globalThis.result = jsYaml.dump(largeObjJs);
|
||||
// });
|
||||
// });
|
||||
|
||||
await run();
|
||||
407
bench/yaml/yaml-stringify.mjs
Normal file
@@ -0,0 +1,407 @@
|
||||
import { bench, group, run } from "../runner.mjs";
|
||||
import jsYaml from "js-yaml";
|
||||
import yaml from "yaml";
|
||||
|
||||
// Small object
|
||||
const smallObject = {
|
||||
name: "John Doe",
|
||||
age: 30,
|
||||
email: "john@example.com",
|
||||
active: true,
|
||||
};
|
||||
|
||||
// Medium object with nested structures
|
||||
const mediumObject = {
|
||||
company: "Acme Corp",
|
||||
employees: [
|
||||
{
|
||||
name: "John Doe",
|
||||
age: 30,
|
||||
position: "Developer",
|
||||
skills: ["JavaScript", "TypeScript", "Node.js"],
|
||||
},
|
||||
{
|
||||
name: "Jane Smith",
|
||||
age: 28,
|
||||
position: "Designer",
|
||||
skills: ["Figma", "Photoshop", "Illustrator"],
|
||||
},
|
||||
{
|
||||
name: "Bob Johnson",
|
||||
age: 35,
|
||||
position: "Manager",
|
||||
skills: ["Leadership", "Communication", "Planning"],
|
||||
},
|
||||
],
|
||||
settings: {
|
||||
database: {
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
name: "mydb",
|
||||
},
|
||||
cache: {
|
||||
enabled: true,
|
||||
ttl: 3600,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Large object with complex structures
|
||||
const largeObject = {
|
||||
apiVersion: "apps/v1",
|
||||
kind: "Deployment",
|
||||
metadata: {
|
||||
name: "nginx-deployment",
|
||||
labels: {
|
||||
app: "nginx",
|
||||
},
|
||||
},
|
||||
spec: {
|
||||
replicas: 3,
|
||||
selector: {
|
||||
matchLabels: {
|
||||
app: "nginx",
|
||||
},
|
||||
},
|
||||
template: {
|
||||
metadata: {
|
||||
labels: {
|
||||
app: "nginx",
|
||||
},
|
||||
},
|
||||
spec: {
|
||||
containers: [
|
||||
{
|
||||
name: "nginx",
|
||||
image: "nginx:1.14.2",
|
||||
ports: [
|
||||
{
|
||||
containerPort: 80,
|
||||
},
|
||||
],
|
||||
env: [
|
||||
{
|
||||
name: "ENV_VAR_1",
|
||||
value: "value1",
|
||||
},
|
||||
{
|
||||
name: "ENV_VAR_2",
|
||||
value: "value2",
|
||||
},
|
||||
],
|
||||
volumeMounts: [
|
||||
{
|
||||
name: "config",
|
||||
mountPath: "/etc/nginx",
|
||||
},
|
||||
],
|
||||
resources: {
|
||||
limits: {
|
||||
cpu: "1",
|
||||
memory: "1Gi",
|
||||
},
|
||||
requests: {
|
||||
cpu: "0.5",
|
||||
memory: "512Mi",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
volumes: [
|
||||
{
|
||||
name: "config",
|
||||
configMap: {
|
||||
name: "nginx-config",
|
||||
items: [
|
||||
{
|
||||
key: "nginx.conf",
|
||||
path: "nginx.conf",
|
||||
},
|
||||
{
|
||||
key: "mime.types",
|
||||
path: "mime.types",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
nodeSelector: {
|
||||
disktype: "ssd",
|
||||
},
|
||||
tolerations: [
|
||||
{
|
||||
key: "key1",
|
||||
operator: "Equal",
|
||||
value: "value1",
|
||||
effect: "NoSchedule",
|
||||
},
|
||||
{
|
||||
key: "key2",
|
||||
operator: "Exists",
|
||||
effect: "NoExecute",
|
||||
},
|
||||
],
|
||||
affinity: {
|
||||
nodeAffinity: {
|
||||
requiredDuringSchedulingIgnoredDuringExecution: {
|
||||
nodeSelectorTerms: [
|
||||
{
|
||||
matchExpressions: [
|
||||
{
|
||||
key: "kubernetes.io/e2e-az-name",
|
||||
operator: "In",
|
||||
values: ["e2e-az1", "e2e-az2"],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
podAntiAffinity: {
|
||||
preferredDuringSchedulingIgnoredDuringExecution: [
|
||||
{
|
||||
weight: 100,
|
||||
podAffinityTerm: {
|
||||
labelSelector: {
|
||||
matchExpressions: [
|
||||
{
|
||||
key: "app",
|
||||
operator: "In",
|
||||
values: ["web-store"],
|
||||
},
|
||||
],
|
||||
},
|
||||
topologyKey: "kubernetes.io/hostname",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Object with anchors and references (after resolution)
|
||||
const objectWithAnchors = {
|
||||
defaults: {
|
||||
adapter: "postgresql",
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
},
|
||||
development: {
|
||||
adapter: "postgresql",
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
database: "dev_db",
|
||||
},
|
||||
test: {
|
||||
adapter: "postgresql",
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
database: "test_db",
|
||||
},
|
||||
production: {
|
||||
adapter: "postgresql",
|
||||
host: "prod.example.com",
|
||||
port: 5432,
|
||||
database: "prod_db",
|
||||
},
|
||||
};
|
||||
|
||||
// Array of items
|
||||
const arrayObject = [
|
||||
{
|
||||
id: 1,
|
||||
name: "Item 1",
|
||||
price: 10.99,
|
||||
tags: ["electronics", "gadgets"],
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: "Item 2",
|
||||
price: 25.5,
|
||||
tags: ["books", "education"],
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: "Item 3",
|
||||
price: 5.0,
|
||||
tags: ["food", "snacks"],
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
name: "Item 4",
|
||||
price: 100.0,
|
||||
tags: ["electronics", "computers"],
|
||||
},
|
||||
{
|
||||
id: 5,
|
||||
name: "Item 5",
|
||||
price: 15.75,
|
||||
tags: ["clothing", "accessories"],
|
||||
},
|
||||
];
|
||||
|
||||
// Multiline strings
|
||||
const multilineObject = {
|
||||
description:
|
||||
"This is a multiline string\nthat preserves line breaks\nand indentation.\n\nIt can contain multiple paragraphs\nand special characters: !@#$%^&*()\n",
|
||||
folded: "This is a folded string where line breaks are converted to spaces unless there are\nempty lines like above.",
|
||||
plain: "This is a plain string",
|
||||
quoted: 'This is a quoted string with "escapes"',
|
||||
literal: "This is a literal string with 'quotes'",
|
||||
};
|
||||
|
||||
// Numbers and special values
|
||||
const numbersObject = {
|
||||
integer: 42,
|
||||
negative: -17,
|
||||
float: 3.14159,
|
||||
scientific: 0.000123,
|
||||
infinity: Infinity,
|
||||
negativeInfinity: -Infinity,
|
||||
notANumber: NaN,
|
||||
octal: 493, // 0o755
|
||||
hex: 255, // 0xFF
|
||||
binary: 10, // 0b1010
|
||||
};
|
||||
|
||||
// Dates and timestamps
|
||||
const datesObject = {
|
||||
date: new Date("2024-01-15"),
|
||||
datetime: new Date("2024-01-15T10:30:00Z"),
|
||||
timestamp: new Date("2024-01-15T15:30:00.123456789Z"), // Adjusted for UTC-5
|
||||
canonical: new Date("2024-01-15T10:30:00.123456789Z"),
|
||||
};
|
||||
|
||||
// Stringify benchmarks
|
||||
group("stringify small object", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(smallObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(smallObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(smallObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify medium object", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(mediumObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(mediumObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(mediumObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify large object", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(largeObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(largeObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(largeObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify object with anchors", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(objectWithAnchors);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(objectWithAnchors);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(objectWithAnchors);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify array", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(arrayObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(arrayObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(arrayObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify object with multiline strings", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(multilineObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(multilineObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(multilineObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify object with numbers", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(numbersObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(numbersObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(numbersObject);
|
||||
});
|
||||
});
|
||||
|
||||
group("stringify object with dates", () => {
|
||||
if (typeof Bun !== "undefined" && Bun.YAML) {
|
||||
bench("Bun.YAML.stringify", () => {
|
||||
return Bun.YAML.stringify(datesObject);
|
||||
});
|
||||
}
|
||||
|
||||
bench("js-yaml.dump", () => {
|
||||
return jsYaml.dump(datesObject);
|
||||
});
|
||||
|
||||
bench("yaml.stringify", () => {
|
||||
return yaml.stringify(datesObject);
|
||||
});
|
||||
});
|
||||
|
||||
await run();
|
||||
3
bun.lock
@@ -6,6 +6,7 @@
|
||||
"devDependencies": {
|
||||
"@lezer/common": "^1.2.3",
|
||||
"@lezer/cpp": "^1.1.3",
|
||||
"@types/bun": "workspace:*",
|
||||
"bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
|
||||
"esbuild": "^0.21.4",
|
||||
"mitata": "^0.1.11",
|
||||
@@ -39,8 +40,8 @@
|
||||
},
|
||||
},
|
||||
"overrides": {
|
||||
"bun-types": "workspace:packages/bun-types",
|
||||
"@types/bun": "workspace:packages/@types/bun",
|
||||
"bun-types": "workspace:packages/bun-types",
|
||||
},
|
||||
"packages": {
|
||||
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.21.5", "", { "os": "aix", "cpu": "ppc64" }, "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ=="],
|
||||
|
||||
@@ -57,6 +57,23 @@ else()
|
||||
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
|
||||
endif()
|
||||
|
||||
# Windows Code Signing Option
|
||||
if(WIN32)
|
||||
optionx(ENABLE_WINDOWS_CODESIGNING BOOL "Enable Windows code signing with DigiCert KeyLocker" DEFAULT OFF)
|
||||
|
||||
if(ENABLE_WINDOWS_CODESIGNING)
|
||||
message(STATUS "Windows code signing: ENABLED")
|
||||
|
||||
# Check for required environment variables
|
||||
if(NOT DEFINED ENV{SM_API_KEY})
|
||||
message(WARNING "SM_API_KEY not set - code signing may fail")
|
||||
endif()
|
||||
if(NOT DEFINED ENV{SM_CLIENT_CERT_FILE})
|
||||
message(WARNING "SM_CLIENT_CERT_FILE not set - code signing may fail")
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(LINUX)
|
||||
if(EXISTS "/etc/alpine-release")
|
||||
set(DEFAULT_ABI "musl")
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
src/bake/bake.d.ts
|
||||
src/bake/bake.private.d.ts
|
||||
src/bake/bun-framework-react/index.ts
|
||||
src/bake/client/css-reloader.ts
|
||||
src/bake/client/data-view.ts
|
||||
src/bake/client/error-serialization.ts
|
||||
src/bake/client/inspect.ts
|
||||
src/bake/client/JavaScriptSyntaxHighlighter.css
|
||||
src/bake/client/JavaScriptSyntaxHighlighter.ts
|
||||
src/bake/client/overlay.css
|
||||
src/bake/client/overlay.ts
|
||||
src/bake/client/stack-trace.ts
|
||||
src/bake/client/websocket.ts
|
||||
src/bake/debug.ts
|
||||
src/bake/DevServer.bind.ts
|
||||
src/bake/enums.ts
|
||||
src/bake/hmr-module.ts
|
||||
src/bake/hmr-runtime-client.ts
|
||||
src/bake/hmr-runtime-error.ts
|
||||
src/bake/hmr-runtime-server.ts
|
||||
src/bake/server/stack-trace-stub.ts
|
||||
src/bake/shared.ts
|
||||
@@ -1,7 +0,0 @@
|
||||
src/bake.bind.ts
|
||||
src/bake/DevServer.bind.ts
|
||||
src/bun.js/api/BunObject.bind.ts
|
||||
src/bun.js/bindgen_test.bind.ts
|
||||
src/bun.js/bindings/NodeModuleModule.bind.ts
|
||||
src/bun.js/node/node_os.bind.ts
|
||||
src/fmt.bind.ts
|
||||
@@ -1,12 +0,0 @@
|
||||
packages/bun-error/bun-error.css
|
||||
packages/bun-error/img/close.png
|
||||
packages/bun-error/img/error.png
|
||||
packages/bun-error/img/powered-by.png
|
||||
packages/bun-error/img/powered-by.webp
|
||||
packages/bun-error/index.tsx
|
||||
packages/bun-error/markdown.ts
|
||||
packages/bun-error/package.json
|
||||
packages/bun-error/runtime-error.ts
|
||||
packages/bun-error/sourcemap.ts
|
||||
packages/bun-error/stack-trace-parser.ts
|
||||
packages/bun-error/tsconfig.json
|
||||
@@ -1,15 +0,0 @@
|
||||
packages/bun-usockets/src/bsd.c
|
||||
packages/bun-usockets/src/context.c
|
||||
packages/bun-usockets/src/crypto/openssl.c
|
||||
packages/bun-usockets/src/eventing/epoll_kqueue.c
|
||||
packages/bun-usockets/src/eventing/libuv.c
|
||||
packages/bun-usockets/src/loop.c
|
||||
packages/bun-usockets/src/quic.c
|
||||
packages/bun-usockets/src/socket.c
|
||||
packages/bun-usockets/src/udp.c
|
||||
src/asan-config.c
|
||||
src/bun.js/bindings/node/http/llhttp/api.c
|
||||
src/bun.js/bindings/node/http/llhttp/http.c
|
||||
src/bun.js/bindings/node/http/llhttp/llhttp.c
|
||||
src/bun.js/bindings/uv-posix-polyfills.c
|
||||
src/bun.js/bindings/uv-posix-stubs.c
|
||||
@@ -87,6 +87,7 @@ src/bun.js/bindings/JSNodePerformanceHooksHistogramConstructor.cpp
|
||||
src/bun.js/bindings/JSNodePerformanceHooksHistogramPrototype.cpp
|
||||
src/bun.js/bindings/JSPropertyIterator.cpp
|
||||
src/bun.js/bindings/JSS3File.cpp
|
||||
src/bun.js/bindings/JSSecrets.cpp
|
||||
src/bun.js/bindings/JSSocketAddressDTO.cpp
|
||||
src/bun.js/bindings/JSStringDecoder.cpp
|
||||
src/bun.js/bindings/JSWrappingFunction.cpp
|
||||
@@ -94,6 +95,7 @@ src/bun.js/bindings/JSX509Certificate.cpp
|
||||
src/bun.js/bindings/JSX509CertificateConstructor.cpp
|
||||
src/bun.js/bindings/JSX509CertificatePrototype.cpp
|
||||
src/bun.js/bindings/linux_perf_tracing.cpp
|
||||
src/bun.js/bindings/MarkedArgumentBufferBinding.cpp
|
||||
src/bun.js/bindings/MarkingConstraint.cpp
|
||||
src/bun.js/bindings/ModuleLoader.cpp
|
||||
src/bun.js/bindings/napi_external.cpp
|
||||
@@ -188,11 +190,15 @@ src/bun.js/bindings/ProcessIdentifier.cpp
|
||||
src/bun.js/bindings/RegularExpression.cpp
|
||||
src/bun.js/bindings/S3Error.cpp
|
||||
src/bun.js/bindings/ScriptExecutionContext.cpp
|
||||
src/bun.js/bindings/SecretsDarwin.cpp
|
||||
src/bun.js/bindings/SecretsLinux.cpp
|
||||
src/bun.js/bindings/SecretsWindows.cpp
|
||||
src/bun.js/bindings/Serialization.cpp
|
||||
src/bun.js/bindings/ServerRouteList.cpp
|
||||
src/bun.js/bindings/spawn.cpp
|
||||
src/bun.js/bindings/SQLClient.cpp
|
||||
src/bun.js/bindings/sqlite/JSSQLStatement.cpp
|
||||
src/bun.js/bindings/StringBuilderBinding.cpp
|
||||
src/bun.js/bindings/stripANSI.cpp
|
||||
src/bun.js/bindings/Strong.cpp
|
||||
src/bun.js/bindings/TextCodec.cpp
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
src/codegen/bake-codegen.ts
|
||||
src/codegen/bindgen-lib-internal.ts
|
||||
src/codegen/bindgen-lib.ts
|
||||
src/codegen/bindgen.ts
|
||||
src/codegen/buildTypeFlag.ts
|
||||
src/codegen/builtin-parser.ts
|
||||
src/codegen/bundle-functions.ts
|
||||
src/codegen/bundle-modules.ts
|
||||
src/codegen/class-definitions.ts
|
||||
src/codegen/client-js.ts
|
||||
src/codegen/cppbind.ts
|
||||
src/codegen/create-hash-table.ts
|
||||
src/codegen/generate-classes.ts
|
||||
src/codegen/generate-compact-string-table.ts
|
||||
src/codegen/generate-js2native.ts
|
||||
src/codegen/generate-jssink.ts
|
||||
src/codegen/generate-node-errors.ts
|
||||
src/codegen/helpers.ts
|
||||
src/codegen/internal-module-registry-scanner.ts
|
||||
src/codegen/replacements.ts
|
||||
src/codegen/shared-types.ts
|
||||
@@ -1,165 +0,0 @@
|
||||
src/js/builtins.d.ts
|
||||
src/js/builtins/Bake.ts
|
||||
src/js/builtins/BundlerPlugin.ts
|
||||
src/js/builtins/ByteLengthQueuingStrategy.ts
|
||||
src/js/builtins/CommonJS.ts
|
||||
src/js/builtins/ConsoleObject.ts
|
||||
src/js/builtins/CountQueuingStrategy.ts
|
||||
src/js/builtins/Glob.ts
|
||||
src/js/builtins/ImportMetaObject.ts
|
||||
src/js/builtins/Ipc.ts
|
||||
src/js/builtins/JSBufferConstructor.ts
|
||||
src/js/builtins/JSBufferPrototype.ts
|
||||
src/js/builtins/NodeModuleObject.ts
|
||||
src/js/builtins/Peek.ts
|
||||
src/js/builtins/ProcessObjectInternals.ts
|
||||
src/js/builtins/ReadableByteStreamController.ts
|
||||
src/js/builtins/ReadableByteStreamInternals.ts
|
||||
src/js/builtins/ReadableStream.ts
|
||||
src/js/builtins/ReadableStreamBYOBReader.ts
|
||||
src/js/builtins/ReadableStreamBYOBRequest.ts
|
||||
src/js/builtins/ReadableStreamDefaultController.ts
|
||||
src/js/builtins/ReadableStreamDefaultReader.ts
|
||||
src/js/builtins/ReadableStreamInternals.ts
|
||||
src/js/builtins/shell.ts
|
||||
src/js/builtins/StreamInternals.ts
|
||||
src/js/builtins/TextDecoderStream.ts
|
||||
src/js/builtins/TextEncoderStream.ts
|
||||
src/js/builtins/TransformStream.ts
|
||||
src/js/builtins/TransformStreamDefaultController.ts
|
||||
src/js/builtins/TransformStreamInternals.ts
|
||||
src/js/builtins/UtilInspect.ts
|
||||
src/js/builtins/WasmStreaming.ts
|
||||
src/js/builtins/WritableStreamDefaultController.ts
|
||||
src/js/builtins/WritableStreamDefaultWriter.ts
|
||||
src/js/builtins/WritableStreamInternals.ts
|
||||
src/js/bun/ffi.ts
|
||||
src/js/bun/sql.ts
|
||||
src/js/bun/sqlite.ts
|
||||
src/js/internal-for-testing.ts
|
||||
src/js/internal/abort_listener.ts
|
||||
src/js/internal/assert/assertion_error.ts
|
||||
src/js/internal/assert/calltracker.ts
|
||||
src/js/internal/assert/myers_diff.ts
|
||||
src/js/internal/assert/utils.ts
|
||||
src/js/internal/buffer.ts
|
||||
src/js/internal/cluster/child.ts
|
||||
src/js/internal/cluster/isPrimary.ts
|
||||
src/js/internal/cluster/primary.ts
|
||||
src/js/internal/cluster/RoundRobinHandle.ts
|
||||
src/js/internal/cluster/Worker.ts
|
||||
src/js/internal/crypto/x509.ts
|
||||
src/js/internal/debugger.ts
|
||||
src/js/internal/errors.ts
|
||||
src/js/internal/fifo.ts
|
||||
src/js/internal/fixed_queue.ts
|
||||
src/js/internal/freelist.ts
|
||||
src/js/internal/fs/cp-sync.ts
|
||||
src/js/internal/fs/cp.ts
|
||||
src/js/internal/fs/glob.ts
|
||||
src/js/internal/fs/streams.ts
|
||||
src/js/internal/html.ts
|
||||
src/js/internal/http.ts
|
||||
src/js/internal/http/FakeSocket.ts
|
||||
src/js/internal/linkedlist.ts
|
||||
src/js/internal/primordials.js
|
||||
src/js/internal/promisify.ts
|
||||
src/js/internal/shared.ts
|
||||
src/js/internal/stream.promises.ts
|
||||
src/js/internal/stream.ts
|
||||
src/js/internal/streams/add-abort-signal.ts
|
||||
src/js/internal/streams/compose.ts
|
||||
src/js/internal/streams/destroy.ts
|
||||
src/js/internal/streams/duplex.ts
|
||||
src/js/internal/streams/duplexify.ts
|
||||
src/js/internal/streams/duplexpair.ts
|
||||
src/js/internal/streams/end-of-stream.ts
|
||||
src/js/internal/streams/from.ts
|
||||
src/js/internal/streams/lazy_transform.ts
|
||||
src/js/internal/streams/legacy.ts
|
||||
src/js/internal/streams/native-readable.ts
|
||||
src/js/internal/streams/operators.ts
|
||||
src/js/internal/streams/passthrough.ts
|
||||
src/js/internal/streams/pipeline.ts
|
||||
src/js/internal/streams/readable.ts
|
||||
src/js/internal/streams/state.ts
|
||||
src/js/internal/streams/transform.ts
|
||||
src/js/internal/streams/utils.ts
|
||||
src/js/internal/streams/writable.ts
|
||||
src/js/internal/timers.ts
|
||||
src/js/internal/tls.ts
|
||||
src/js/internal/tty.ts
|
||||
src/js/internal/url.ts
|
||||
src/js/internal/util/colors.ts
|
||||
src/js/internal/util/inspect.d.ts
|
||||
src/js/internal/util/inspect.js
|
||||
src/js/internal/util/mime.ts
|
||||
src/js/internal/validators.ts
|
||||
src/js/internal/webstreams_adapters.ts
|
||||
src/js/node/_http_agent.ts
|
||||
src/js/node/_http_client.ts
|
||||
src/js/node/_http_common.ts
|
||||
src/js/node/_http_incoming.ts
|
||||
src/js/node/_http_outgoing.ts
|
||||
src/js/node/_http_server.ts
|
||||
src/js/node/_stream_duplex.ts
|
||||
src/js/node/_stream_passthrough.ts
|
||||
src/js/node/_stream_readable.ts
|
||||
src/js/node/_stream_transform.ts
|
||||
src/js/node/_stream_wrap.ts
|
||||
src/js/node/_stream_writable.ts
|
||||
src/js/node/_tls_common.ts
|
||||
src/js/node/assert.strict.ts
|
||||
src/js/node/assert.ts
|
||||
src/js/node/async_hooks.ts
|
||||
src/js/node/child_process.ts
|
||||
src/js/node/cluster.ts
|
||||
src/js/node/console.ts
|
||||
src/js/node/crypto.ts
|
||||
src/js/node/dgram.ts
|
||||
src/js/node/diagnostics_channel.ts
|
||||
src/js/node/dns.promises.ts
|
||||
src/js/node/dns.ts
|
||||
src/js/node/domain.ts
|
||||
src/js/node/events.ts
|
||||
src/js/node/fs.promises.ts
|
||||
src/js/node/fs.ts
|
||||
src/js/node/http.ts
|
||||
src/js/node/http2.ts
|
||||
src/js/node/https.ts
|
||||
src/js/node/inspector.ts
|
||||
src/js/node/net.ts
|
||||
src/js/node/os.ts
|
||||
src/js/node/path.posix.ts
|
||||
src/js/node/path.ts
|
||||
src/js/node/path.win32.ts
|
||||
src/js/node/perf_hooks.ts
|
||||
src/js/node/punycode.ts
|
||||
src/js/node/querystring.ts
|
||||
src/js/node/readline.promises.ts
|
||||
src/js/node/readline.ts
|
||||
src/js/node/repl.ts
|
||||
src/js/node/stream.consumers.ts
|
||||
src/js/node/stream.promises.ts
|
||||
src/js/node/stream.ts
|
||||
src/js/node/stream.web.ts
|
||||
src/js/node/test.ts
|
||||
src/js/node/timers.promises.ts
|
||||
src/js/node/timers.ts
|
||||
src/js/node/tls.ts
|
||||
src/js/node/trace_events.ts
|
||||
src/js/node/tty.ts
|
||||
src/js/node/url.ts
|
||||
src/js/node/util.ts
|
||||
src/js/node/v8.ts
|
||||
src/js/node/vm.ts
|
||||
src/js/node/wasi.ts
|
||||
src/js/node/worker_threads.ts
|
||||
src/js/node/zlib.ts
|
||||
src/js/private.d.ts
|
||||
src/js/thirdparty/isomorphic-fetch.ts
|
||||
src/js/thirdparty/node-fetch.ts
|
||||
src/js/thirdparty/undici.js
|
||||
src/js/thirdparty/vercel_fetch.js
|
||||
src/js/thirdparty/ws.js
|
||||
src/js/wasi-runner.js
|
||||
@@ -1,24 +0,0 @@
|
||||
src/node-fallbacks/assert.js
|
||||
src/node-fallbacks/buffer.js
|
||||
src/node-fallbacks/console.js
|
||||
src/node-fallbacks/constants.js
|
||||
src/node-fallbacks/crypto.js
|
||||
src/node-fallbacks/domain.js
|
||||
src/node-fallbacks/events.js
|
||||
src/node-fallbacks/http.js
|
||||
src/node-fallbacks/https.js
|
||||
src/node-fallbacks/net.js
|
||||
src/node-fallbacks/os.js
|
||||
src/node-fallbacks/path.js
|
||||
src/node-fallbacks/process.js
|
||||
src/node-fallbacks/punycode.js
|
||||
src/node-fallbacks/querystring.js
|
||||
src/node-fallbacks/stream.js
|
||||
src/node-fallbacks/string_decoder.js
|
||||
src/node-fallbacks/sys.js
|
||||
src/node-fallbacks/timers.js
|
||||
src/node-fallbacks/timers.promises.js
|
||||
src/node-fallbacks/tty.js
|
||||
src/node-fallbacks/url.js
|
||||
src/node-fallbacks/util.js
|
||||
src/node-fallbacks/zlib.js
|
||||
@@ -1,25 +0,0 @@
|
||||
src/bun.js/api/BunObject.classes.ts
|
||||
src/bun.js/api/crypto.classes.ts
|
||||
src/bun.js/api/ffi.classes.ts
|
||||
src/bun.js/api/filesystem_router.classes.ts
|
||||
src/bun.js/api/Glob.classes.ts
|
||||
src/bun.js/api/h2.classes.ts
|
||||
src/bun.js/api/html_rewriter.classes.ts
|
||||
src/bun.js/api/JSBundler.classes.ts
|
||||
src/bun.js/api/postgres.classes.ts
|
||||
src/bun.js/api/ResumableSink.classes.ts
|
||||
src/bun.js/api/S3Client.classes.ts
|
||||
src/bun.js/api/S3Stat.classes.ts
|
||||
src/bun.js/api/server.classes.ts
|
||||
src/bun.js/api/Shell.classes.ts
|
||||
src/bun.js/api/ShellArgs.classes.ts
|
||||
src/bun.js/api/sockets.classes.ts
|
||||
src/bun.js/api/sourcemap.classes.ts
|
||||
src/bun.js/api/streams.classes.ts
|
||||
src/bun.js/api/valkey.classes.ts
|
||||
src/bun.js/api/zlib.classes.ts
|
||||
src/bun.js/node/node.classes.ts
|
||||
src/bun.js/resolve_message.classes.ts
|
||||
src/bun.js/test/jest.classes.ts
|
||||
src/bun.js/webcore/encoding.classes.ts
|
||||
src/bun.js/webcore/response.classes.ts
|
||||
File diff suppressed because it is too large
@@ -1205,6 +1205,7 @@ if(NOT BUN_CPP_ONLY)
|
||||
endif()
|
||||
|
||||
if(bunStrip)
|
||||
# First, strip bun-profile.exe to create bun.exe
|
||||
register_command(
|
||||
TARGET
|
||||
${bun}
|
||||
@@ -1225,6 +1226,48 @@ if(NOT BUN_CPP_ONLY)
|
||||
OUTPUTS
|
||||
${BUILD_PATH}/${bunStripExe}
|
||||
)
|
||||
|
||||
# Then sign both executables on Windows
|
||||
if(WIN32 AND ENABLE_WINDOWS_CODESIGNING)
|
||||
set(SIGN_SCRIPT "${CMAKE_SOURCE_DIR}/.buildkite/scripts/sign-windows.ps1")
|
||||
|
||||
# Verify signing script exists
|
||||
if(NOT EXISTS "${SIGN_SCRIPT}")
|
||||
message(FATAL_ERROR "Windows signing script not found: ${SIGN_SCRIPT}")
|
||||
endif()
|
||||
|
||||
# Use PowerShell for Windows code signing (native Windows, no path issues)
|
||||
find_program(POWERSHELL_EXECUTABLE
|
||||
NAMES pwsh.exe powershell.exe
|
||||
PATHS
|
||||
"C:/Program Files/PowerShell/7"
|
||||
"C:/Program Files (x86)/PowerShell/7"
|
||||
"C:/Windows/System32/WindowsPowerShell/v1.0"
|
||||
DOC "Path to PowerShell executable"
|
||||
)
|
||||
|
||||
if(NOT POWERSHELL_EXECUTABLE)
|
||||
set(POWERSHELL_EXECUTABLE "powershell.exe")
|
||||
endif()
|
||||
|
||||
message(STATUS "Using PowerShell executable: ${POWERSHELL_EXECUTABLE}")
|
||||
|
||||
# Sign both bun-profile.exe and bun.exe after stripping
|
||||
register_command(
|
||||
TARGET
|
||||
${bun}
|
||||
TARGET_PHASE
|
||||
POST_BUILD
|
||||
COMMENT
|
||||
"Code signing bun-profile.exe and bun.exe with DigiCert KeyLocker"
|
||||
COMMAND
|
||||
"${POWERSHELL_EXECUTABLE}" "-NoProfile" "-ExecutionPolicy" "Bypass" "-File" "${SIGN_SCRIPT}" "-BunProfileExe" "${BUILD_PATH}/${bunExe}" "-BunExe" "${BUILD_PATH}/${bunStripExe}"
|
||||
CWD
|
||||
${CMAKE_SOURCE_DIR}
|
||||
SOURCES
|
||||
${BUILD_PATH}/${bunStripExe}
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
# somehow on some Linux systems we need to disable ASLR for ASAN-instrumented binaries to run
|
||||
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION aa4997abc9126f5a7557c9ecb7e8104779d87ec4)
|
||||
set(WEBKIT_VERSION f474428677de1fafaf13bb3b9a050fe3504dda25)
|
||||
endif()
|
||||
|
||||
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
|
||||
|
||||
@@ -20,7 +20,7 @@ else()
|
||||
unsupported(CMAKE_SYSTEM_NAME)
|
||||
endif()
|
||||
|
||||
set(ZIG_COMMIT "edc6229b1fafb1701a25fb4e17114cc756991546")
|
||||
set(ZIG_COMMIT "e0b7c318f318196c5f81fdf3423816a7b5bb3112")
|
||||
optionx(ZIG_TARGET STRING "The zig target to use" DEFAULT ${DEFAULT_ZIG_TARGET})
|
||||
|
||||
if(CMAKE_BUILD_TYPE STREQUAL "Release")
|
||||
|
||||
319
docs/api/secrets.md
Normal file
@@ -0,0 +1,319 @@
|
||||
Store and retrieve sensitive credentials securely using the operating system's native credential storage APIs.
|
||||
|
||||
**Experimental:** This API is new and experimental. It may change in the future.
|
||||
|
||||
```typescript
|
||||
import { secrets } from "bun";
|
||||
|
||||
let githubToken = await secrets.get({
  service: "my-cli-tool",
  name: "github-token",
});

if (!githubToken) {
  githubToken = prompt("Please enter your GitHub token");
  await secrets.set({
    service: "my-cli-tool",
    name: "github-token",
    value: githubToken,
  });
  console.log("GitHub token stored");
}

const response = await fetch("https://api.github.com/user", {
  headers: { "Authorization": `token ${githubToken}` },
});
|
||||
```
|
||||
|
||||
## Overview
|
||||
|
||||
`Bun.secrets` provides a cross-platform API for managing sensitive credentials that CLI tools and development applications typically store in plaintext files like `~/.npmrc`, `~/.aws/credentials`, or `.env` files. It uses:
|
||||
|
||||
- **macOS**: Keychain Services
|
||||
- **Linux**: libsecret (GNOME Keyring, KWallet, etc.)
|
||||
- **Windows**: Windows Credential Manager
|
||||
|
||||
All operations are asynchronous and non-blocking, running on Bun's threadpool.
|
||||
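Because each lookup is an ordinary Promise, independent credentials can be fetched concurrently. A minimal sketch, reusing the service/name pairs from the examples in this document and assuming they were stored previously:

```typescript
import { secrets } from "bun";

// Both lookups run on the threadpool and resolve independently.
const [githubToken, npmToken] = await Promise.all([
  secrets.get({ service: "my-cli-tool", name: "github-token" }),
  secrets.get({ service: "npm-registry", name: "https://registry.npmjs.org" }),
]);
```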
|
||||
Note: in the future, we may add an additional `provider` option to make this better for production deployment secrets, but today this API is mostly useful for local development tools.
|
||||
|
||||
## API
|
||||
|
||||
### `Bun.secrets.get(options)`
|
||||
|
||||
Retrieve a stored credential.
|
||||
|
||||
```typescript
|
||||
import { secrets } from "bun";
|
||||
|
||||
const password = await Bun.secrets.get({
|
||||
service: "my-app",
|
||||
name: "alice@example.com",
|
||||
});
|
||||
// Returns: string | null
|
||||
|
||||
// Or if you prefer without an object
|
||||
const password = await Bun.secrets.get("my-app", "alice@example.com");
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
|
||||
- `options.service` (string, required) - The service or application name
|
||||
- `options.name` (string, required) - The username or account identifier
|
||||
|
||||
**Returns:**
|
||||
|
||||
- `Promise<string | null>` - The stored password, or `null` if not found
|
||||
|
||||
### `Bun.secrets.set(options, value)`
|
||||
|
||||
Store or update a credential.
|
||||
|
||||
```typescript
|
||||
import { secrets } from "bun";
|
||||
|
||||
await secrets.set({
|
||||
service: "my-app",
|
||||
name: "alice@example.com",
|
||||
value: "super-secret-password",
|
||||
});
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
|
||||
- `options.service` (string, required) - The service or application name
|
||||
- `options.name` (string, required) - The username or account identifier
|
||||
- `value` (string, required) - The password or secret to store
|
||||
|
||||
**Notes:**
|
||||
|
||||
- If a credential already exists for the given service/name combination, it will be replaced
|
||||
- The stored value is encrypted by the operating system
|
||||
|
||||
### `Bun.secrets.delete(options)`
|
||||
|
||||
Delete a stored credential.
|
||||
|
||||
```typescript
|
||||
const deleted = await Bun.secrets.delete({
|
||||
service: "my-app",
|
||||
name: "alice@example.com",
|
||||
value: "super-secret-password",
|
||||
});
|
||||
// Returns: boolean
|
||||
```
|
||||
|
||||
**Parameters:**
|
||||
|
||||
- `options.service` (string, required) - The service or application name
|
||||
- `options.name` (string, required) - The username or account identifier
|
||||
|
||||
**Returns:**
|
||||
|
||||
- `Promise<boolean>` - `true` if a credential was deleted, `false` if not found
|
||||
|
||||
## Examples
|
||||
|
||||
### Storing CLI Tool Credentials
|
||||
|
||||
```javascript
|
||||
// Store GitHub CLI token (instead of ~/.config/gh/hosts.yml)
|
||||
await Bun.secrets.set({
|
||||
service: "my-app.com",
|
||||
name: "github-token",
|
||||
value: "ghp_xxxxxxxxxxxxxxxxxxxx",
|
||||
});
|
||||
|
||||
// Or if you prefer without an object
|
||||
await Bun.secrets.set("my-app.com", "github-token", "ghp_xxxxxxxxxxxxxxxxxxxx");
|
||||
|
||||
// Store npm registry token (instead of ~/.npmrc)
|
||||
await Bun.secrets.set({
|
||||
service: "npm-registry",
|
||||
name: "https://registry.npmjs.org",
|
||||
value: "npm_xxxxxxxxxxxxxxxxxxxx",
|
||||
});
|
||||
|
||||
// Retrieve for API calls
|
||||
const token = await Bun.secrets.get({
|
||||
service: "gh-cli",
|
||||
name: "github.com",
|
||||
});
|
||||
|
||||
if (token) {
|
||||
const response = await fetch("https://api.github.com/name", {
|
||||
headers: {
|
||||
"Authorization": `token ${token}`,
|
||||
},
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Migrating from Plaintext Config Files
|
||||
|
||||
```javascript
|
||||
// Instead of storing in ~/.aws/credentials
|
||||
await Bun.secrets.set({
|
||||
service: "aws-cli",
|
||||
name: "AWS_SECRET_ACCESS_KEY",
|
||||
value: process.env.AWS_SECRET_ACCESS_KEY,
|
||||
});
|
||||
|
||||
// Instead of .env files with sensitive data
|
||||
await Bun.secrets.set({
|
||||
service: "my-app",
|
||||
name: "api-key",
|
||||
value: "sk_live_xxxxxxxxxxxxxxxxxxxx",
|
||||
});
|
||||
|
||||
// Load at runtime
|
||||
const apiKey =
|
||||
(await Bun.secrets.get({
|
||||
service: "my-app",
|
||||
name: "api-key",
|
||||
})) || process.env.API_KEY; // Fallback for CI/production
|
||||
```
|
||||
|
||||
### Error Handling
|
||||
|
||||
```javascript
|
||||
try {
|
||||
await Bun.secrets.set({
|
||||
service: "my-app",
|
||||
name: "alice",
|
||||
value: "password123",
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Failed to store credential:", error.message);
|
||||
}
|
||||
|
||||
// Check if a credential exists
|
||||
const password = await Bun.secrets.get({
|
||||
service: "my-app",
|
||||
name: "alice",
|
||||
});
|
||||
|
||||
if (password === null) {
|
||||
console.log("No credential found");
|
||||
}
|
||||
```
|
||||
|
||||
### Updating Credentials
|
||||
|
||||
```javascript
|
||||
// Initial password
|
||||
await Bun.secrets.set({
|
||||
service: "email-server",
|
||||
name: "admin@example.com",
|
||||
value: "old-password",
|
||||
});
|
||||
|
||||
// Update to new password
|
||||
await Bun.secrets.set({
|
||||
service: "email-server",
|
||||
name: "admin@example.com",
|
||||
value: "new-password",
|
||||
});
|
||||
|
||||
// The old password is replaced
|
||||
```
|
||||
|
||||
## Platform Behavior
|
||||
|
||||
### macOS (Keychain)
|
||||
|
||||
- Credentials are stored in the user's login keychain
|
||||
- The keychain may prompt for access permission on first use
|
||||
- Credentials persist across system restarts
|
||||
- Accessible only by the user who stored them
|
||||
|
||||
### Linux (libsecret)
|
||||
|
||||
- Requires a secret service daemon (GNOME Keyring, KWallet, etc.)
|
||||
- Credentials are stored in the default collection
|
||||
- May prompt for unlock if the keyring is locked
|
||||
- The secret service must be running
|
||||
|
||||
### Windows (Credential Manager)
|
||||
|
||||
- Credentials are stored in Windows Credential Manager
|
||||
- Visible in Control Panel → Credential Manager → Windows Credentials
|
||||
- Persisted with the `CRED_PERSIST_ENTERPRISE` flag, so they're scoped per user
|
||||
- Encrypted using Windows Data Protection API
|
||||
|
||||
## Security Considerations
|
||||
|
||||
1. **Encryption**: Credentials are encrypted by the operating system's credential manager
|
||||
2. **Access Control**: Only the user who stored the credential can retrieve it
|
||||
3. **No Plain Text**: Passwords are never stored in plain text
|
||||
4. **Memory Safety**: Bun zeros out password memory after use
|
||||
5. **Process Isolation**: Credentials are isolated per user account
|
||||
|
||||
## Limitations
|
||||
|
||||
- Maximum password length varies by platform (typically 2048-4096 bytes)
|
||||
- Service and account names should be reasonable lengths (< 256 characters)
|
||||
- Some special characters may need escaping depending on the platform
|
||||
- Requires appropriate system services:
|
||||
- Linux: Secret service daemon must be running
|
||||
- macOS: Keychain Access must be available
|
||||
- Windows: Credential Manager service must be enabled
|
||||
|
||||
## Comparison with Environment Variables
|
||||
|
||||
Unlike environment variables, `Bun.secrets`:
|
||||
|
||||
- ✅ Encrypts credentials at rest (thanks to the operating system)
|
||||
- ✅ Avoids exposing secrets in process memory dumps (memory is zeroed after it's no longer needed)
|
||||
- ✅ Survives application restarts
|
||||
- ✅ Can be updated without restarting the application
|
||||
- ✅ Provides user-level access control
|
||||
- ❌ Requires OS credential service
|
||||
- ❌ Not very useful for deployment secrets (use environment variables in production)
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Use descriptive service names**: Match the tool or application name
|
||||
If you're building a CLI for external use, you probably should use a UTI (Uniform Type Identifier) for the service name.
|
||||
|
||||
```javascript
|
||||
// Good - matches the actual tool
|
||||
{ service: "com.docker.hub", name: "username" }
|
||||
{ service: "com.vercel.cli", name: "team-name" }
|
||||
|
||||
// Avoid - too generic
|
||||
{ service: "api", name: "key" }
|
||||
```
|
||||
|
||||
2. **Credentials-only**: Don't store application configuration in this API
|
||||
This API is slow, so keep non-sensitive configuration in a regular config file (see the sketch after this list).
|
||||
|
||||
3. **Use for local development tools**:
|
||||
- ✅ CLI tools (gh, npm, docker, kubectl)
|
||||
- ✅ Local development servers
|
||||
- ✅ Personal API keys for testing
|
||||
- ❌ Production servers (use proper secret management)
|
||||
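For example, a minimal sketch of that split; the `config.json` fields and endpoint are hypothetical, and only the API key goes through `Bun.secrets`:

```javascript
import { secrets } from "bun";

// Non-sensitive settings stay in a plain config file...
const config = await Bun.file("config.json").json(); // e.g. { apiUrl: "https://api.example.com" }

// ...while the credential is kept in the OS credential store.
const apiKey = await secrets.get({ service: "my-app", name: "api-key" });

const response = await fetch(`${config.apiUrl}/status`, {
  headers: { "Authorization": `Bearer ${apiKey}` },
});
```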
|
||||
## TypeScript
|
||||
|
||||
```typescript
|
||||
namespace Bun {
|
||||
interface SecretsOptions {
|
||||
service: string;
|
||||
name: string;
|
||||
}
|
||||
|
||||
interface Secrets {
|
||||
get(options: SecretsOptions): Promise<string | null>;
|
||||
set(options: SecretsOptions, value: string): Promise<void>;
|
||||
delete(options: SecretsOptions): Promise<boolean>;
|
||||
}
|
||||
|
||||
const secrets: Secrets;
|
||||
}
|
||||
```
|
||||
|
||||
## See Also
|
||||
|
||||
- [Environment Variables](./env.md) - For deployment configuration
|
||||
- [Bun.password](./password.md) - For password hashing and verification
|
||||
718
docs/api/sql.md
@@ -1,20 +1,27 @@
|
||||
Bun provides native bindings for working with PostgreSQL databases with a modern, Promise-based API. The interface is designed to be simple and performant, using tagged template literals for queries and offering features like connection pooling, transactions, and prepared statements.
|
||||
Bun provides native bindings for working with SQL databases through a unified Promise-based API that supports PostgreSQL, MySQL, and SQLite. The interface is designed to be simple and performant, using tagged template literals for queries and offering features like connection pooling, transactions, and prepared statements.
|
||||
|
||||
```ts
|
||||
import { sql } from "bun";
|
||||
import { sql, SQL } from "bun";
|
||||
|
||||
// PostgreSQL (default)
|
||||
const users = await sql`
|
||||
SELECT * FROM users
|
||||
WHERE active = ${true}
|
||||
LIMIT ${10}
|
||||
`;
|
||||
|
||||
// Select with multiple conditions
|
||||
const activeUsers = await sql`
|
||||
SELECT *
|
||||
FROM users
|
||||
WHERE active = ${true}
|
||||
AND age >= ${18}
|
||||
// With MySQL
|
||||
const mysql = new SQL("mysql://user:pass@localhost:3306/mydb");
|
||||
const mysqlResults = await mysql`
|
||||
SELECT * FROM users
|
||||
WHERE active = ${true}
|
||||
`;
|
||||
|
||||
// With SQLite
|
||||
const sqlite = new SQL("sqlite://myapp.db");
|
||||
const sqliteResults = await sqlite`
|
||||
SELECT * FROM users
|
||||
WHERE active = ${1}
|
||||
`;
|
||||
```
|
||||
|
||||
@@ -44,6 +51,186 @@ const activeUsers = await sql`
|
||||
|
||||
{% /features %}
|
||||
|
||||
## Database Support
|
||||
|
||||
Bun.SQL provides a unified API for multiple database systems:
|
||||
|
||||
### PostgreSQL
|
||||
|
||||
PostgreSQL is used when:
|
||||
|
||||
- The connection string doesn't match SQLite or MySQL patterns (it's the fallback adapter)
|
||||
- The connection string explicitly uses `postgres://` or `postgresql://` protocols
|
||||
- No connection string is provided and environment variables point to PostgreSQL
|
||||
|
||||
```ts
|
||||
import { sql } from "bun";
|
||||
// Uses PostgreSQL if DATABASE_URL is not set or is a PostgreSQL URL
|
||||
await sql`SELECT ...`;
|
||||
|
||||
import { SQL } from "bun";
|
||||
const pg = new SQL("postgres://user:pass@localhost:5432/mydb");
|
||||
await pg`SELECT ...`;
|
||||
```
|
||||
|
||||
### MySQL
|
||||
|
||||
MySQL support is built into Bun.SQL, providing the same tagged template literal interface with full compatibility for MySQL 5.7+ and MySQL 8.0+:
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
// MySQL connection
|
||||
const mysql = new SQL("mysql://user:password@localhost:3306/database");
|
||||
const mysql2 = new SQL("mysql2://user:password@localhost:3306/database"); // mysql2 protocol also works
|
||||
|
||||
// Using options object
|
||||
const mysql3 = new SQL({
|
||||
adapter: "mysql",
|
||||
hostname: "localhost",
|
||||
port: 3306,
|
||||
database: "myapp",
|
||||
username: "dbuser",
|
||||
password: "secretpass",
|
||||
});
|
||||
|
||||
// Works with parameters - automatically uses prepared statements
|
||||
const users = await mysql`SELECT * FROM users WHERE id = ${userId}`;
|
||||
|
||||
// Transactions work the same as PostgreSQL
|
||||
await mysql.begin(async tx => {
|
||||
await tx`INSERT INTO users (name) VALUES (${"Alice"})`;
|
||||
await tx`UPDATE accounts SET balance = balance - 100 WHERE user_id = ${userId}`;
|
||||
});
|
||||
|
||||
// Bulk inserts
|
||||
const newUsers = [
|
||||
{ name: "Alice", email: "alice@example.com" },
|
||||
{ name: "Bob", email: "bob@example.com" },
|
||||
];
|
||||
await mysql`INSERT INTO users ${mysql(newUsers)}`;
|
||||
```
|
||||
|
||||
{% details summary="MySQL Connection String Formats" %}
|
||||
|
||||
MySQL accepts various URL formats for connection strings:
|
||||
|
||||
```ts
|
||||
// Standard mysql:// protocol
|
||||
new SQL("mysql://user:pass@localhost:3306/database");
|
||||
new SQL("mysql://user:pass@localhost/database"); // Default port 3306
|
||||
|
||||
// mysql2:// protocol (compatibility with mysql2 npm package)
|
||||
new SQL("mysql2://user:pass@localhost:3306/database");
|
||||
|
||||
// With query parameters
|
||||
new SQL("mysql://user:pass@localhost/db?ssl=true");
|
||||
|
||||
// Unix socket connection
|
||||
new SQL("mysql://user:pass@/database?socket=/var/run/mysqld/mysqld.sock");
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
{% details summary="MySQL-Specific Features" %}
|
||||
|
||||
MySQL databases support:
|
||||
|
||||
- **Prepared statements**: Automatically created for parameterized queries with statement caching
|
||||
- **Binary protocol**: For better performance with prepared statements and accurate type handling
|
||||
- **Multiple result sets**: Support for stored procedures returning multiple result sets
|
||||
- **Authentication plugins**: Support for mysql_native_password, caching_sha2_password (MySQL 8.0 default), and sha256_password
|
||||
- **SSL/TLS connections**: Configurable SSL modes similar to PostgreSQL
|
||||
- **Connection attributes**: Client information sent to server for monitoring
|
||||
- **Query pipelining**: Execute multiple prepared statements without waiting for responses (see the sketch after this list)
|
||||
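As a rough illustration of issuing several parameterized queries without waiting on each response in turn (whether they are actually pipelined on a single connection depends on the driver and pool state), a sketch with illustrative table names:

```ts
import { SQL } from "bun";

const mysql = new SQL("mysql://user:pass@localhost:3306/mydb");

// Start all three queries up front, then await the results together.
const [users, orders, sessionCount] = await Promise.all([
  mysql`SELECT * FROM users WHERE active = ${true}`,
  mysql`SELECT * FROM orders WHERE created_at > ${new Date("2024-01-01")}`,
  mysql`SELECT COUNT(*) AS n FROM sessions`,
]);
```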
|
||||
{% /details %}
|
||||
|
||||
### SQLite
|
||||
|
||||
SQLite support is built into Bun.SQL, providing the same tagged template literal interface:
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
// In-memory database
|
||||
const memory = new SQL(":memory:");
|
||||
const memory2 = new SQL("sqlite://:memory:");
|
||||
|
||||
// File-based database
|
||||
const db = new SQL("sqlite://myapp.db");
|
||||
|
||||
// Using options object
|
||||
const db2 = new SQL({
|
||||
adapter: "sqlite",
|
||||
filename: "./data/app.db",
|
||||
});
|
||||
|
||||
// For simple filenames, specify adapter explicitly
|
||||
const db3 = new SQL("myapp.db", { adapter: "sqlite" });
|
||||
```
|
||||
|
||||
{% details summary="SQLite Connection String Formats" %}
|
||||
|
||||
SQLite accepts various URL formats for connection strings:
|
||||
|
||||
```ts
|
||||
// Standard sqlite:// protocol
|
||||
new SQL("sqlite://path/to/database.db");
|
||||
new SQL("sqlite:path/to/database.db"); // Without slashes
|
||||
|
||||
// file:// protocol (also recognized as SQLite)
|
||||
new SQL("file://path/to/database.db");
|
||||
new SQL("file:path/to/database.db");
|
||||
|
||||
// Special :memory: database
|
||||
new SQL(":memory:");
|
||||
new SQL("sqlite://:memory:");
|
||||
new SQL("file://:memory:");
|
||||
|
||||
// Relative and absolute paths
|
||||
new SQL("sqlite://./local.db"); // Relative to current directory
|
||||
new SQL("sqlite://../parent/db.db"); // Parent directory
|
||||
new SQL("sqlite:///absolute/path.db"); // Absolute path
|
||||
|
||||
// With query parameters
|
||||
new SQL("sqlite://data.db?mode=ro"); // Read-only mode
|
||||
new SQL("sqlite://data.db?mode=rw"); // Read-write mode (no create)
|
||||
new SQL("sqlite://data.db?mode=rwc"); // Read-write-create mode (default)
|
||||
```
|
||||
|
||||
**Note:** Simple filenames without a protocol (like `"myapp.db"`) require explicitly specifying `{ adapter: "sqlite" }` to avoid ambiguity with PostgreSQL.
|
||||
|
||||
{% /details %}
|
||||
|
||||
{% details summary="SQLite-Specific Options" %}
|
||||
|
||||
SQLite databases support additional configuration options:
|
||||
|
||||
```ts
|
||||
const db = new SQL({
|
||||
adapter: "sqlite",
|
||||
filename: "app.db",
|
||||
|
||||
// SQLite-specific options
|
||||
readonly: false, // Open in read-only mode
|
||||
create: true, // Create database if it doesn't exist
|
||||
readwrite: true, // Open for reading and writing
|
||||
|
||||
// Additional Bun:sqlite options
|
||||
strict: true, // Enable strict mode
|
||||
safeIntegers: false, // Use JavaScript numbers for integers
|
||||
});
|
||||
```
|
||||
|
||||
Query parameters in the URL are parsed to set these options:
|
||||
|
||||
- `?mode=ro` → `readonly: true`
|
||||
- `?mode=rw` → `readonly: false, create: false`
|
||||
- `?mode=rwc` → `readonly: false, create: true` (default)
|
||||
|
||||
{% /details %}
|
||||
|
||||
### Inserting data
|
||||
|
||||
You can pass JavaScript values directly to the SQL template literal and escaping will be handled for you.
|
||||
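For example, a minimal sketch (the `users` table and its columns are illustrative):

```ts
import { sql } from "bun";

const user = { name: "Alice", email: "alice@example.com" };

// Interpolated values are sent as bound parameters, so escaping is handled automatically.
await sql`INSERT INTO users (name, email) VALUES (${user.name}, ${user.email})`;

// Objects can also be expanded into a column/value list, as in the bulk-insert form shown earlier.
await sql`INSERT INTO users ${sql(user)}`;
```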
@@ -251,14 +438,97 @@ await query;
|
||||
|
||||
## Database Environment Variables
|
||||
|
||||
`sql` connection parameters can be configured using environment variables. The client checks these variables in a specific order of precedence.
|
||||
`sql` connection parameters can be configured using environment variables. The client checks these variables in a specific order of precedence and automatically detects the database type based on the connection string format.
|
||||
|
||||
The following environment variables can be used to define the connection URL:
|
||||
### Automatic Database Detection
|
||||
|
||||
When using `Bun.sql()` without arguments or `new SQL()` with a connection string, the adapter is automatically detected based on the URL format:
|
||||
|
||||
#### MySQL Auto-Detection
|
||||
|
||||
MySQL is automatically selected when the connection string matches these patterns:
|
||||
|
||||
- `mysql://...` - MySQL protocol URLs
|
||||
- `mysql2://...` - MySQL2 protocol URLs (compatibility alias)
|
||||
|
||||
```ts
|
||||
// These all use MySQL automatically (no adapter needed)
|
||||
const sql1 = new SQL("mysql://user:pass@localhost/mydb");
|
||||
const sql2 = new SQL("mysql2://user:pass@localhost:3306/mydb");
|
||||
|
||||
// Works with DATABASE_URL environment variable
|
||||
DATABASE_URL="mysql://user:pass@localhost/mydb" bun run app.js
|
||||
DATABASE_URL="mysql2://user:pass@localhost:3306/mydb" bun run app.js
|
||||
```
|
||||
|
||||
#### SQLite Auto-Detection
|
||||
|
||||
SQLite is automatically selected when the connection string matches these patterns:
|
||||
|
||||
- `:memory:` - In-memory database
|
||||
- `sqlite://...` - SQLite protocol URLs
|
||||
- `sqlite:...` - SQLite protocol without slashes
|
||||
- `file://...` - File protocol URLs
|
||||
- `file:...` - File protocol without slashes
|
||||
|
||||
```ts
|
||||
// These all use SQLite automatically (no adapter needed)
|
||||
const sql1 = new SQL(":memory:");
|
||||
const sql2 = new SQL("sqlite://app.db");
|
||||
const sql3 = new SQL("file://./database.db");
|
||||
|
||||
// Works with DATABASE_URL environment variable
|
||||
DATABASE_URL=":memory:" bun run app.js
|
||||
DATABASE_URL="sqlite://myapp.db" bun run app.js
|
||||
DATABASE_URL="file://./data/app.db" bun run app.js
|
||||
```
|
||||
|
||||
#### PostgreSQL Auto-Detection
|
||||
|
||||
PostgreSQL is the default for connection strings that don't match MySQL or SQLite patterns:
|
||||
|
||||
```bash
|
||||
# PostgreSQL is detected for these patterns
|
||||
DATABASE_URL="postgres://user:pass@localhost:5432/mydb" bun run app.js
|
||||
DATABASE_URL="postgresql://user:pass@localhost:5432/mydb" bun run app.js
|
||||
|
||||
# Or any URL that doesn't match MySQL or SQLite patterns
|
||||
DATABASE_URL="localhost:5432/mydb" bun run app.js
|
||||
```
|
||||
|
||||
### MySQL Environment Variables
|
||||
|
||||
MySQL connections can be configured via environment variables:
|
||||
|
||||
```bash
|
||||
# Primary connection URL (checked first)
|
||||
MYSQL_URL="mysql://user:pass@localhost:3306/mydb"
|
||||
|
||||
# Alternative: DATABASE_URL with MySQL protocol
|
||||
DATABASE_URL="mysql://user:pass@localhost:3306/mydb"
|
||||
DATABASE_URL="mysql2://user:pass@localhost:3306/mydb"
|
||||
```
|
||||
|
||||
If no connection URL is provided, MySQL checks these individual parameters:
|
||||
|
||||
| Environment Variable | Default Value | Description |
|
||||
| ------------------------ | ------------- | -------------------------------- |
|
||||
| `MYSQL_HOST` | `localhost` | Database host |
|
||||
| `MYSQL_PORT` | `3306` | Database port |
|
||||
| `MYSQL_USER` | `root` | Database user |
|
||||
| `MYSQL_PASSWORD` | (empty) | Database password |
|
||||
| `MYSQL_DATABASE` | `mysql` | Database name |
|
||||
| `MYSQL_URL` | (empty) | Primary connection URL for MySQL |
|
||||
| `TLS_MYSQL_DATABASE_URL` | (empty) | SSL/TLS-enabled connection URL |
|
||||
|
||||
### PostgreSQL Environment Variables
|
||||
|
||||
The following environment variables can be used to define the PostgreSQL connection:
|
||||
|
||||
| Environment Variable | Description |
|
||||
| --------------------------- | ------------------------------------------ |
|
||||
| `POSTGRES_URL` | Primary connection URL for PostgreSQL |
|
||||
| `DATABASE_URL` | Alternative connection URL |
|
||||
| `DATABASE_URL` | Alternative connection URL (auto-detected) |
|
||||
| `PGURL` | Alternative connection URL |
|
||||
| `PG_URL` | Alternative connection URL |
|
||||
| `TLS_POSTGRES_DATABASE_URL` | SSL/TLS-enabled connection URL |
|
||||
@@ -274,6 +544,19 @@ If no connection URL is provided, the system checks for the following individual
|
||||
| `PGPASSWORD` | - | (empty) | Database password |
|
||||
| `PGDATABASE` | - | username | Database name |
|
||||
|
||||
### SQLite Environment Variables
|
||||
|
||||
SQLite connections can be configured via `DATABASE_URL` when it contains a SQLite-compatible URL:
|
||||
|
||||
```bash
|
||||
# These are all recognized as SQLite
|
||||
DATABASE_URL=":memory:"
|
||||
DATABASE_URL="sqlite://./app.db"
|
||||
DATABASE_URL="file:///absolute/path/to/db.sqlite"
|
||||
```
|
||||
|
||||
**Note:** PostgreSQL-specific environment variables (`POSTGRES_URL`, `PGHOST`, etc.) are ignored when using SQLite.
|
||||
|
||||
## Runtime Preconnection
|
||||
|
||||
Bun can preconnect to PostgreSQL at startup to improve performance by establishing database connections before your application code runs. This is useful for reducing connection latency on the first database query.
|
||||
@@ -293,16 +576,66 @@ The `--sql-preconnect` flag will automatically establish a PostgreSQL connection
|
||||
|
||||
## Connection Options
|
||||
|
||||
You can configure your database connection manually by passing options to the SQL constructor:
|
||||
You can configure your database connection manually by passing options to the SQL constructor. Options vary depending on the database adapter:
|
||||
|
||||
### MySQL Options
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
const db = new SQL({
|
||||
// Required
|
||||
// Required for MySQL when using options object
|
||||
adapter: "mysql",
|
||||
|
||||
// Connection details
|
||||
hostname: "localhost",
|
||||
port: 3306,
|
||||
database: "myapp",
|
||||
username: "dbuser",
|
||||
password: "secretpass",
|
||||
|
||||
// Unix socket connection (alternative to hostname/port)
|
||||
// socket: "/var/run/mysqld/mysqld.sock",
|
||||
|
||||
// Connection pool settings
|
||||
max: 20, // Maximum connections in pool (default: 10)
|
||||
idleTimeout: 30, // Close idle connections after 30s
|
||||
maxLifetime: 0, // Connection lifetime in seconds (0 = forever)
|
||||
connectionTimeout: 30, // Timeout when establishing new connections
|
||||
|
||||
// SSL/TLS options
|
||||
ssl: "prefer", // or "disable", "require", "verify-ca", "verify-full"
|
||||
// tls: {
|
||||
// rejectUnauthorized: true,
|
||||
// ca: "path/to/ca.pem",
|
||||
// key: "path/to/key.pem",
|
||||
// cert: "path/to/cert.pem",
|
||||
// },
|
||||
|
||||
// Callbacks
|
||||
onconnect: client => {
|
||||
console.log("Connected to MySQL");
|
||||
},
|
||||
onclose: (client, err) => {
|
||||
if (err) {
|
||||
console.error("MySQL connection error:", err);
|
||||
} else {
|
||||
console.log("MySQL connection closed");
|
||||
}
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### PostgreSQL Options
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
const db = new SQL({
|
||||
// Connection details (adapter is auto-detected as PostgreSQL)
|
||||
url: "postgres://user:pass@localhost:5432/dbname",
|
||||
|
||||
// Optional configuration
|
||||
// Alternative connection parameters
|
||||
hostname: "localhost",
|
||||
port: 5432,
|
||||
database: "myapp",
|
||||
@@ -330,14 +663,52 @@ const db = new SQL({
|
||||
|
||||
// Callbacks
|
||||
onconnect: client => {
|
||||
console.log("Connected to database");
|
||||
console.log("Connected to PostgreSQL");
|
||||
},
|
||||
onclose: client => {
|
||||
console.log("Connection closed");
|
||||
console.log("PostgreSQL connection closed");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### SQLite Options
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
const db = new SQL({
|
||||
// Required for SQLite
|
||||
adapter: "sqlite",
|
||||
filename: "./data/app.db", // or ":memory:" for in-memory database
|
||||
|
||||
// SQLite-specific access modes
|
||||
readonly: false, // Open in read-only mode
|
||||
create: true, // Create database if it doesn't exist
|
||||
readwrite: true, // Allow read and write operations
|
||||
|
||||
// SQLite data handling
|
||||
strict: true, // Enable strict mode for better type safety
|
||||
safeIntegers: false, // Use BigInt for integers exceeding JS number range
|
||||
|
||||
// Callbacks
|
||||
onconnect: client => {
|
||||
console.log("SQLite database opened");
|
||||
},
|
||||
onclose: client => {
|
||||
console.log("SQLite database closed");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
{% details summary="SQLite Connection Notes" %}
|
||||
|
||||
- **Connection Pooling**: SQLite doesn't use connection pooling as it's a file-based database. Each `SQL` instance represents a single connection.
|
||||
- **Transactions**: SQLite supports nested transactions through savepoints, similar to PostgreSQL.
|
||||
- **Concurrent Access**: SQLite handles concurrent access through file locking. Use WAL mode for better concurrency.
|
||||
- **Memory Databases**: Using `:memory:` creates a temporary database that exists only for the connection lifetime.
|
||||
|
||||
{% /details %}
|
||||
|
||||
## Dynamic passwords
|
||||
|
||||
When clients need to use alternative authentication schemes such as access tokens or connections to databases with rotating passwords, provide either a synchronous or asynchronous function that will resolve the dynamic password value at connection time.
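For example, a minimal sketch — `getAuthToken` is a hypothetical helper standing in for whatever produces your short-lived credential:

```ts
import { SQL } from "bun";

// Hypothetical helper that returns a short-lived credential
// (for example an IAM token or a secret fetched from a vault).
async function getAuthToken(): Promise<string> {
  return process.env.DB_TOKEN ?? "";
}

const sql = new SQL("postgres://app_user@localhost:5432/mydb", {
  // Resolved at connection time, so each new connection picks up the current password.
  password: getAuthToken,
});
```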
|
||||
@@ -353,11 +724,66 @@ const sql = new SQL(url, {
|
||||
});
|
||||
```
|
||||
|
||||
## SQLite-Specific Features
|
||||
|
||||
### Query Execution
|
||||
|
||||
SQLite executes queries synchronously, unlike PostgreSQL which uses asynchronous I/O. However, the API remains consistent using Promises:
|
||||
|
||||
```ts
|
||||
const sqlite = new SQL("sqlite://app.db");
|
||||
|
||||
// Works the same as PostgreSQL, but executes synchronously under the hood
|
||||
const users = await sqlite`SELECT * FROM users`;
|
||||
|
||||
// Parameters work identically
|
||||
const user = await sqlite`SELECT * FROM users WHERE id = ${userId}`;
|
||||
```
|
||||
|
||||
### SQLite Pragmas
|
||||
|
||||
You can use PRAGMA statements to configure SQLite behavior:
|
||||
|
||||
```ts
|
||||
const sqlite = new SQL("sqlite://app.db");
|
||||
|
||||
// Enable foreign keys
|
||||
await sqlite`PRAGMA foreign_keys = ON`;
|
||||
|
||||
// Set journal mode to WAL for better concurrency
|
||||
await sqlite`PRAGMA journal_mode = WAL`;
|
||||
|
||||
// Check integrity
|
||||
const integrity = await sqlite`PRAGMA integrity_check`;
|
||||
```
|
||||
|
||||
### Data Type Differences
|
||||
|
||||
SQLite has a more flexible type system than PostgreSQL:
|
||||
|
||||
```ts
|
||||
// SQLite stores data in 5 storage classes: NULL, INTEGER, REAL, TEXT, BLOB
|
||||
const sqlite = new SQL("sqlite://app.db");
|
||||
|
||||
// SQLite is more lenient with types
|
||||
await sqlite`
|
||||
CREATE TABLE flexible (
|
||||
id INTEGER PRIMARY KEY,
|
||||
data TEXT, -- Can store numbers as strings
|
||||
value NUMERIC, -- Can store integers, reals, or text
|
||||
blob BLOB -- Binary data
|
||||
)
|
||||
`;
|
||||
|
||||
// JavaScript values are automatically converted
|
||||
await sqlite`INSERT INTO flexible VALUES (${1}, ${"text"}, ${123.45}, ${Buffer.from("binary")})`;
|
||||
```
|
||||
|
||||
## Transactions
|
||||
|
||||
To start a new transaction, use `sql.begin`. This method reserves a dedicated connection for the duration of the transaction and provides a scoped `sql` instance to use within the callback function. Once the callback completes, `sql.begin` resolves with the return value of the callback.
|
||||
To start a new transaction, use `sql.begin`. This method works for both PostgreSQL and SQLite. For PostgreSQL, it reserves a dedicated connection from the pool. For SQLite, it begins a transaction on the single connection.
|
||||
|
||||
The `BEGIN` command is sent automatically, including any optional configurations you specify. If an error occurs during the transaction, a `ROLLBACK` is triggered to release the reserved connection and ensure the process continues smoothly.
|
||||
The `BEGIN` command is sent automatically, including any optional configurations you specify. If an error occurs during the transaction, a `ROLLBACK` is triggered to ensure the process continues smoothly.
|
||||
|
||||
### Basic Transactions
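A minimal sketch of the callback form described above (the `accounts` and `transfers` tables are illustrative):

```ts
const account = await sql.begin(async sql => {
  // Every query in this callback runs on the same connection, inside one transaction.
  const [account] = await sql`SELECT * FROM accounts WHERE id = ${1}`;
  await sql`UPDATE accounts SET balance = balance - ${100} WHERE id = ${1}`;
  await sql`INSERT INTO transfers (account_id, amount) VALUES (${1}, ${-100})`;
  return account;
});
// If the callback throws, a ROLLBACK is issued; otherwise the callback's return
// value becomes the resolved value of `sql.begin`.
```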
|
||||
|
||||
@@ -552,9 +978,36 @@ Note that disabling prepared statements may impact performance for queries that
|
||||
|
||||
## Error Handling
|
||||
|
||||
The client provides typed errors for different failure scenarios:
|
||||
The client provides typed errors for different failure scenarios. Errors are database-specific and extend from base error classes:
|
||||
|
||||
### Connection Errors
|
||||
### Error Classes
|
||||
|
||||
```ts
|
||||
import { SQL } from "bun";
|
||||
|
||||
try {
|
||||
await sql`SELECT * FROM users`;
|
||||
} catch (error) {
|
||||
if (error instanceof SQL.PostgresError) {
|
||||
// PostgreSQL-specific error
|
||||
console.log(error.code); // PostgreSQL error code
|
||||
console.log(error.detail); // Detailed error message
|
||||
console.log(error.hint); // Helpful hint from PostgreSQL
|
||||
} else if (error instanceof SQL.SQLiteError) {
|
||||
// SQLite-specific error
|
||||
console.log(error.code); // SQLite error code (e.g., "SQLITE_CONSTRAINT")
|
||||
console.log(error.errno); // SQLite error number
|
||||
console.log(error.byteOffset); // Byte offset in SQL statement (if available)
|
||||
} else if (error instanceof SQL.SQLError) {
|
||||
// Generic SQL error (base class)
|
||||
console.log(error.message);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{% details summary="PostgreSQL-Specific Error Codes" %}
|
||||
|
||||
### PostgreSQL Connection Errors
|
||||
|
||||
| Connection Errors | Description |
|
||||
| --------------------------------- | ---------------------------------------------------- |
|
||||
@@ -619,6 +1072,51 @@ The client provides typed errors for different failure scenarios:
|
||||
| `ERR_POSTGRES_UNSAFE_TRANSACTION` | Unsafe transaction operation detected |
|
||||
| `ERR_POSTGRES_INVALID_TRANSACTION_STATE` | Invalid transaction state |
|
||||
|
||||
{% /details %}
|
||||
|
||||
### SQLite-Specific Errors
|
||||
|
||||
SQLite errors provide error codes and numbers that correspond to SQLite's standard error codes:
|
||||
|
||||
{% details summary="Common SQLite Error Codes" %}
|
||||
|
||||
| Error Code | errno | Description |
|
||||
| ------------------- | ----- | ---------------------------------------------------- |
|
||||
| `SQLITE_CONSTRAINT` | 19 | Constraint violation (UNIQUE, CHECK, NOT NULL, etc.) |
|
||||
| `SQLITE_BUSY` | 5 | Database is locked |
|
||||
| `SQLITE_LOCKED` | 6 | Table in the database is locked |
|
||||
| `SQLITE_READONLY` | 8 | Attempt to write to a readonly database |
|
||||
| `SQLITE_IOERR` | 10 | Disk I/O error |
|
||||
| `SQLITE_CORRUPT` | 11 | Database disk image is malformed |
|
||||
| `SQLITE_FULL` | 13 | Database or disk is full |
|
||||
| `SQLITE_CANTOPEN` | 14 | Unable to open database file |
|
||||
| `SQLITE_PROTOCOL` | 15 | Database lock protocol error |
|
||||
| `SQLITE_SCHEMA` | 17 | Database schema has changed |
|
||||
| `SQLITE_TOOBIG` | 18 | String or BLOB exceeds size limit |
|
||||
| `SQLITE_MISMATCH` | 20 | Data type mismatch |
|
||||
| `SQLITE_MISUSE` | 21 | Library used incorrectly |
|
||||
| `SQLITE_AUTH` | 23 | Authorization denied |
|
||||
|
||||
Example error handling:
|
||||
|
||||
```ts
|
||||
const sqlite = new SQL("sqlite://app.db");
|
||||
|
||||
try {
|
||||
await sqlite`INSERT INTO users (id, name) VALUES (1, 'Alice')`;
|
||||
await sqlite`INSERT INTO users (id, name) VALUES (1, 'Bob')`; // Duplicate ID
|
||||
} catch (error) {
|
||||
if (error instanceof SQL.SQLiteError) {
|
||||
if (error.code === "SQLITE_CONSTRAINT") {
|
||||
console.log("Constraint violation:", error.message);
|
||||
// Handle unique constraint violation
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
{% /details %}
|
||||
|
||||
## Numbers and BigInt
|
||||
|
||||
Bun's SQL client includes special handling for large numbers that exceed the range of a 53-bit integer. Here's how it works:
|
||||
@@ -651,12 +1149,106 @@ console.log(typeof x, x); // "bigint" 9223372036854777n
|
||||
There are still some things we haven't finished yet.
|
||||
|
||||
- Connection preloading via `--db-preconnect` Bun CLI flag
|
||||
- MySQL support: [we're working on it](https://github.com/oven-sh/bun/pull/15274)
|
||||
- SQLite support: planned, but not started. Ideally, we implement it natively instead of wrapping `bun:sqlite`.
|
||||
- Column name transforms (e.g. `snake_case` to `camelCase`). This is mostly blocked on a unicode-aware implementation of changing the case in C++ using WebKit's `WTF::String`.
|
||||
- Column type transforms
|
||||
|
||||
### Postgres-specific features
|
||||
## Database-Specific Features
|
||||
|
||||
#### Authentication Methods
|
||||
|
||||
MySQL supports multiple authentication plugins that are automatically negotiated:
|
||||
|
||||
- **`mysql_native_password`** - Traditional MySQL authentication, widely compatible
|
||||
- **`caching_sha2_password`** - Default in MySQL 8.0+, more secure with RSA key exchange
|
||||
- **`sha256_password`** - SHA-256 based authentication
|
||||
|
||||
The client automatically handles authentication plugin switching when requested by the server, including secure password exchange over non-SSL connections.
|
||||
|
||||
#### Prepared Statements & Performance
|
||||
|
||||
MySQL uses server-side prepared statements for all parameterized queries:
|
||||
|
||||
```ts
|
||||
// This automatically creates a prepared statement on the server
|
||||
const user = await mysql`SELECT * FROM users WHERE id = ${userId}`;
|
||||
|
||||
// Prepared statements are cached and reused for identical queries
|
||||
for (const id of userIds) {
|
||||
// Same prepared statement is reused
|
||||
await mysql`SELECT * FROM users WHERE id = ${id}`;
|
||||
}
|
||||
|
||||
// Query pipelining - multiple statements sent without waiting
|
||||
const [users, orders, products] = await Promise.all([
|
||||
mysql`SELECT * FROM users WHERE active = ${true}`,
|
||||
mysql`SELECT * FROM orders WHERE status = ${"pending"}`,
|
||||
mysql`SELECT * FROM products WHERE in_stock = ${true}`,
|
||||
]);
|
||||
```
|
||||
|
||||
#### Multiple Result Sets
|
||||
|
||||
MySQL can return multiple result sets from multi-statement queries:
|
||||
|
||||
```ts
|
||||
const mysql = new SQL("mysql://user:pass@localhost/mydb");
|
||||
|
||||
// Multi-statement queries with simple() method
|
||||
const multiResults = await mysql`
|
||||
SELECT * FROM users WHERE id = 1;
|
||||
SELECT * FROM orders WHERE user_id = 1;
|
||||
`.simple();
|
||||
```
|
||||
|
||||
#### Character Sets & Collations
|
||||
|
||||
Bun.SQL automatically uses the `utf8mb4` character set for MySQL connections, ensuring full Unicode support including emojis. This is the recommended character set for modern MySQL applications.
|
||||
|
||||
#### Connection Attributes
|
||||
|
||||
Bun automatically sends client information to MySQL for better monitoring:
|
||||
|
||||
```ts
|
||||
// These attributes are sent automatically:
|
||||
// _client_name: "Bun"
|
||||
// _client_version: <bun version>
|
||||
// You can see these in MySQL's performance_schema.session_connect_attrs
|
||||
```
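For example, you can inspect what the server recorded for the current session with a query like the following (a sketch; it requires `performance_schema` to be enabled on your server):

```ts
const attrs = await mysql`
  SELECT ATTR_NAME, ATTR_VALUE
  FROM performance_schema.session_connect_attrs
  WHERE PROCESSLIST_ID = CONNECTION_ID()
`;
// Includes rows such as { ATTR_NAME: "_client_name", ATTR_VALUE: "Bun" }
console.log(attrs);
```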
|
||||
|
||||
#### Type Handling
|
||||
|
||||
MySQL types are automatically converted to JavaScript types:
|
||||
|
||||
| MySQL Type | JavaScript Type | Notes |
|
||||
| --------------------------------------- | ------------------------ | ---------------------------------------------------------------------------------------------------- |
|
||||
| INT, TINYINT, MEDIUMINT | number | Within safe integer range |
|
||||
| BIGINT                                  | string, number or BigInt | Returned as a number when the value fits in a 32-bit integer; otherwise as a string or BigInt, depending on the `bigint` option |
|
||||
| DECIMAL, NUMERIC | string | To preserve precision |
|
||||
| FLOAT, DOUBLE | number | |
|
||||
| DATE | Date | JavaScript Date object |
|
||||
| DATETIME, TIMESTAMP | Date | With timezone handling |
|
||||
| TIME                                    | number                   | Total duration in microseconds                                                                         |
|
||||
| YEAR | number | |
|
||||
| CHAR, VARCHAR, VARSTRING, STRING | string | |
|
||||
| TINY TEXT, MEDIUM TEXT, TEXT, LONG TEXT | string | |
|
||||
| TINY BLOB, MEDIUM BLOB, BLOB, LONG BLOB | string                   | BLOB types are aliases for TEXT types                                                                  |
|
||||
| JSON | object/array | Automatically parsed |
|
||||
| BIT(1) | boolean | BIT(1) in MySQL |
|
||||
| GEOMETRY | string | Geometry data |
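
As the BIGINT row notes, how out-of-range integers come back depends on the `bigint` option. A minimal sketch, assuming the option is passed to the constructor as referenced in the table:

```ts
const mysql = new SQL({
  adapter: "mysql",
  hostname: "localhost",
  database: "myapp",
  bigint: true, // return large BIGINT values as BigInt instead of string
});

const [row] = await mysql`SELECT 9223372036854775807 AS big`;
console.log(typeof row.big); // "bigint" with the option above
```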
|
||||
|
||||
#### Differences from PostgreSQL
|
||||
|
||||
While the API is unified, there are some behavioral differences:
|
||||
|
||||
1. **Parameter placeholders**: MySQL uses `?` internally but Bun converts `$1, $2` style automatically
|
||||
2. **RETURNING clause**: MySQL doesn't support RETURNING; use `result.lastInsertRowid` or a separate SELECT
|
||||
3. **Array types**: MySQL doesn't have native array types like PostgreSQL
|
||||
|
||||
### MySQL-Specific Features
|
||||
|
||||
We haven't implemented `LOAD DATA INFILE` support yet.
|
||||
|
||||
### PostgreSQL-Specific Features
|
||||
|
||||
We haven't implemented these yet:
|
||||
|
||||
@@ -671,13 +1263,89 @@ We also haven't implemented some of the more uncommon features like:
|
||||
- Point & PostGIS types
|
||||
- All the multi-dimensional integer array types (only a couple of the types are supported)
|
||||
|
||||
## Common Patterns & Best Practices
|
||||
|
||||
### Working with MySQL Result Sets
|
||||
|
||||
```ts
|
||||
// Getting insert ID after INSERT
|
||||
const result = await mysql`INSERT INTO users (name) VALUES (${"Alice"})`;
|
||||
console.log(result.lastInsertRowid); // MySQL's LAST_INSERT_ID()
|
||||
|
||||
// Handling affected rows
|
||||
const updated =
|
||||
await mysql`UPDATE users SET active = ${false} WHERE age < ${18}`;
|
||||
console.log(updated.affectedRows); // Number of rows updated
|
||||
|
||||
// Using MySQL-specific functions
|
||||
const now = await mysql`SELECT NOW() as server_time`; // `current_time` is reserved in MySQL, so use an unreserved alias
|
||||
const uuid = await mysql`SELECT UUID() as id`;
|
||||
```
|
||||
|
||||
### MySQL Error Handling
|
||||
|
||||
```ts
|
||||
try {
|
||||
await mysql`INSERT INTO users (email) VALUES (${"duplicate@email.com"})`;
|
||||
} catch (error) {
|
||||
if (error.code === "ER_DUP_ENTRY") {
|
||||
console.log("Duplicate entry detected");
|
||||
} else if (error.code === "ER_ACCESS_DENIED_ERROR") {
|
||||
console.log("Access denied");
|
||||
} else if (error.code === "ER_BAD_DB_ERROR") {
|
||||
console.log("Database does not exist");
|
||||
}
|
||||
// MySQL error codes are compatible with mysql/mysql2 packages
|
||||
}
|
||||
```
|
||||
|
||||
### Performance Tips for MySQL
|
||||
|
||||
1. **Use connection pooling**: Set appropriate `max` pool size based on your workload
|
||||
2. **Enable prepared statements**: They're enabled by default and improve performance
|
||||
3. **Use transactions for bulk operations**: Group related queries in transactions (see the sketch after this list)
|
||||
4. **Index properly**: MySQL relies heavily on indexes for query performance
|
||||
5. **Use `utf8mb4` charset**: It's set by default and handles all Unicode characters
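
A minimal sketch of tip 3, assuming the `begin` API from the Transactions section is available on the MySQL adapter (the `users` table and rows are illustrative):

```ts
const newUsers = [
  { name: "Alice", email: "alice@example.com" },
  { name: "Bob", email: "bob@example.com" },
];

await mysql.begin(async tx => {
  for (const user of newUsers) {
    // Each INSERT reuses the same connection; only one COMMIT happens at the end,
    // which is much cheaper than committing every row individually.
    await tx`INSERT INTO users (name, email) VALUES (${user.name}, ${user.email})`;
  }
});
```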
|
||||
|
||||
## Frequently Asked Questions
|
||||
|
||||
> Why is this `Bun.sql` and not `Bun.postgres`?
|
||||
|
||||
The plan is to add more database drivers in the future.
|
||||
The plan was to add more database drivers in the future. Now with MySQL support added, this unified API supports PostgreSQL, MySQL, and SQLite.
|
||||
|
||||
> Why not just use an existing library?
|
||||
> How do I know which database adapter is being used?
|
||||
|
||||
The adapter is automatically detected from the connection string:
|
||||
|
||||
- URLs starting with `mysql://` or `mysql2://` use MySQL
|
||||
- URLs matching SQLite patterns (`:memory:`, `sqlite://`, `file://`) use SQLite
|
||||
- Everything else defaults to PostgreSQL
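
For example:

```ts
import { SQL } from "bun";

const pg = new SQL("postgres://user:pass@localhost:5432/app"); // PostgreSQL
const my = new SQL("mysql://user:pass@localhost:3306/app"); // MySQL
const lite = new SQL("sqlite://app.db"); // SQLite (file)
const mem = new SQL(":memory:"); // SQLite (in-memory)
```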
|
||||
|
||||
> Are MySQL stored procedures supported?
|
||||
|
||||
Yes, stored procedures are fully supported including OUT parameters and multiple result sets:
|
||||
|
||||
```ts
|
||||
// Call stored procedure
|
||||
const results = await mysql`CALL GetUserStats(${userId}, @total_orders)`;
|
||||
|
||||
// Get OUT parameter
|
||||
const outParam = await mysql`SELECT @total_orders as total`;
|
||||
```
|
||||
|
||||
> Can I use MySQL-specific SQL syntax?
|
||||
|
||||
Yes, you can use any MySQL-specific syntax:
|
||||
|
||||
```ts
|
||||
// MySQL-specific syntax works fine
|
||||
await mysql`SET @user_id = ${userId}`;
|
||||
await mysql`SHOW TABLES`;
|
||||
await mysql`DESCRIBE users`;
|
||||
await mysql`EXPLAIN SELECT * FROM users WHERE id = ${id}`;
|
||||
```
|
||||
|
||||
## Why not just use an existing library?
|
||||
|
||||
npm packages like postgres.js, pg, and node-postgres can be used in Bun too. They're great options.
|
||||
|
||||
|
||||
@@ -772,6 +772,65 @@ console.log(obj); // => { foo: "bar" }
|
||||
|
||||
Internally, [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone) and [`postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage) serialize and deserialize the same way. This exposes the underlying [HTML Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm) to JavaScript as an ArrayBuffer.
|
||||
|
||||
## `Bun.stripANSI()` ~6-57x faster `strip-ansi` alternative
|
||||
|
||||
`Bun.stripANSI(text: string): string`
|
||||
|
||||
Strip ANSI escape codes from a string. This is useful for removing colors and formatting from terminal output.
|
||||
|
||||
```ts
|
||||
const coloredText = "\u001b[31mHello\u001b[0m \u001b[32mWorld\u001b[0m";
|
||||
const plainText = Bun.stripANSI(coloredText);
|
||||
console.log(plainText); // => "Hello World"
|
||||
|
||||
// Works with various ANSI codes
|
||||
const formatted = "\u001b[1m\u001b[4mBold and underlined\u001b[0m";
|
||||
console.log(Bun.stripANSI(formatted)); // => "Bold and underlined"
|
||||
```
|
||||
|
||||
`Bun.stripANSI` is significantly faster than the popular [`strip-ansi`](https://www.npmjs.com/package/strip-ansi) npm package:
|
||||
|
||||
```js
|
||||
> bun bench/snippets/strip-ansi.mjs
|
||||
cpu: Apple M3 Max
|
||||
runtime: bun 1.2.21 (arm64-darwin)
|
||||
|
||||
benchmark avg (min … max) p75 / p99
|
||||
------------------------------------------------------- ----------
|
||||
Bun.stripANSI 11 chars no-ansi 8.13 ns/iter 8.27 ns
|
||||
(7.45 ns … 33.59 ns) 10.29 ns
|
||||
|
||||
Bun.stripANSI 13 chars ansi 51.68 ns/iter 52.51 ns
|
||||
(46.16 ns … 113.71 ns) 57.71 ns
|
||||
|
||||
Bun.stripANSI 16,384 chars long-no-ansi 298.39 ns/iter 305.44 ns
|
||||
(281.50 ns … 331.65 ns) 320.70 ns
|
||||
|
||||
Bun.stripANSI 212,992 chars long-ansi 227.65 µs/iter 234.50 µs
|
||||
(216.46 µs … 401.92 µs) 262.25 µs
|
||||
```
|
||||
|
||||
```js
|
||||
> node bench/snippets/strip-ansi.mjs
|
||||
cpu: Apple M3 Max
|
||||
runtime: node 24.6.0 (arm64-darwin)
|
||||
|
||||
benchmark avg (min … max) p75 / p99
|
||||
-------------------------------------------------------- ---------
|
||||
npm/strip-ansi 11 chars no-ansi 466.79 ns/iter 468.67 ns
|
||||
(454.08 ns … 570.67 ns) 543.67 ns
|
||||
|
||||
npm/strip-ansi 13 chars ansi 546.77 ns/iter 550.23 ns
|
||||
(532.74 ns … 651.08 ns) 590.35 ns
|
||||
|
||||
npm/strip-ansi 16,384 chars long-no-ansi 4.85 µs/iter 4.89 µs
|
||||
(4.71 µs … 5.00 µs) 4.98 µs
|
||||
|
||||
npm/strip-ansi 212,992 chars long-ansi 1.36 ms/iter 1.38 ms
|
||||
(1.27 ms … 1.73 ms) 1.49 ms
|
||||
|
||||
```
|
||||
|
||||
## `estimateShallowMemoryUsageOf` in `bun:jsc`
|
||||
|
||||
The `estimateShallowMemoryUsageOf` function returns a best-effort estimate of the memory usage of an object in bytes, excluding the memory usage of properties or other objects it references. For accurate per-object memory usage, use `Bun.generateHeapSnapshot`.
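A quick sketch (exact byte counts vary by platform and engine version):

```ts
import { estimateShallowMemoryUsageOf } from "bun:jsc";

const small = { a: 1 };
const big = { data: new Array(10_000).fill(0) };

// Shallow only: the array referenced by `big.data` is not counted.
console.log(estimateShallowMemoryUsageOf(small));
console.log(estimateShallowMemoryUsageOf(big));
```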
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{% callout %}
|
||||
**🚧** — The `Worker` API is still experimental and should not be considered ready for production.
|
||||
**🚧** — The `Worker` API is still experimental (particularly for terminating workers). We are actively working on improving this.
|
||||
{% /callout %}
|
||||
|
||||
[`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker) lets you start and communicate with a new JavaScript instance running on a separate thread while sharing I/O resources with the main thread.
|
||||
@@ -122,6 +122,59 @@ Messages are automatically enqueued until the worker is ready, so there is no ne
|
||||
|
||||
To send messages, use [`worker.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Worker/postMessage) and [`self.postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage). This leverages the [HTML Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm).
|
||||
|
||||
### Performance optimizations
|
||||
|
||||
Bun includes optimized fast paths for `postMessage` to dramatically improve performance for common data types:
|
||||
|
||||
**String fast path** - When posting pure string values, Bun bypasses the structured clone algorithm entirely, achieving significant performance gains with no serialization overhead.
|
||||
|
||||
**Simple object fast path** - For plain objects containing only primitive values (strings, numbers, booleans, null, undefined), Bun uses an optimized serialization path that stores properties directly without full structured cloning.
|
||||
|
||||
The simple object fast path activates when the object:
|
||||
|
||||
- Is a plain object with no prototype chain modifications
|
||||
- Contains only enumerable, configurable data properties
|
||||
- Has no indexed properties or getter/setter methods
|
||||
- All property values are primitives or strings
|
||||
|
||||
With these fast paths, Bun's `postMessage` performs **2-241x faster** because the message length no longer has a meaningful impact on performance.
|
||||
|
||||
**Bun (with fast paths):**
|
||||
|
||||
```
|
||||
postMessage({ prop: 11 chars string, ...9 more props }) - 648ns
|
||||
postMessage({ prop: 14 KB string, ...9 more props }) - 719ns
|
||||
postMessage({ prop: 3 MB string, ...9 more props }) - 1.26µs
|
||||
```
|
||||
|
||||
**Node.js v24.6.0 (for comparison):**
|
||||
|
||||
```
|
||||
postMessage({ prop: 11 chars string, ...9 more props }) - 1.19µs
|
||||
postMessage({ prop: 14 KB string, ...9 more props }) - 2.69µs
|
||||
postMessage({ prop: 3 MB string, ...9 more props }) - 304µs
|
||||
```
|
||||
|
||||
```js
|
||||
// String fast path - optimized
|
||||
postMessage("Hello, worker!");
|
||||
|
||||
// Simple object fast path - optimized
|
||||
postMessage({
|
||||
message: "Hello",
|
||||
count: 42,
|
||||
enabled: true,
|
||||
data: null,
|
||||
});
|
||||
|
||||
// Complex objects still work but use standard structured clone
|
||||
postMessage({
|
||||
nested: { deep: { object: true } },
|
||||
date: new Date(),
|
||||
buffer: new ArrayBuffer(8),
|
||||
});
|
||||
```
|
||||
|
||||
```js
|
||||
// On the worker thread, `postMessage` is automatically "routed" to the parent thread.
|
||||
postMessage({ hello: "world" });
|
||||
|
||||
docs/api/yaml.md (new file)
@@ -0,0 +1,459 @@
|
||||
In Bun, YAML is a first-class citizen alongside JSON and TOML.
|
||||
|
||||
Bun provides built-in support for YAML files through both runtime APIs and bundler integration. You can:
|
||||
|
||||
- Parse YAML strings with `Bun.YAML.parse`
|
||||
- import & require YAML files as modules at runtime (including hot reloading & watch mode support)
|
||||
- import & require YAML files in frontend apps via bun's bundler
|
||||
|
||||
## Conformance
|
||||
|
||||
Bun's YAML parser currently passes over 90% of the official YAML test suite. While we're actively working on reaching 100% conformance, the current implementation covers the vast majority of real-world use cases. The parser is written in Zig for optimal performance and is continuously being improved.
|
||||
|
||||
## Runtime API
|
||||
|
||||
### `Bun.YAML.parse()`
|
||||
|
||||
Parse a YAML string into a JavaScript object.
|
||||
|
||||
```ts
|
||||
import { YAML } from "bun";
|
||||
const text = `
|
||||
name: John Doe
|
||||
age: 30
|
||||
email: john@example.com
|
||||
hobbies:
|
||||
- reading
|
||||
- coding
|
||||
- hiking
|
||||
`;
|
||||
|
||||
const data = YAML.parse(text);
|
||||
console.log(data);
|
||||
// {
|
||||
// name: "John Doe",
|
||||
// age: 30,
|
||||
// email: "john@example.com",
|
||||
// hobbies: ["reading", "coding", "hiking"]
|
||||
// }
|
||||
```
|
||||
|
||||
#### Multi-document YAML
|
||||
|
||||
When parsing YAML with multiple documents (separated by `---`), `Bun.YAML.parse()` returns an array:
|
||||
|
||||
```ts
|
||||
const multiDoc = `
|
||||
---
|
||||
name: Document 1
|
||||
---
|
||||
name: Document 2
|
||||
---
|
||||
name: Document 3
|
||||
`;
|
||||
|
||||
const docs = Bun.YAML.parse(multiDoc);
|
||||
console.log(docs);
|
||||
// [
|
||||
// { name: "Document 1" },
|
||||
// { name: "Document 2" },
|
||||
// { name: "Document 3" }
|
||||
// ]
|
||||
```
|
||||
|
||||
#### Supported YAML Features
|
||||
|
||||
Bun's YAML parser supports the full YAML 1.2 specification, including:
|
||||
|
||||
- **Scalars**: strings, numbers, booleans, null values
|
||||
- **Collections**: sequences (arrays) and mappings (objects)
|
||||
- **Anchors and Aliases**: reusable nodes with `&` and `*`
|
||||
- **Tags**: type hints like `!!str`, `!!int`, `!!float`, `!!bool`, `!!null`
|
||||
- **Multi-line strings**: literal (`|`) and folded (`>`) scalars
|
||||
- **Comments**: using `#`
|
||||
- **Directives**: `%YAML` and `%TAG`
|
||||
|
||||
```ts
|
||||
const yaml = `
|
||||
# Employee record
|
||||
employee: &emp
|
||||
name: Jane Smith
|
||||
department: Engineering
|
||||
skills:
|
||||
- JavaScript
|
||||
- TypeScript
|
||||
- React
|
||||
|
||||
manager: *emp # Reference to employee
|
||||
|
||||
config: !!str 123 # Explicit string type
|
||||
|
||||
description: |
|
||||
This is a multi-line
|
||||
literal string that preserves
|
||||
line breaks and spacing.
|
||||
|
||||
summary: >
|
||||
This is a folded string
|
||||
that joins lines with spaces
|
||||
unless there are blank lines.
|
||||
`;
|
||||
|
||||
const data = Bun.YAML.parse(yaml);
|
||||
```
|
||||
|
||||
#### Error Handling
|
||||
|
||||
`Bun.YAML.parse()` throws a `SyntaxError` if the YAML is invalid:
|
||||
|
||||
```ts
|
||||
try {
|
||||
Bun.YAML.parse("invalid: yaml: content:");
|
||||
} catch (error) {
|
||||
console.error("Failed to parse YAML:", error.message);
|
||||
}
|
||||
```
|
||||
|
||||
## Module Import
|
||||
|
||||
### ES Modules
|
||||
|
||||
You can import YAML files directly as ES modules. The YAML content is parsed and made available as both default and named exports:
|
||||
|
||||
```yaml#config.yaml
|
||||
database:
|
||||
host: localhost
|
||||
port: 5432
|
||||
name: myapp
|
||||
|
||||
redis:
|
||||
host: localhost
|
||||
port: 6379
|
||||
|
||||
features:
|
||||
auth: true
|
||||
rateLimit: true
|
||||
analytics: false
|
||||
```
|
||||
|
||||
#### Default Import
|
||||
|
||||
```ts#app.ts
|
||||
import config from "./config.yaml";
|
||||
|
||||
console.log(config.database.host); // "localhost"
|
||||
console.log(config.redis.port); // 6379
|
||||
```
|
||||
|
||||
#### Named Imports
|
||||
|
||||
You can destructure top-level YAML properties as named imports:
|
||||
|
||||
```ts
|
||||
import { database, redis, features } from "./config.yaml";
|
||||
|
||||
console.log(database.host); // "localhost"
|
||||
console.log(redis.port); // 6379
|
||||
console.log(features.auth); // true
|
||||
```
|
||||
|
||||
Or combine both:
|
||||
|
||||
```ts
|
||||
import config, { database, features } from "./config.yaml";
|
||||
|
||||
// Use the full config object
|
||||
console.log(config);
|
||||
|
||||
// Or use specific parts
|
||||
if (features.rateLimit) {
|
||||
setupRateLimiting(database);
|
||||
}
|
||||
```
|
||||
|
||||
### CommonJS
|
||||
|
||||
YAML files can also be required in CommonJS:
|
||||
|
||||
```js
|
||||
const config = require("./config.yaml");
|
||||
console.log(config.database.name); // "myapp"
|
||||
|
||||
// Destructuring also works
|
||||
const { database, redis } = require("./config.yaml");
|
||||
console.log(database.port); // 5432
|
||||
```
|
||||
|
||||
## Hot Reloading with YAML
|
||||
|
||||
One of the most powerful features of Bun's YAML support is hot reloading. When you run your application with `bun --hot`, changes to YAML files are automatically detected and reloaded without closing open connections.
|
||||
|
||||
### Configuration Hot Reloading
|
||||
|
||||
```yaml#config.yaml
|
||||
server:
|
||||
port: 3000
|
||||
host: localhost
|
||||
|
||||
features:
|
||||
debug: true
|
||||
verbose: false
|
||||
```
|
||||
|
||||
```ts#server.ts
|
||||
import { server, features } from "./config.yaml";
|
||||
|
||||
console.log(`Starting server on ${server.host}:${server.port}`);
|
||||
|
||||
if (features.debug) {
|
||||
console.log("Debug mode enabled");
|
||||
}
|
||||
|
||||
// Your server code here
|
||||
Bun.serve({
|
||||
port: server.port,
|
||||
hostname: server.host,
|
||||
fetch(req) {
|
||||
if (features.verbose) {
|
||||
console.log(`${req.method} ${req.url}`);
|
||||
}
|
||||
return new Response("Hello World");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Run with hot reloading:
|
||||
|
||||
```bash
|
||||
bun --hot server.ts
|
||||
```
|
||||
|
||||
Now when you modify `config.yaml`, the changes are immediately reflected in your running application. This is perfect for:
|
||||
|
||||
- Adjusting configuration during development
|
||||
- Testing different settings without restarts
|
||||
- Live debugging with configuration changes
|
||||
- Feature flag toggling
|
||||
|
||||
## Configuration Management
|
||||
|
||||
### Environment-Based Configuration
|
||||
|
||||
YAML excels at managing configuration across different environments:
|
||||
|
||||
```yaml#config.yaml
|
||||
defaults: &defaults
|
||||
timeout: 5000
|
||||
retries: 3
|
||||
cache:
|
||||
enabled: true
|
||||
ttl: 3600
|
||||
|
||||
development:
|
||||
<<: *defaults
|
||||
api:
|
||||
url: http://localhost:4000
|
||||
key: dev_key_12345
|
||||
logging:
|
||||
level: debug
|
||||
pretty: true
|
||||
|
||||
staging:
|
||||
<<: *defaults
|
||||
api:
|
||||
url: https://staging-api.example.com
|
||||
key: ${STAGING_API_KEY}
|
||||
logging:
|
||||
level: info
|
||||
pretty: false
|
||||
|
||||
production:
|
||||
<<: *defaults
|
||||
api:
|
||||
url: https://api.example.com
|
||||
key: ${PROD_API_KEY}
|
||||
cache:
|
||||
enabled: true
|
||||
ttl: 86400
|
||||
logging:
|
||||
level: error
|
||||
pretty: false
|
||||
```
|
||||
|
||||
```ts#app.ts
|
||||
import configs from "./config.yaml";
|
||||
|
||||
const env = process.env.NODE_ENV || "development";
|
||||
const config = configs[env];
|
||||
|
||||
// Environment variables in YAML values can be interpolated
|
||||
function interpolateEnvVars(obj: any): any {
|
||||
if (typeof obj === "string") {
|
||||
return obj.replace(/\${(\w+)}/g, (_, key) => process.env[key] || "");
|
||||
}
|
||||
if (typeof obj === "object") {
|
||||
for (const key in obj) {
|
||||
obj[key] = interpolateEnvVars(obj[key]);
|
||||
}
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
|
||||
export default interpolateEnvVars(config);
|
||||
```
|
||||
|
||||
### Feature Flags Configuration
|
||||
|
||||
```yaml#features.yaml
|
||||
features:
|
||||
newDashboard:
|
||||
enabled: true
|
||||
rolloutPercentage: 50
|
||||
allowedUsers:
|
||||
- admin@example.com
|
||||
- beta@example.com
|
||||
|
||||
experimentalAPI:
|
||||
enabled: false
|
||||
endpoints:
|
||||
- /api/v2/experimental
|
||||
- /api/v2/beta
|
||||
|
||||
darkMode:
|
||||
enabled: true
|
||||
default: auto # auto, light, dark
|
||||
```
|
||||
|
||||
```ts#feature-flags.ts
|
||||
import { features } from "./features.yaml";
|
||||
|
||||
export function isFeatureEnabled(
|
||||
featureName: string,
|
||||
userEmail?: string,
|
||||
): boolean {
|
||||
const feature = features[featureName];
|
||||
|
||||
if (!feature?.enabled) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check rollout percentage
|
||||
if (feature.rolloutPercentage < 100) {
|
||||
const hash = hashCode(userEmail || "anonymous");
|
||||
if (hash % 100 >= feature.rolloutPercentage) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Check allowed users
|
||||
if (feature.allowedUsers && userEmail) {
|
||||
return feature.allowedUsers.includes(userEmail);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
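
// `hashCode` isn't defined elsewhere in this file — a minimal stand-in so the
// module is self-contained (any stable string -> number hash works here):
function hashCode(input: string): number {
  let hash = 0;
  for (let i = 0; i < input.length; i++) {
    hash = (hash * 31 + input.charCodeAt(i)) | 0;
  }
  return Math.abs(hash);
}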
|
||||
|
||||
// Use with hot reloading to toggle features in real-time
|
||||
if (isFeatureEnabled("newDashboard", user.email)) {
|
||||
renderNewDashboard();
|
||||
} else {
|
||||
renderLegacyDashboard();
|
||||
}
|
||||
```
|
||||
|
||||
### Database Configuration
|
||||
|
||||
```yaml#database.yaml
|
||||
connections:
|
||||
primary:
|
||||
type: postgres
|
||||
host: ${DB_HOST:-localhost}
|
||||
port: ${DB_PORT:-5432}
|
||||
database: ${DB_NAME:-myapp}
|
||||
username: ${DB_USER:-postgres}
|
||||
password: ${DB_PASS}
|
||||
pool:
|
||||
min: 2
|
||||
max: 10
|
||||
idleTimeout: 30000
|
||||
|
||||
cache:
|
||||
type: redis
|
||||
host: ${REDIS_HOST:-localhost}
|
||||
port: ${REDIS_PORT:-6379}
|
||||
password: ${REDIS_PASS}
|
||||
db: 0
|
||||
|
||||
analytics:
|
||||
type: clickhouse
|
||||
host: ${ANALYTICS_HOST:-localhost}
|
||||
port: 8123
|
||||
database: analytics
|
||||
|
||||
migrations:
|
||||
autoRun: ${AUTO_MIGRATE:-false}
|
||||
directory: ./migrations
|
||||
|
||||
seeds:
|
||||
enabled: ${SEED_DB:-false}
|
||||
directory: ./seeds
|
||||
```
|
||||
|
||||
```ts#db.ts
|
||||
import { connections, migrations } from "./database.yaml";
|
||||
import { createConnection } from "./database-driver";
|
||||
|
||||
// Parse environment variables with defaults
|
||||
function parseConfig(config: any) {
|
||||
return JSON.parse(
|
||||
JSON.stringify(config).replace(
|
||||
/\${([^:-]+)(?::([^}]+))?}/g,
|
||||
(_, key, defaultValue) => process.env[key] || defaultValue || "",
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
const dbConfig = parseConfig(connections);
|
||||
|
||||
export const db = await createConnection(dbConfig.primary);
|
||||
export const cache = await createConnection(dbConfig.cache);
|
||||
export const analytics = await createConnection(dbConfig.analytics);
|
||||
|
||||
// Auto-run migrations if configured
|
||||
if (parseConfig(migrations).autoRun === "true") {
|
||||
await runMigrations(db, migrations.directory);
|
||||
}
|
||||
```
|
||||
|
||||
### Bundler Integration
|
||||
|
||||
When you import YAML files in your application and bundle it with Bun, the YAML is parsed at build time and included as a JavaScript module:
|
||||
|
||||
```bash
|
||||
bun build app.ts --outdir=dist
|
||||
```
|
||||
|
||||
This means:
|
||||
|
||||
- Zero runtime YAML parsing overhead in production
|
||||
- Smaller bundle sizes
|
||||
- Tree-shaking support for unused configuration (named imports)
|
||||
|
||||
### Dynamic Imports
|
||||
|
||||
YAML files can be dynamically imported, useful for loading configuration on demand:
|
||||
|
||||
```ts#Load configuration based on environment
|
||||
const env = process.env.NODE_ENV || "development";
|
||||
const config = await import(`./configs/${env}.yaml`);
|
||||
|
||||
// Load user-specific settings
|
||||
async function loadUserSettings(userId: string) {
|
||||
try {
|
||||
const settings = await import(`./users/${userId}/settings.yaml`);
|
||||
return settings.default;
|
||||
} catch {
|
||||
    return (await import("./users/default-settings.yaml")).default;
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -408,16 +408,119 @@ $ bun build --compile --asset-naming="[name].[ext]" ./index.ts
|
||||
|
||||
To trim down the size of the executable a little, pass `--minify` to `bun build --compile`. This uses Bun's minifier to reduce the code size. Overall though, Bun's binary is still way too big and we need to make it smaller.
|
||||
|
||||
## Using Bun.build() API
|
||||
|
||||
You can also generate standalone executables using the `Bun.build()` JavaScript API. This is useful when you need programmatic control over the build process.
|
||||
|
||||
### Basic usage
|
||||
|
||||
```js
|
||||
await Bun.build({
|
||||
entrypoints: ["./app.ts"],
|
||||
outdir: "./dist",
|
||||
compile: {
|
||||
target: "bun-windows-x64",
|
||||
outfile: "myapp.exe",
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Windows metadata with Bun.build()
|
||||
|
||||
When targeting Windows, you can specify metadata through the `windows` object:
|
||||
|
||||
```js
|
||||
await Bun.build({
|
||||
entrypoints: ["./app.ts"],
|
||||
outdir: "./dist",
|
||||
compile: {
|
||||
target: "bun-windows-x64",
|
||||
outfile: "myapp.exe",
|
||||
windows: {
|
||||
title: "My Application",
|
||||
publisher: "My Company Inc",
|
||||
version: "1.2.3.4",
|
||||
description: "A powerful application built with Bun",
|
||||
copyright: "© 2024 My Company Inc",
|
||||
hideConsole: false, // Set to true for GUI applications
|
||||
icon: "./icon.ico", // Path to icon file
|
||||
},
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### Cross-compilation with Bun.build()
|
||||
|
||||
You can cross-compile for different platforms:
|
||||
|
||||
```js
|
||||
// Build for multiple platforms
|
||||
const platforms = [
|
||||
{ target: "bun-windows-x64", outfile: "app-windows.exe" },
|
||||
{ target: "bun-linux-x64", outfile: "app-linux" },
|
||||
{ target: "bun-darwin-arm64", outfile: "app-macos" },
|
||||
];
|
||||
|
||||
for (const platform of platforms) {
|
||||
await Bun.build({
|
||||
entrypoints: ["./app.ts"],
|
||||
outdir: "./dist",
|
||||
compile: platform,
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
## Windows-specific flags
|
||||
|
||||
When compiling a standalone executable on Windows, there are two platform-specific options that can be used to customize metadata on the generated `.exe` file:
|
||||
When compiling a standalone executable for Windows, there are several platform-specific options that can be used to customize the generated `.exe` file:
|
||||
|
||||
- `--windows-icon=path/to/icon.ico` to customize the executable file icon.
|
||||
- `--windows-hide-console` to disable the background terminal, which can be used for applications that do not need a TTY.
|
||||
### Visual customization
|
||||
|
||||
- `--windows-icon=path/to/icon.ico` - Set the executable file icon
|
||||
- `--windows-hide-console` - Disable the background terminal window (useful for GUI applications)
|
||||
|
||||
### Metadata customization
|
||||
|
||||
You can embed version information and other metadata into your Windows executable:
|
||||
|
||||
- `--windows-title <STR>` - Set the product name (appears in file properties)
|
||||
- `--windows-publisher <STR>` - Set the company name
|
||||
- `--windows-version <STR>` - Set the version number (e.g. "1.2.3.4")
|
||||
- `--windows-description <STR>` - Set the file description
|
||||
- `--windows-copyright <STR>` - Set the copyright information
|
||||
|
||||
#### Example with all metadata flags:
|
||||
|
||||
```sh
|
||||
bun build --compile ./app.ts \
|
||||
--outfile myapp.exe \
|
||||
--windows-title "My Application" \
|
||||
--windows-publisher "My Company Inc" \
|
||||
--windows-version "1.2.3.4" \
|
||||
--windows-description "A powerful application built with Bun" \
|
||||
--windows-copyright "© 2024 My Company Inc"
|
||||
```
|
||||
|
||||
This metadata will be visible in Windows Explorer when viewing the file properties:
|
||||
|
||||
1. Right-click the executable in Windows Explorer
|
||||
2. Select "Properties"
|
||||
3. Go to the "Details" tab
|
||||
|
||||
#### Version string format
|
||||
|
||||
The `--windows-version` flag accepts version strings in the following formats:
|
||||
|
||||
- `"1"` - Will be normalized to "1.0.0.0"
|
||||
- `"1.2"` - Will be normalized to "1.2.0.0"
|
||||
- `"1.2.3"` - Will be normalized to "1.2.3.0"
|
||||
- `"1.2.3.4"` - Full version format
|
||||
|
||||
Each version component must be a number between 0 and 65535.
|
||||
|
||||
{% callout %}
|
||||
|
||||
These flags currently cannot be used when cross-compiling because they depend on Windows APIs.
|
||||
These flags currently cannot be used when cross-compiling because they depend on Windows APIs. They are only available when building on Windows itself.
|
||||
|
||||
{% /callout %}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
Bun's fast native bundler is now in beta. It can be used via the `bun build` CLI command or the `Bun.build()` JavaScript API.
|
||||
Bun's fast native bundler can be used via the `bun build` CLI command or the `Bun.build()` JavaScript API.
|
||||
|
||||
{% codetabs group="a" %}
|
||||
|
||||
@@ -1259,6 +1259,33 @@ $ bun build ./index.tsx --outdir ./out --drop=console --drop=debugger --drop=any
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### `throw`
|
||||
|
||||
Controls error handling behavior when the build fails. When set to `true` (default), the returned promise rejects with an `AggregateError`. When set to `false`, the promise resolves with a `BuildOutput` object where `success` is `false`.
|
||||
|
||||
```ts#JavaScript
|
||||
// Default behavior: throws on error
|
||||
try {
|
||||
await Bun.build({
|
||||
entrypoints: ['./index.tsx'],
|
||||
throw: true, // default
|
||||
});
|
||||
} catch (error) {
|
||||
// Handle AggregateError
|
||||
console.error("Build failed:", error);
|
||||
}
|
||||
|
||||
// Alternative: handle errors via success property
|
||||
const result = await Bun.build({
|
||||
entrypoints: ['./index.tsx'],
|
||||
throw: false,
|
||||
});
|
||||
|
||||
if (!result.success) {
|
||||
console.error("Build failed with errors:", result.logs);
|
||||
}
|
||||
```
|
||||
|
||||
## Outputs
|
||||
|
||||
The `Bun.build` function returns a `Promise<BuildOutput>`, defined as:
|
||||
@@ -1569,8 +1596,7 @@ interface BuildConfig {
|
||||
* When set to `true`, the returned promise rejects with an AggregateError when a build failure happens.
|
||||
* When set to `false`, the `success` property of the returned object will be `false` when a build failure happens.
|
||||
*
|
||||
* This defaults to `false` in Bun 1.1 and will change to `true` in Bun 1.2
|
||||
* as most usage of `Bun.build` forgets to check for errors.
|
||||
* This defaults to `true`.
|
||||
*/
|
||||
throw?: boolean;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
The Bun bundler implements a set of default loaders out of the box. As a rule of thumb, the bundler and the runtime both support the same set of file types out of the box.
|
||||
|
||||
`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.toml` `.json` `.txt` `.wasm` `.node` `.html`
|
||||
`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.toml` `.json` `.yaml` `.yml` `.txt` `.wasm` `.node` `.html`
|
||||
|
||||
Bun uses the file extension to determine which built-in _loader_ should be used to parse the file. Every loader has a name, such as `js`, `tsx`, or `json`. These names are used when building [plugins](https://bun.com/docs/bundler/plugins) that extend Bun with custom loaders.
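
For example, a plugin can read a file itself and hand the contents to one of these built-in loaders by name. A minimal sketch (the `.conf` extension is made up for illustration):

```ts
import type { BunPlugin } from "bun";

const confAsToml: BunPlugin = {
  name: "conf-as-toml",
  setup(build) {
    // Treat hypothetical ".conf" files as TOML by delegating to the built-in "toml" loader.
    build.onLoad({ filter: /\.conf$/ }, async ({ path }) => ({
      contents: await Bun.file(path).text(),
      loader: "toml",
    }));
  },
};

await Bun.build({
  entrypoints: ["./index.ts"],
  outdir: "./out",
  plugins: [confAsToml],
});
```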
|
||||
|
||||
@@ -121,6 +121,55 @@ export default {
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### `yaml`
|
||||
|
||||
**YAML loader**. Default for `.yaml` and `.yml`.
|
||||
|
||||
YAML files can be directly imported. Bun will parse them with its fast native YAML parser.
|
||||
|
||||
```ts
|
||||
import config from "./config.yaml";
|
||||
config.database.host; // => "localhost"
|
||||
|
||||
// via import attribute:
|
||||
// import myCustomYAML from './my.config' with {type: "yaml"};
|
||||
```
|
||||
|
||||
During bundling, the parsed YAML is inlined into the bundle as a JavaScript object.
|
||||
|
||||
```ts
|
||||
var config = {
|
||||
database: {
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
},
|
||||
// ...other fields
|
||||
};
|
||||
config.database.host;
|
||||
```
|
||||
|
||||
If a `.yaml` or `.yml` file is passed as an entrypoint, it will be converted to a `.js` module that `export default`s the parsed object.
|
||||
|
||||
{% codetabs %}
|
||||
|
||||
```yaml#Input
|
||||
name: John Doe
|
||||
age: 35
|
||||
email: johndoe@example.com
|
||||
```
|
||||
|
||||
```js#Output
|
||||
export default {
|
||||
name: "John Doe",
|
||||
age: 35,
|
||||
email: "johndoe@example.com"
|
||||
}
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
For more details on YAML support including the runtime API `Bun.YAML.parse()`, see the [YAML API documentation](/docs/api/yaml).
|
||||
|
||||
### `text`
|
||||
|
||||
**Text loader**. Default for `.txt`.
|
||||
|
||||
@@ -9,6 +9,7 @@ Plugins can register callbacks to be run at various points in the lifecycle of a
|
||||
- [`onStart()`](#onstart): Run once the bundler has started a bundle
|
||||
- [`onResolve()`](#onresolve): Run before a module is resolved
|
||||
- [`onLoad()`](#onload): Run before a module is loaded.
|
||||
- [`onEnd()`](#onend): Run after the bundle has completed
|
||||
- [`onBeforeParse()`](#onbeforeparse): Run zero-copy native addons in the parser thread before a file is parsed.
|
||||
|
||||
### Reference
|
||||
@@ -18,6 +19,7 @@ A rough overview of the types (please refer to Bun's `bun.d.ts` for the full typ
|
||||
```ts
|
||||
type PluginBuilder = {
|
||||
onStart(callback: () => void): void;
|
||||
onEnd(callback: (result: BuildOutput) => void | Promise<void>): void;
|
||||
onResolve: (
|
||||
args: { filter: RegExp; namespace?: string },
|
||||
callback: (args: { path: string; importer: string }) => {
|
||||
@@ -285,6 +287,53 @@ plugin({
|
||||
|
||||
Note that the `.defer()` function currently has the limitation that it can only be called once per `onLoad` callback.
|
||||
|
||||
### `onEnd`
|
||||
|
||||
```ts
|
||||
onEnd(callback: (result: BuildOutput) => void | Promise<void>): void;
|
||||
```
|
||||
|
||||
Registers a callback to be run when the bundler completes a bundle (whether successful or not).
|
||||
|
||||
The callback receives the `BuildOutput` object containing:
|
||||
|
||||
- `success`: boolean indicating if the build succeeded
|
||||
- `outputs`: array of generated build artifacts
|
||||
- `logs`: array of build messages (warnings, errors, etc.)
|
||||
|
||||
This is useful for post-processing, cleanup, notifications, or custom error handling.
|
||||
|
||||
```ts
|
||||
await Bun.build({
|
||||
entrypoints: ["./index.ts"],
|
||||
outdir: "./out",
|
||||
plugins: [
|
||||
{
|
||||
name: "onEnd example",
|
||||
setup(build) {
|
||||
build.onEnd(result => {
|
||||
if (result.success) {
|
||||
console.log(
|
||||
`✅ Build succeeded with ${result.outputs.length} outputs`,
|
||||
);
|
||||
} else {
|
||||
console.error(`❌ Build failed with ${result.logs.length} errors`);
|
||||
}
|
||||
});
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
```
|
||||
|
||||
The `onEnd` callbacks are called:
|
||||
|
||||
- **Before** the build promise resolves or rejects
|
||||
- **After** all bundling is complete
|
||||
- **In the order** they were registered
|
||||
|
||||
Multiple plugins can register `onEnd` callbacks, and they will all be called sequentially. If an `onEnd` callback returns a promise, the build will wait for it to resolve before continuing.
|
||||
|
||||
## Native plugins
|
||||
|
||||
One of the reasons why Bun's bundler is so fast is that it is written in native code and leverages multi-threading to load and parse modules in parallel.
|
||||
|
||||
@@ -245,8 +245,8 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
|
||||
---
|
||||
|
||||
- `--jsx-side-effects`
|
||||
- n/a
|
||||
- JSX is always assumed to be side-effect-free
|
||||
- `--jsx-side-effects`
|
||||
- Controls whether JSX expressions are marked as `/* @__PURE__ */` for dead code elimination. Default is `false` (JSX marked as pure).
|
||||
|
||||
---
|
||||
|
||||
@@ -617,7 +617,7 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
|
||||
|
||||
- `jsxSideEffects`
|
||||
- `jsxSideEffects`
|
||||
- Not supported in JS API, configure in `tsconfig.json`
|
||||
- Controls whether JSX expressions are marked as pure for dead code elimination
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -230,16 +230,15 @@ $ bun install --backend copyfile
|
||||
|
||||
**`symlink`** is typically only used for `file:` dependencies (and eventually `link:`) internally. To prevent infinite loops, it skips symlinking the `node_modules` folder.
|
||||
|
||||
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own node_modules folder or you pass `--preserve-symlinks` to `node`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
|
||||
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own node_modules folder or you pass `--preserve-symlinks` to `node` or `bun`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
|
||||
|
||||
```bash
|
||||
$ rm -rf node_modules
|
||||
$ bun install --backend symlink
|
||||
$ bun --preserve-symlinks ./my-file.js
|
||||
$ node --preserve-symlinks ./my-file.js # https://nodejs.org/api/cli.html#--preserve-symlinks
|
||||
```
|
||||
|
||||
Bun's runtime does not currently expose an equivalent of `--preserve-symlinks`, though the code for it does exist.
|
||||
|
||||
## npm registry metadata
|
||||
|
||||
bun uses a binary format for caching NPM registry responses. This loads much faster than JSON and tends to be smaller on disk.
|
||||
|
||||
@@ -8,6 +8,14 @@ The `bun` CLI contains a Node.js-compatible package manager designed to be a dra
|
||||
|
||||
{% /callout %}
|
||||
|
||||
{% callout %}
|
||||
|
||||
**💾 Disk efficient** — Bun install stores all packages in a global cache (`~/.bun/install/cache/`) and creates hardlinks (Linux) or copy-on-write clones (macOS) to `node_modules`. This means duplicate packages across projects point to the same underlying data, taking up virtually no extra disk space.
|
||||
|
||||
For more details, see [Package manager > Global cache](https://bun.com/docs/install/cache).
|
||||
|
||||
{% /callout %}
|
||||
|
||||
{% details summary="For Linux users" %}
|
||||
The recommended minimum Linux Kernel version is 5.6. If you're on Linux kernel 5.1 - 5.5, `bun install` will work, but HTTP requests will be slow due to a lack of support for io_uring's `connect()` operation.
|
||||
|
||||
@@ -207,6 +215,12 @@ Isolated installs create a central package store in `node_modules/.bun/` with sy
|
||||
|
||||
For complete documentation on isolated installs, refer to [Package manager > Isolated installs](https://bun.com/docs/install/isolated).
|
||||
|
||||
## Disk efficiency
|
||||
|
||||
Bun uses a global cache at `~/.bun/install/cache/` to minimize disk usage. Packages are stored once and linked to `node_modules` using hardlinks (Linux/Windows) or copy-on-write (macOS), so duplicate packages across projects don't consume additional disk space.
|
||||
|
||||
For complete documentation refer to [Package manager > Global cache](https://bun.com/docs/install/cache).
|
||||
|
||||
## Configuration
|
||||
|
||||
The default behavior of `bun install` can be configured in `bunfig.toml`. The default values are shown below.
|
||||
|
||||
docs/guides/deployment/index.json (new file)
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"name": "Deployment",
|
||||
"description": "A collection of guides for deploying Bun to providers"
|
||||
}
|
||||
docs/guides/deployment/railway.md (new file)
@@ -0,0 +1,157 @@
|
||||
---
|
||||
name: Deploy a Bun application on Railway
|
||||
description: Deploy Bun applications to Railway with this step-by-step guide covering CLI and dashboard methods, optional PostgreSQL setup, and automatic SSL configuration.
|
||||
---
|
||||
|
||||
Railway is an infrastructure platform where you can provision resources, develop against them locally, and then deploy to the cloud. It enables instant deployments from GitHub with zero configuration, automatic SSL, and built-in database provisioning.
|
||||
|
||||
This guide walks through deploying a Bun application with a PostgreSQL database (optional), which is exactly what the template below provides.
|
||||
|
||||
You can either follow this guide step-by-step or simply deploy the pre-configured template with one click:
|
||||
|
||||
{% raw %}
|
||||
|
||||
<a href="https://railway.com/deploy/bun-react-postgres?referralCode=Bun&utm_medium=integration&utm_source=template&utm_campaign=bun" target="_blank">
|
||||
<img src="https://railway.com/button.svg" alt="Deploy on Railway" />
|
||||
</a>
|
||||
|
||||
{% /raw %}
|
||||
|
||||
---
|
||||
|
||||
**Prerequisites**:
|
||||
|
||||
- A Bun application ready for deployment
|
||||
- A [Railway account](https://railway.app/)
|
||||
- Railway CLI (for CLI deployment method)
|
||||
- A GitHub account (for Dashboard deployment method)
|
||||
|
||||
---
|
||||
|
||||
## Method 1: Deploy via CLI
|
||||
|
||||
---
|
||||
|
||||
#### Step 1
|
||||
|
||||
Make sure you have the Railway CLI installed.
|
||||
|
||||
```bash
|
||||
bun install -g @railway/cli
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### Step 2
|
||||
|
||||
Log into your Railway account.
|
||||
|
||||
```bash
|
||||
railway login
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### Step 3
|
||||
|
||||
After successfully authenticating, initialize a new project.
|
||||
|
||||
```bash
|
||||
# Initialize project
|
||||
bun-react-postgres$ railway init
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### Step 4
|
||||
|
||||
After initializing the project, add a new database and service.
|
||||
|
||||
> **Note:** Step 4 is only necessary if your application uses a database. If you don't need PostgreSQL, skip to Step 5.
|
||||
|
||||
```bash
|
||||
# Add PostgreSQL database. Make sure to add this first!
|
||||
bun-react-postgres$ railway add --database postgres
|
||||
|
||||
# Add your application service.
|
||||
bun-react-postgres$ railway add --service bun-react-db --variables DATABASE_URL=\${{Postgres.DATABASE_URL}}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
#### Step 5
|
||||
|
||||
After the services have been created and connected, deploy the application to Railway. By default, services are only accessible within Railway's private network. To make your app publicly accessible, you need to generate a public domain.
|
||||
|
||||
```bash
|
||||
# Deploy your application
|
||||
bun-react-postgres$ railway up
|
||||
|
||||
# Generate public domain
|
||||
bun-react-postgres$ railway domain
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Method 2: Deploy via Dashboard
|
||||
|
||||
---
|
||||
|
||||
#### Step 1
|
||||
|
||||
Create a new project
|
||||
|
||||
1. Go to [Railway Dashboard](http://railway.com/dashboard?utm_medium=integration&utm_source=docs&utm_campaign=bun)
|
||||
2. Click **"+ New"** → **"GitHub repo"**
|
||||
3. Choose your repository
|
||||
|
||||
---
|
||||
|
||||
#### Step 2
|
||||
|
||||
Add a PostgreSQL database, and connect this database to the service
|
||||
|
||||
> **Note:** Step 2 is only necessary if your application uses a database. If you don't need PostgreSQL, skip to Step 3.
|
||||
|
||||
1. Click **"+ New"** → **"Database"** → **"Add PostgreSQL"**
|
||||
2. After the database has been created, select your service (not the database)
|
||||
3. Go to **"Variables"** tab
|
||||
4. Click **"+ New Variable"** → **"Add Reference"**
|
||||
5. Select `DATABASE_URL` from postgres
|
||||
|
||||
---
|
||||
|
||||
#### Step 3
|
||||
|
||||
Generate a public domain
|
||||
|
||||
1. Select your service
|
||||
2. Go to **"Settings"** tab
|
||||
3. Under **"Networking"**, click **"Generate Domain"**
|
||||
|
||||
---
|
||||
|
||||
Your app is now live! Railway auto-deploys on every GitHub push.
|
||||
|
||||
---
|
||||
|
||||
## Configuration (Optional)
|
||||
|
||||
---
|
||||
|
||||
By default, Railway uses [Nixpacks](https://docs.railway.com/guides/build-configuration#nixpacks-options) to automatically detect and build your Bun application with zero configuration.
|
||||
|
||||
However, using the [Railpack](https://docs.railway.com/guides/build-configuration#railpack) application builder provides better Bun support, and will always support the latest version of Bun. The pre-configured templates use Railpack by default.
|
||||
|
||||
To enable Railpack in a custom project, add the following to your `railway.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"$schema": "https://railway.com/railway.schema.json",
|
||||
"build": {
|
||||
"builder": "RAILPACK"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
For more build configuration settings, check out the [Railway documentation](https://docs.railway.com/guides/build-configuration).
|
||||
docs/guides/runtime/import-yaml.md (new file)
@@ -0,0 +1,76 @@
|
||||
---
|
||||
name: Import a YAML file
|
||||
---
|
||||
|
||||
Bun natively supports `.yaml` and `.yml` imports.
|
||||
|
||||
```yaml#config.yaml
|
||||
database:
|
||||
host: localhost
|
||||
port: 5432
|
||||
name: myapp
|
||||
|
||||
server:
|
||||
port: 3000
|
||||
timeout: 30
|
||||
|
||||
features:
|
||||
auth: true
|
||||
rateLimit: true
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Import the file like any other source file.
|
||||
|
||||
```ts
|
||||
import config from "./config.yaml";
|
||||
|
||||
config.database.host; // => "localhost"
|
||||
config.server.port; // => 3000
|
||||
config.features.auth; // => true
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
You can also use named imports to destructure top-level properties:
|
||||
|
||||
```ts
|
||||
import { database, server, features } from "./config.yaml";
|
||||
|
||||
console.log(database.name); // => "myapp"
|
||||
console.log(server.timeout); // => 30
|
||||
console.log(features.rateLimit); // => true
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Bun also supports [Import Attributes](https://github.com/tc39/proposal-import-attributes) syntax:
|
||||
|
||||
```ts
|
||||
import config from "./config.yaml" with { type: "yaml" };
|
||||
|
||||
config.database.port; // => 5432
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
For parsing YAML strings at runtime, use `Bun.YAML.parse()`:
|
||||
|
||||
```ts
|
||||
const yamlString = `
|
||||
name: John Doe
|
||||
age: 30
|
||||
hobbies:
|
||||
- reading
|
||||
- coding
|
||||
`;
|
||||
|
||||
const data = Bun.YAML.parse(yamlString);
|
||||
console.log(data.name); // => "John Doe"
|
||||
console.log(data.hobbies); // => ["reading", "coding"]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > YAML](https://bun.com/docs/api/yaml) for complete documentation on YAML support in Bun.
|
||||
@@ -17,7 +17,7 @@ Bun reads the following files automatically (listed in order of increasing prece
|
||||
|
||||
- `.env`
|
||||
- `.env.production`, `.env.development`, `.env.test` (depending on value of `NODE_ENV`)
|
||||
- `.env.local`
|
||||
- `.env.local` (not loaded when `NODE_ENV=test`)
|
||||
|
||||
```txt#.env
|
||||
FOO=hello
|
||||
|
||||
@@ -35,7 +35,7 @@ Add this directive to _just one file_ in your project, such as:
|
||||
- Any single `.ts` file that TypeScript includes in your compilation
|
||||
|
||||
```ts
|
||||
/// <reference types="bun/test-globals" />
|
||||
/// <reference types="bun-types/test-globals" />
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
@@ -48,12 +48,12 @@ This behavior is configurable with the `--backend` flag, which is respected by a
|
||||
- **`copyfile`**: The fallback used when any of the above fail. It is the slowest option. On macOS, it uses `fcopyfile()`; on Linux it uses `copy_file_range()`.
|
||||
- **`symlink`**: Currently used only `file:` (and eventually `link:`) dependencies. To prevent infinite loops, it skips symlinking the `node_modules` folder.
|
||||
|
||||
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own `node_modules` folder or you pass `--preserve-symlinks` to `node`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
|
||||
If you install with `--backend=symlink`, Node.js won't resolve node_modules of dependencies unless each dependency has its own `node_modules` folder or you pass `--preserve-symlinks` to `node` or `bun`. See [Node.js documentation on `--preserve-symlinks`](https://nodejs.org/api/cli.html#--preserve-symlinks).
|
||||
|
||||
```bash
|
||||
$ bun install --backend symlink
|
||||
$ node --preserve-symlinks ./foo.js
|
||||
$ bun --preserve-symlinks ./foo.js
|
||||
```
|
||||
|
||||
Bun's runtime does not currently expose an equivalent of `--preserve-symlinks`.
|
||||
{% /details %}
|
||||
|
||||
81
docs/install/security-scanner-api.md
Normal file
@@ -0,0 +1,81 @@
|
||||
Bun's package manager can scan packages before installation, helping protect your applications from supply chain attacks and known vulnerabilities.
|
||||
|
||||
## Quick Start
|
||||
|
||||
Configure a security scanner in your `bunfig.toml`:
|
||||
|
||||
```toml
|
||||
[install.security]
|
||||
scanner = "@acme/bun-security-scanner"
|
||||
```
|
||||
|
||||
When configured, Bun will:
|
||||
|
||||
- Scan all packages before installation
|
||||
- Display security warnings and advisories
|
||||
- Cancel installation if critical vulnerabilities are found
|
||||
- Automatically disable auto-install for security
|
||||
|
||||
## How It Works
|
||||
|
||||
Security scanners analyze packages during `bun install`, `bun add`, and other package operations. They can detect:
|
||||
|
||||
- Known security vulnerabilities (CVEs)
|
||||
- Malicious packages
|
||||
- License compliance issues
|
||||
- ...and more!
|
||||
|
||||
### Security Levels
|
||||
|
||||
Scanners report issues at two severity levels:
|
||||
|
||||
- **`fatal`** - Installation stops immediately, exits with non-zero code
|
||||
- **`warn`** - In interactive terminals, prompts to continue; in CI, exits immediately
|
||||
|
||||
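For illustration, here is roughly what advisories at each level look like when returned from a scanner's `scan()` hook. This is a sketch based on the `Security.Advisory` type in `bun-types`; the package names and URL below are hypothetical.

```ts
import type { Security } from "bun";

// A fatal advisory: Bun cancels the install and exits with a non-zero code.
const fatal: Security.Advisory = {
  level: "fatal",
  package: "some-compromised-package", // hypothetical package name
  url: "https://example.com/advisories/1234", // hypothetical advisory link
  description: "Postinstall script exfiltrates environment variables",
};

// A warning: Bun prompts to continue in a TTY, or exits immediately in CI.
const warning: Security.Advisory = {
  level: "warn",
  package: "some-unmaintained-package", // hypothetical package name
  url: null,
  description: "Package is deprecated and no longer receives security fixes",
};
```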
## Using Pre-built Scanners
|
||||
|
||||
Many security companies publish Bun security scanners as npm packages that you can install and use immediately.
|
||||
|
||||
### Installing a Scanner
|
||||
|
||||
Install a security scanner from npm:
|
||||
|
||||
```bash
|
||||
$ bun add -d @acme/bun-security-scanner
|
||||
```
|
||||
|
||||
> **Note:** Consult your security scanner's documentation for their specific package name and installation instructions. Most scanners will be installed with `bun add`.
|
||||
|
||||
### Configuring the Scanner
|
||||
|
||||
After installation, configure it in your `bunfig.toml`:
|
||||
|
||||
```toml
|
||||
[install.security]
|
||||
scanner = "@acme/bun-security-scanner"
|
||||
```
|
||||
|
||||
### Enterprise Configuration
|
||||
|
||||
Some enterprise scanners might support authentication and/or configuration through environment variables:
|
||||
|
||||
```bash
|
||||
# This might go in ~/.bashrc, for example
|
||||
export SECURITY_API_KEY="your-api-key"
|
||||
|
||||
# The scanner will now use these credentials automatically
|
||||
bun install
|
||||
```
|
||||
|
||||
Consult your security scanner's documentation to learn which environment variables to set and if any additional configuration is required.
|
||||
|
||||
### Authoring your own scanner
|
||||
|
||||
For a complete example with tests and CI setup, see the official template:
|
||||
[github.com/oven-sh/security-scanner-template](https://github.com/oven-sh/security-scanner-template)
|
||||
|
||||
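As a lighter-weight starting point, the sketch below shows the general shape of a scanner module, based on the `Security.Scanner` type from `bun-types`. The named `scanner` export and the tarball-origin policy are assumptions for illustration; refer to the template for the exact export convention.

```ts
import type { Security } from "bun";

export const scanner: Security.Scanner = {
  // Version of the scanner API this module implements.
  version: "1",

  // Called by `bun install` / `bun add` with every proposed package,
  // including transitive dependencies.
  async scan({ packages }) {
    const advisories: Security.Advisory[] = [];

    for (const pkg of packages) {
      // Hypothetical policy: warn about tarballs not served from the npm registry.
      if (!pkg.tarball.startsWith("https://registry.npmjs.org/")) {
        advisories.push({
          level: "warn",
          package: pkg.name,
          url: null,
          description: `Tarball for ${pkg.name}@${pkg.version} is not hosted on registry.npmjs.org`,
        });
      }
    }

    return advisories;
  },
};
```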
## Related
|
||||
|
||||
- [Configuration (bunfig.toml)](/docs/runtime/bunfig#install-security-scanner)
|
||||
- [Package Manager](/docs/install)
|
||||
- [Security Scanner Template](https://github.com/oven-sh/security-scanner-template)
|
||||
@@ -219,6 +219,9 @@ export default {
|
||||
page("install/npmrc", ".npmrc support", {
|
||||
description: "Bun supports loading some configuration options from .npmrc",
|
||||
}),
|
||||
page("install/security-scanner-api", "Security Scanner API", {
|
||||
description: "Scan your project for vulnerabilities with Bun's security scanner API.",
|
||||
}),
|
||||
// page("install/utilities", "Utilities", {
|
||||
// description: "Use `bun pm` to introspect your global module cache or project dependency tree.",
|
||||
// }),
|
||||
@@ -383,6 +386,9 @@ export default {
|
||||
page("api/spawn", "Child processes", {
|
||||
description: `Spawn sync and async child processes with easily configurable input and output streams.`,
|
||||
}), // "`Bun.spawn`"),
|
||||
page("api/yaml", "YAML", {
|
||||
description: `Bun.YAML.parse(string) lets you parse YAML files in JavaScript`,
|
||||
}), // "`Bun.spawn`"),
|
||||
page("api/html-rewriter", "HTMLRewriter", {
|
||||
description: `Parse and transform HTML with Bun's native HTMLRewriter API, inspired by Cloudflare Workers.`,
|
||||
}), // "`HTMLRewriter`"),
|
||||
|
||||
@@ -195,12 +195,12 @@ Click the link in the right column to jump to the associated documentation.
|
||||
---
|
||||
|
||||
- Parsing & Formatting
|
||||
- [`Bun.semver`](https://bun.com/docs/api/semver), `Bun.TOML.parse`, [`Bun.color`](https://bun.com/docs/api/color)
|
||||
- [`Bun.semver`](https://bun.com/docs/api/semver), `Bun.TOML.parse`, [`Bun.YAML.parse`](https://bun.com/docs/api/yaml), [`Bun.color`](https://bun.com/docs/api/color)
|
||||
|
||||
---
|
||||
|
||||
- Low-level / Internals
|
||||
- `Bun.mmap`, `Bun.gc`, `Bun.generateHeapSnapshot`, [`bun:jsc`](https://bun.com/docs/api/bun-jsc)
|
||||
- `Bun.mmap`, `Bun.gc`, `Bun.generateHeapSnapshot`, [`bun:jsc`](https://bun.com/reference/bun/jsc)
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -94,6 +94,7 @@ Bun supports the following loaders:
|
||||
- `file`
|
||||
- `json`
|
||||
- `toml`
|
||||
- `yaml`
|
||||
- `wasm`
|
||||
- `napi`
|
||||
- `base64`
|
||||
@@ -496,6 +497,62 @@ Whether to generate a non-Bun lockfile alongside `bun.lock`. (A `bun.lock` will
|
||||
print = "yarn"
|
||||
```
|
||||
|
||||
### `install.security.scanner`
|
||||
|
||||
Configure a security scanner to scan packages for vulnerabilities before installation.
|
||||
|
||||
First, install a security scanner from npm:
|
||||
|
||||
```bash
|
||||
$ bun add -d @acme/bun-security-scanner
|
||||
```
|
||||
|
||||
Then configure it in your `bunfig.toml`:
|
||||
|
||||
```toml
|
||||
[install.security]
|
||||
scanner = "@acme/bun-security-scanner"
|
||||
```
|
||||
|
||||
When a security scanner is configured:
|
||||
|
||||
- Auto-install is automatically disabled for security
|
||||
- Packages are scanned before installation
|
||||
- Installation is cancelled if fatal issues are found
|
||||
- Security warnings are displayed during installation
|
||||
|
||||
Learn more about [using and writing security scanners](/docs/install/security).
|
||||
|
||||
### `install.linker`
|
||||
|
||||
Configure the default linker strategy. Default `"hoisted"`.
|
||||
|
||||
For complete documentation refer to [Package manager > Isolated installs](https://bun.com/docs/install/isolated).
|
||||
|
||||
```toml
|
||||
[install]
|
||||
linker = "hoisted"
|
||||
```
|
||||
|
||||
Valid values are:
|
||||
|
||||
{% table %}
|
||||
|
||||
- Value
|
||||
- Description
|
||||
|
||||
---
|
||||
|
||||
- `"hoisted"`
|
||||
- Link dependencies in a shared `node_modules` directory.
|
||||
|
||||
---
|
||||
|
||||
- `"isolated"`
|
||||
- Link dependencies inside each package installation.
|
||||
|
||||
{% /table %}
|
||||
|
||||
<!-- ## Debugging -->
|
||||
|
||||
<!--
|
||||
|
||||
@@ -8,6 +8,10 @@ Bun reads the following files automatically (listed in order of increasing prece
|
||||
- `.env.production`, `.env.development`, `.env.test` (depending on value of `NODE_ENV`)
|
||||
- `.env.local`
|
||||
|
||||
{% callout %}
|
||||
**Note:** When `NODE_ENV=test`, `.env.local` is **not** loaded. This ensures consistent test environments across different executions by preventing local overrides during testing. This behavior matches popular frameworks like [Next.js](https://nextjs.org/docs/pages/guides/environment-variables#test-environment-variables) and [Create React App](https://create-react-app.dev/docs/adding-custom-environment-variables/#what-other-env-files-can-be-used).
|
||||
{% /callout %}
|
||||
|
||||
```txt#.env
|
||||
FOO=hello
|
||||
BAR=world
|
||||
|
||||
@@ -92,15 +92,18 @@ every file before execution. Its transpiler can directly run TypeScript and JSX
|
||||
|
||||
## JSX
|
||||
|
||||
## JSON and TOML
|
||||
## JSON, TOML, and YAML
|
||||
|
||||
Source files can import a `*.json` or `*.toml` file to load its contents as a plain old JavaScript object.
|
||||
Source files can import `*.json`, `*.toml`, or `*.yaml` files to load their contents as plain JavaScript objects.
|
||||
|
||||
```ts
|
||||
import pkg from "./package.json";
|
||||
import bunfig from "./bunfig.toml";
|
||||
import config from "./config.yaml";
|
||||
```
|
||||
|
||||
See the [YAML API documentation](/docs/api/yaml) for more details on YAML support.
|
||||
|
||||
## WASI
|
||||
|
||||
{% callout %}
|
||||
|
||||
@@ -246,6 +246,65 @@ The module from which the component factory function (`createElement`, `jsx`, `j
|
||||
|
||||
{% /table %}
|
||||
|
||||
### `jsxSideEffects`
|
||||
|
||||
By default, Bun marks JSX expressions as `/* @__PURE__ */` so they can be removed during bundling if they are unused (known as "dead code elimination" or "tree shaking"). Set `jsxSideEffects` to `true` to prevent this behavior.
|
||||
|
||||
{% table %}
|
||||
|
||||
- Compiler options
|
||||
- Transpiled output
|
||||
|
||||
---
|
||||
|
||||
- ```jsonc
|
||||
{
|
||||
"jsx": "react",
|
||||
// jsxSideEffects is false by default
|
||||
}
|
||||
```
|
||||
|
||||
- ```tsx
|
||||
// JSX expressions are marked as pure
|
||||
/* @__PURE__ */ React.createElement("div", null, "Hello");
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
- ```jsonc
|
||||
{
|
||||
"jsx": "react",
|
||||
"jsxSideEffects": true,
|
||||
}
|
||||
```
|
||||
|
||||
- ```tsx
|
||||
// JSX expressions are not marked as pure
|
||||
React.createElement("div", null, "Hello");
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
- ```jsonc
|
||||
{
|
||||
"jsx": "react-jsx",
|
||||
"jsxSideEffects": true,
|
||||
}
|
||||
```
|
||||
|
||||
- ```tsx
|
||||
// Automatic runtime also respects jsxSideEffects
|
||||
jsx("div", { children: "Hello" });
|
||||
```
|
||||
|
||||
{% /table %}
|
||||
|
||||
This option is also available as a CLI flag:
|
||||
|
||||
```bash
|
||||
$ bun build --jsx-side-effects
|
||||
```
|
||||
|
||||
### JSX pragma
|
||||
|
||||
All of these values can be set on a per-file basis using _pragmas_. A pragma is a special comment that sets a compiler option in a particular file.
|
||||
|
||||
@@ -52,15 +52,18 @@ Hello world!
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
## JSON and TOML
|
||||
## JSON, TOML, and YAML
|
||||
|
||||
JSON and TOML files can be directly imported from a source file. The contents will be loaded and returned as a JavaScript object.
|
||||
JSON, TOML, and YAML files can be directly imported from a source file. The contents will be loaded and returned as a JavaScript object.
|
||||
|
||||
```ts
|
||||
import pkg from "./package.json";
|
||||
import data from "./data.toml";
|
||||
import config from "./config.yaml";
|
||||
```
|
||||
|
||||
For more details on YAML support, see the [YAML API documentation](/docs/api/yaml).
|
||||
|
||||
## WASI
|
||||
|
||||
{% callout %}
|
||||
|
||||
@@ -12,6 +12,8 @@ test("NODE_ENV is set to test", () => {
|
||||
});
|
||||
```
|
||||
|
||||
When `NODE_ENV` is set to `"test"`, Bun will not load `.env.local` files. This ensures consistent test environments across different executions by preventing local overrides during testing. Instead, use `.env.test` for test-specific environment variables, which should be committed to your repository for consistency across all developers and CI environments.
|
||||
|
||||
#### `$TZ` environment variable
|
||||
|
||||
By default, all `bun test` runs use UTC (`Etc/UTC`) as the time zone unless overridden by the `TZ` environment variable. This ensures consistent date and time behavior across different development environments.
|
||||
|
||||
@@ -1,15 +1,16 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "bun",
|
||||
"version": "1.2.21",
|
||||
"version": "1.2.22",
|
||||
"workspaces": [
|
||||
"./packages/bun-types",
|
||||
"./packages/@types/bun"
|
||||
],
|
||||
"devDependencies": {
|
||||
"bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
|
||||
"@lezer/common": "^1.2.3",
|
||||
"@lezer/cpp": "^1.1.3",
|
||||
"@types/bun": "workspace:*",
|
||||
"bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
|
||||
"esbuild": "^0.21.4",
|
||||
"mitata": "^0.1.11",
|
||||
"peechy": "0.4.34",
|
||||
@@ -31,7 +32,7 @@
|
||||
"watch-windows": "bun run zig build check-windows --watch -fincremental --prominent-compile-errors --global-cache-dir build/debug/zig-check-cache --zig-lib-dir vendor/zig/lib",
|
||||
"bd:v": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug",
|
||||
"bd": "BUN_DEBUG_QUIET_LOGS=1 bun --silent bd:v",
|
||||
"build:debug": "export COMSPEC=\"C:\\Windows\\System32\\cmd.exe\" && bun scripts/glob-sources.mjs > /dev/null && bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug --log-level=NOTICE",
|
||||
"build:debug": "export COMSPEC=\"C:\\Windows\\System32\\cmd.exe\" && bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug --log-level=NOTICE",
|
||||
"build:debug:asan": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_ASAN=ON -B build/debug-asan --log-level=NOTICE",
|
||||
"build:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -B build/release",
|
||||
"build:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh",
|
||||
|
||||
@@ -18,9 +18,11 @@ typedef enum {
|
||||
BUN_LOADER_BASE64 = 10,
|
||||
BUN_LOADER_DATAURL = 11,
|
||||
BUN_LOADER_TEXT = 12,
|
||||
BUN_LOADER_HTML = 17,
|
||||
BUN_LOADER_YAML = 18,
|
||||
} BunLoader;
|
||||
|
||||
const BunLoader BUN_LOADER_MAX = BUN_LOADER_TEXT;
|
||||
const BunLoader BUN_LOADER_MAX = BUN_LOADER_YAML;
|
||||
|
||||
typedef struct BunLogOptions {
|
||||
size_t __struct_size;
|
||||
|
||||
1221
packages/bun-types/bun.d.ts
vendored
File diff suppressed because it is too large
31
packages/bun-types/deprecated.d.ts
vendored
@@ -1,4 +1,35 @@
|
||||
declare module "bun" {
|
||||
/** @deprecated This type is unused in Bun's types and might be removed in the near future */
|
||||
type Platform =
|
||||
| "aix"
|
||||
| "android"
|
||||
| "darwin"
|
||||
| "freebsd"
|
||||
| "haiku"
|
||||
| "linux"
|
||||
| "openbsd"
|
||||
| "sunos"
|
||||
| "win32"
|
||||
| "cygwin"
|
||||
| "netbsd";
|
||||
|
||||
/** @deprecated This type is unused in Bun's types and might be removed in the near future */
|
||||
type Architecture = "arm" | "arm64" | "ia32" | "mips" | "mipsel" | "ppc" | "ppc64" | "s390" | "s390x" | "x64";
|
||||
|
||||
/** @deprecated This type is unused in Bun's types and might be removed in the near future */
|
||||
type UncaughtExceptionListener = (error: Error, origin: UncaughtExceptionOrigin) => void;
|
||||
|
||||
/**
|
||||
* Most of the time the unhandledRejection will be an Error, but this should not be relied upon
|
||||
* as *anything* can be thrown/rejected, it is therefore unsafe to assume that the value is an Error.
|
||||
*
|
||||
* @deprecated This type is unused in Bun's types and might be removed in the near future
|
||||
*/
|
||||
type UnhandledRejectionListener = (reason: unknown, promise: Promise<unknown>) => void;
|
||||
|
||||
/** @deprecated This type is unused in Bun's types and might be removed in the near future */
|
||||
type MultipleResolveListener = (type: MultipleResolveType, promise: Promise<unknown>, value: unknown) => void;
|
||||
|
||||
/**
|
||||
* Consume all data from a {@link ReadableStream} until it closes or errors.
|
||||
*
|
||||
|
||||
4
packages/bun-types/experimental.d.ts
vendored
@@ -191,7 +191,9 @@ declare module "bun" {
|
||||
* };
|
||||
* ```
|
||||
*/
|
||||
export type SSGPage<Params extends SSGParamsLike = SSGParamsLike> = React.ComponentType<SSGPageProps<Params>>;
|
||||
export type SSGPage<Params extends SSGParamsLike = SSGParamsLike> = import("react").ComponentType<
|
||||
SSGPageProps<Params>
|
||||
>;
|
||||
|
||||
/**
|
||||
* getStaticPaths is Bun's implementation of SSG (Static Site Generation) path determination.
|
||||
|
||||
10
packages/bun-types/extensions.d.ts
vendored
@@ -8,6 +8,16 @@ declare module "*.toml" {
|
||||
export = contents;
|
||||
}
|
||||
|
||||
declare module "*.yaml" {
|
||||
var contents: any;
|
||||
export = contents;
|
||||
}
|
||||
|
||||
declare module "*.yml" {
|
||||
var contents: any;
|
||||
export = contents;
|
||||
}
|
||||
|
||||
declare module "*.jsonc" {
|
||||
var contents: any;
|
||||
export = contents;
|
||||
|
||||
19
packages/bun-types/ffi.d.ts
vendored
@@ -219,44 +219,39 @@ declare module "bun:ffi" {
|
||||
|
||||
/**
|
||||
* int64 is a 64-bit signed integer
|
||||
*
|
||||
* This is not implemented yet!
|
||||
*/
|
||||
int64_t = 7,
|
||||
/**
|
||||
* i64 is a 64-bit signed integer
|
||||
*
|
||||
* This is not implemented yet!
|
||||
*/
|
||||
i64 = 7,
|
||||
|
||||
/**
|
||||
* 64-bit unsigned integer
|
||||
*
|
||||
* This is not implemented yet!
|
||||
*/
|
||||
uint64_t = 8,
|
||||
/**
|
||||
* 64-bit unsigned integer
|
||||
*
|
||||
* This is not implemented yet!
|
||||
*/
|
||||
u64 = 8,
|
||||
|
||||
/**
|
||||
* Doubles are not supported yet!
|
||||
* IEEE-754 double precision float
|
||||
*/
|
||||
double = 9,
|
||||
|
||||
/**
|
||||
* Doubles are not supported yet!
|
||||
* Alias of {@link FFIType.double}
|
||||
*/
|
||||
f64 = 9,
|
||||
|
||||
/**
|
||||
* Floats are not supported yet!
|
||||
* IEEE-754 single precision float
|
||||
*/
|
||||
float = 10,
|
||||
|
||||
/**
|
||||
* Floats are not supported yet!
|
||||
* Alias of {@link FFIType.float}
|
||||
*/
|
||||
f32 = 10,
|
||||
|
||||
|
||||
24
packages/bun-types/globals.d.ts
vendored
@@ -1564,6 +1564,12 @@ declare var AbortController: Bun.__internal.UseLibDomIfAvailable<
|
||||
}
|
||||
>;
|
||||
|
||||
interface AbortSignal extends EventTarget {
|
||||
readonly aborted: boolean;
|
||||
onabort: ((this: AbortSignal, ev: Event) => any) | null;
|
||||
readonly reason: any;
|
||||
throwIfAborted(): void;
|
||||
}
|
||||
declare var AbortSignal: Bun.__internal.UseLibDomIfAvailable<
|
||||
"AbortSignal",
|
||||
{
|
||||
@@ -1948,3 +1954,21 @@ declare namespace fetch {
|
||||
): void;
|
||||
}
|
||||
//#endregion
|
||||
|
||||
interface RegExpConstructor {
|
||||
/**
|
||||
* Escapes any potential regex syntax characters in a string, and returns a
|
||||
* new string that can be safely used as a literal pattern for the RegExp()
|
||||
* constructor.
|
||||
*
|
||||
* [MDN Reference](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/escape)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const re = new RegExp(RegExp.escape("foo.bar"));
|
||||
* re.test("foo.bar"); // true
|
||||
* re.test("foo!bar"); // false
|
||||
* ```
|
||||
*/
|
||||
escape(string: string): string;
|
||||
}
|
||||
|
||||
2
packages/bun-types/index.d.ts
vendored
@@ -21,6 +21,8 @@
|
||||
/// <reference path="./redis.d.ts" />
|
||||
/// <reference path="./shell.d.ts" />
|
||||
/// <reference path="./experimental.d.ts" />
|
||||
/// <reference path="./sql.d.ts" />
|
||||
/// <reference path="./security.d.ts" />
|
||||
|
||||
/// <reference path="./bun.ns.d.ts" />
|
||||
|
||||
|
||||
96
packages/bun-types/overrides.d.ts
vendored
@@ -24,6 +24,12 @@ declare module "stream/web" {
|
||||
}
|
||||
}
|
||||
|
||||
declare module "url" {
|
||||
interface URLSearchParams {
|
||||
toJSON(): Record<string, string>;
|
||||
}
|
||||
}
|
||||
|
||||
declare global {
|
||||
namespace NodeJS {
|
||||
interface ProcessEnv extends Bun.Env {}
|
||||
@@ -168,6 +174,96 @@ declare global {
|
||||
UV_ENODATA: number;
|
||||
UV_EUNATCH: number;
|
||||
};
|
||||
binding(m: "http_parser"): {
|
||||
methods: [
|
||||
"DELETE",
|
||||
"GET",
|
||||
"HEAD",
|
||||
"POST",
|
||||
"PUT",
|
||||
"CONNECT",
|
||||
"OPTIONS",
|
||||
"TRACE",
|
||||
"COPY",
|
||||
"LOCK",
|
||||
"MKCOL",
|
||||
"MOVE",
|
||||
"PROPFIND",
|
||||
"PROPPATCH",
|
||||
"SEARCH",
|
||||
"UNLOCK",
|
||||
"BIND",
|
||||
"REBIND",
|
||||
"UNBIND",
|
||||
"ACL",
|
||||
"REPORT",
|
||||
"MKACTIVITY",
|
||||
"CHECKOUT",
|
||||
"MERGE",
|
||||
"M - SEARCH",
|
||||
"NOTIFY",
|
||||
"SUBSCRIBE",
|
||||
"UNSUBSCRIBE",
|
||||
"PATCH",
|
||||
"PURGE",
|
||||
"MKCALENDAR",
|
||||
"LINK",
|
||||
"UNLINK",
|
||||
"SOURCE",
|
||||
"QUERY",
|
||||
];
|
||||
allMethods: [
|
||||
"DELETE",
|
||||
"GET",
|
||||
"HEAD",
|
||||
"POST",
|
||||
"PUT",
|
||||
"CONNECT",
|
||||
"OPTIONS",
|
||||
"TRACE",
|
||||
"COPY",
|
||||
"LOCK",
|
||||
"MKCOL",
|
||||
"MOVE",
|
||||
"PROPFIND",
|
||||
"PROPPATCH",
|
||||
"SEARCH",
|
||||
"UNLOCK",
|
||||
"BIND",
|
||||
"REBIND",
|
||||
"UNBIND",
|
||||
"ACL",
|
||||
"REPORT",
|
||||
"MKACTIVITY",
|
||||
"CHECKOUT",
|
||||
"MERGE",
|
||||
"M - SEARCH",
|
||||
"NOTIFY",
|
||||
"SUBSCRIBE",
|
||||
"UNSUBSCRIBE",
|
||||
"PATCH",
|
||||
"PURGE",
|
||||
"MKCALENDAR",
|
||||
"LINK",
|
||||
"UNLINK",
|
||||
"SOURCE",
|
||||
"PRI",
|
||||
"DESCRIBE",
|
||||
"ANNOUNCE",
|
||||
"SETUP",
|
||||
"PLAY",
|
||||
"PAUSE",
|
||||
"TEARDOWN",
|
||||
"GET_PARAMETER",
|
||||
"SET_PARAMETER",
|
||||
"REDIRECT",
|
||||
"RECORD",
|
||||
"FLUSH",
|
||||
"QUERY",
|
||||
];
|
||||
HTTPParser: unknown;
|
||||
ConnectionsList: unknown;
|
||||
};
|
||||
binding(m: string): object;
|
||||
}
|
||||
|
||||
|
||||
44
packages/bun-types/redis.d.ts
vendored
@@ -574,6 +574,50 @@ declare module "bun" {
|
||||
*/
|
||||
getex(key: RedisClient.KeyLike): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the value of a key and set its expiration in seconds
|
||||
* @param key The key to get
|
||||
* @param ex Set the specified expire time, in seconds
|
||||
* @param seconds The number of seconds until expiration
|
||||
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
|
||||
*/
|
||||
getex(key: RedisClient.KeyLike, ex: "EX", seconds: number): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the value of a key and set its expiration in milliseconds
|
||||
* @param key The key to get
|
||||
* @param px Set the specified expire time, in milliseconds
|
||||
* @param milliseconds The number of milliseconds until expiration
|
||||
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
|
||||
*/
|
||||
getex(key: RedisClient.KeyLike, px: "PX", milliseconds: number): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the value of a key and set its expiration at a specific Unix timestamp in seconds
|
||||
* @param key The key to get
|
||||
* @param exat Set the specified Unix time at which the key will expire, in seconds
|
||||
* @param timestampSeconds The Unix timestamp in seconds
|
||||
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
|
||||
*/
|
||||
getex(key: RedisClient.KeyLike, exat: "EXAT", timestampSeconds: number): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the value of a key and set its expiration at a specific Unix timestamp in milliseconds
|
||||
* @param key The key to get
|
||||
* @param pxat Set the specified Unix time at which the key will expire, in milliseconds
|
||||
* @param timestampMilliseconds The Unix timestamp in milliseconds
|
||||
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
|
||||
*/
|
||||
getex(key: RedisClient.KeyLike, pxat: "PXAT", timestampMilliseconds: number): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Get the value of a key and remove its expiration
|
||||
* @param key The key to get
|
||||
* @param persist Remove the expiration from the key
|
||||
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
|
||||
*/
|
||||
getex(key: RedisClient.KeyLike, persist: "PERSIST"): Promise<string | null>;
|
||||
|
||||
/**
|
||||
* Ping the server
|
||||
* @returns Promise that resolves with "PONG" if the server is reachable, or throws an error if the server is not reachable
|
||||
|
||||
101
packages/bun-types/security.d.ts
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
declare module "bun" {
|
||||
/**
|
||||
* `bun install` security related declarations
|
||||
*/
|
||||
export namespace Security {
|
||||
export interface Package {
|
||||
/**
|
||||
* The name of the package
|
||||
*/
|
||||
name: string;
|
||||
|
||||
/**
|
||||
* The resolved version to be installed that matches the requested range.
|
||||
*
|
||||
* This is the exact version string, **not** a range.
|
||||
*/
|
||||
version: string;
|
||||
|
||||
/**
|
||||
* The URL of the tgz of this package that Bun will download
|
||||
*/
|
||||
tarball: string;
|
||||
|
||||
/**
|
||||
* The range that was requested by the command
|
||||
*
|
||||
* This could be a tag like `beta` or a semver range like `>=4.0.0`
|
||||
*/
|
||||
requestedRange: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Advisory represents the result of a security scan of a package
|
||||
*/
|
||||
export interface Advisory {
|
||||
/**
|
||||
* Level represents the degree of danger for a security advisory
|
||||
*
|
||||
* Bun behaves differently depending on the values returned from the
|
||||
* {@link Scanner.scan `scan()`} hook:
|
||||
*
|
||||
* > In any case, Bun *always* pretty prints *all* the advisories,
|
||||
* > but...
|
||||
* >
|
||||
* > → if any **fatal**, Bun will immediately cancel the installation
|
||||
* > and quit with a non-zero exit code
|
||||
* >
|
||||
* > → else if any **warn**, Bun will either ask the user if they'd like
|
||||
* > to continue with the install if in a TTY environment, or
|
||||
* > immediately exit if not.
|
||||
*/
|
||||
level: "fatal" | "warn";
|
||||
|
||||
/**
|
||||
* The name of the package attempting to be installed.
|
||||
*/
|
||||
package: string;
|
||||
|
||||
/**
|
||||
* If available, this is a url linking to a CVE or report online so
|
||||
* users can learn more about the advisory.
|
||||
*/
|
||||
url: string | null;
|
||||
|
||||
/**
|
||||
* If available, this is a brief description of the advisory that Bun
|
||||
* will print to the user.
|
||||
*/
|
||||
description: string | null;
|
||||
}
|
||||
|
||||
export interface Scanner {
|
||||
/**
|
||||
* This is the version of the scanner implementation. It may change in
|
||||
* future versions, so we will use this version to discriminate between
|
||||
* such versions. It's entirely possible this API changes in the future
|
||||
* so much that version 1 would no longer be supported.
|
||||
*
|
||||
* The version is required because third-party scanner package versions
|
||||
* are inherently unrelated to Bun versions
|
||||
*/
|
||||
version: "1";
|
||||
|
||||
/**
|
||||
* Perform an advisory check when a user runs `bun add <package>
|
||||
* [...packages]` or other related/similar commands.
|
||||
*
|
||||
* If this function throws an error, Bun will immediately stop the
|
||||
* install process and print the error to the user.
|
||||
*
|
||||
* @param info An object containing an array of packages to be added.
|
||||
* The package array will contain all proposed dependencies, including
|
||||
* transitive ones. More simply, that means it will include dependencies
|
||||
* of the packages the user wants to add.
|
||||
*
|
||||
* @returns A list of advisories.
|
||||
*/
|
||||
scan: (info: { packages: Package[] }) => Promise<Advisory[]>;
|
||||
}
|
||||
}
|
||||
}
|
||||
2
packages/bun-types/shell.d.ts
vendored
@@ -211,7 +211,7 @@ declare module "bun" {
|
||||
* try {
|
||||
* const result = await $`exit 1`;
|
||||
* } catch (error) {
|
||||
* if (error instanceof ShellError) {
|
||||
* if (error instanceof $.ShellError) {
|
||||
* console.log(error.exitCode); // 1
|
||||
* }
|
||||
* }
|
||||
|
||||
809
packages/bun-types/sql.d.ts
vendored
Normal file
@@ -0,0 +1,809 @@
|
||||
import type * as BunSQLite from "bun:sqlite";
|
||||
|
||||
declare module "bun" {
|
||||
/**
|
||||
* Represents a reserved connection from the connection pool. Extends SQL with
|
||||
* additional release functionality
|
||||
*/
|
||||
interface ReservedSQL extends SQL, Disposable {
|
||||
/**
|
||||
* Releases the client back to the connection pool
|
||||
*/
|
||||
release(): void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a client within a transaction context. Extends SQL with savepoint
|
||||
* functionality
|
||||
*/
|
||||
interface TransactionSQL extends SQL {
|
||||
/**
|
||||
* Creates a savepoint within the current transaction
|
||||
*/
|
||||
savepoint<T>(name: string, fn: SQL.SavepointContextCallback<T>): Promise<T>;
|
||||
savepoint<T>(fn: SQL.SavepointContextCallback<T>): Promise<T>;
|
||||
|
||||
/**
|
||||
* The reserve method pulls out a connection from the pool, and returns a
|
||||
* client that wraps the single connection.
|
||||
*
|
||||
* Using reserve() inside of a transaction will return a brand new
|
||||
* connection, not one related to the transaction. This matches the
|
||||
* behaviour of the `postgres` package.
|
||||
*/
|
||||
reserve(): Promise<ReservedSQL>;
|
||||
}
|
||||
|
||||
namespace SQL {
|
||||
class SQLError extends Error {
|
||||
constructor(message: string);
|
||||
}
|
||||
|
||||
class PostgresError extends SQLError {
|
||||
public readonly code: string;
|
||||
public readonly errno: string | undefined;
|
||||
public readonly detail: string | undefined;
|
||||
public readonly hint: string | undefined;
|
||||
public readonly severity: string | undefined;
|
||||
public readonly position: string | undefined;
|
||||
public readonly internalPosition: string | undefined;
|
||||
public readonly internalQuery: string | undefined;
|
||||
public readonly where: string | undefined;
|
||||
public readonly schema: string | undefined;
|
||||
public readonly table: string | undefined;
|
||||
public readonly column: string | undefined;
|
||||
public readonly dataType: string | undefined;
|
||||
public readonly constraint: string | undefined;
|
||||
public readonly file: string | undefined;
|
||||
public readonly line: string | undefined;
|
||||
public readonly routine: string | undefined;
|
||||
|
||||
constructor(
|
||||
message: string,
|
||||
options: {
|
||||
code: string;
|
||||
errno?: string | undefined;
|
||||
detail?: string;
|
||||
hint?: string | undefined;
|
||||
severity?: string | undefined;
|
||||
position?: string | undefined;
|
||||
internalPosition?: string;
|
||||
internalQuery?: string;
|
||||
where?: string | undefined;
|
||||
schema?: string;
|
||||
table?: string | undefined;
|
||||
column?: string | undefined;
|
||||
dataType?: string | undefined;
|
||||
constraint?: string;
|
||||
file?: string | undefined;
|
||||
line?: string | undefined;
|
||||
routine?: string | undefined;
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
class MySQLError extends SQLError {
|
||||
public readonly code: string;
|
||||
public readonly errno: number | undefined;
|
||||
public readonly sqlState: string | undefined;
|
||||
constructor(message: string, options: { code: string; errno: number | undefined; sqlState: string | undefined });
|
||||
}
|
||||
|
||||
class SQLiteError extends SQLError {
|
||||
public readonly code: string;
|
||||
public readonly errno: number;
|
||||
public readonly byteOffset?: number | undefined;
|
||||
|
||||
constructor(message: string, options: { code: string; errno: number; byteOffset?: number | undefined });
|
||||
}
|
||||
|
||||
type AwaitPromisesArray<T extends Array<PromiseLike<any>>> = {
|
||||
[K in keyof T]: Awaited<T[K]>;
|
||||
};
|
||||
|
||||
type ContextCallbackResult<T> = T extends Array<PromiseLike<any>> ? AwaitPromisesArray<T> : Awaited<T>;
|
||||
type ContextCallback<T, SQL> = (sql: SQL) => Bun.MaybePromise<T>;
|
||||
|
||||
interface SQLiteOptions extends BunSQLite.DatabaseOptions {
|
||||
adapter?: "sqlite";
|
||||
|
||||
/**
|
||||
* Specify the path to the database file
|
||||
*
|
||||
* Examples:
|
||||
*
|
||||
* - `sqlite://:memory:`
|
||||
* - `sqlite://./path/to/database.db`
|
||||
* - `sqlite:///Users/bun/projects/my-app/database.db`
|
||||
* - `./dev.db`
|
||||
* - `:memory:`
|
||||
*
|
||||
* @default ":memory:"
|
||||
*/
|
||||
filename?: URL | ":memory:" | (string & {}) | undefined;
|
||||
|
||||
/**
|
||||
* Callback executed when a connection attempt completes (SQLite)
|
||||
* Receives an Error on failure, or null on success.
|
||||
*/
|
||||
onconnect?: ((err: Error | null) => void) | undefined;
|
||||
|
||||
/**
|
||||
* Callback executed when a connection is closed (SQLite)
|
||||
* Receives the closing Error or null.
|
||||
*/
|
||||
onclose?: ((err: Error | null) => void) | undefined;
|
||||
}
|
||||
|
||||
interface PostgresOrMySQLOptions {
|
||||
/**
|
||||
* Connection URL (can be string or URL object)
|
||||
*/
|
||||
url?: URL | string | undefined;
|
||||
|
||||
/**
|
||||
* Database server hostname
|
||||
* @default "localhost"
|
||||
*/
|
||||
host?: string | undefined;
|
||||
|
||||
/**
|
||||
* Database server hostname (alias for host)
|
||||
* @deprecated Prefer {@link host}
|
||||
* @default "localhost"
|
||||
*/
|
||||
hostname?: string | undefined;
|
||||
|
||||
/**
|
||||
* Database server port number
|
||||
* @default 5432
|
||||
*/
|
||||
port?: number | string | undefined;
|
||||
|
||||
/**
|
||||
* Database user for authentication
|
||||
* @default "postgres"
|
||||
*/
|
||||
username?: string | undefined;
|
||||
|
||||
/**
|
||||
* Database user for authentication (alias for username)
|
||||
* @deprecated Prefer {@link username}
|
||||
* @default "postgres"
|
||||
*/
|
||||
user?: string | undefined;
|
||||
|
||||
/**
|
||||
* Database password for authentication
|
||||
* @default ""
|
||||
*/
|
||||
password?: string | (() => MaybePromise<string>) | undefined;
|
||||
|
||||
/**
|
||||
* Database password for authentication (alias for password)
|
||||
* @deprecated Prefer {@link password}
|
||||
* @default ""
|
||||
*/
|
||||
pass?: string | (() => MaybePromise<string>) | undefined;
|
||||
|
||||
/**
|
||||
* Name of the database to connect to
|
||||
* @default The username value
|
||||
*/
|
||||
database?: string | undefined;
|
||||
|
||||
/**
|
||||
* Name of the database to connect to (alias for database)
|
||||
* @deprecated Prefer {@link database}
|
||||
* @default The username value
|
||||
*/
|
||||
db?: string | undefined;
|
||||
|
||||
/**
|
||||
* Database adapter/driver to use
|
||||
* @default "postgres"
|
||||
*/
|
||||
adapter?: "postgres" | "mysql" | "mariadb";
|
||||
|
||||
/**
|
||||
* Maximum time in seconds to wait for connection to become available
|
||||
* @default 0 (no timeout)
|
||||
*/
|
||||
idleTimeout?: number | undefined;
|
||||
|
||||
/**
|
||||
* Maximum time in seconds to wait for connection to become available (alias for idleTimeout)
|
||||
* @deprecated Prefer {@link idleTimeout}
|
||||
* @default 0 (no timeout)
|
||||
*/
|
||||
idle_timeout?: number | undefined;
|
||||
|
||||
/**
|
||||
* Maximum time in seconds to wait when establishing a connection
|
||||
* @default 30
|
||||
*/
|
||||
connectionTimeout?: number | undefined;
|
||||
|
||||
/**
|
||||
* Maximum time in seconds to wait when establishing a connection (alias for connectionTimeout)
|
||||
* @deprecated Prefer {@link connectionTimeout}
|
||||
* @default 30
|
||||
*/
|
||||
connection_timeout?: number | undefined;
|
||||
|
||||
/**
|
||||
* Maximum time in seconds to wait when establishing a connection (alias
|
||||
* for connectionTimeout)
|
||||
* @deprecated Prefer {@link connectionTimeout}
|
||||
* @default 30
|
||||
*/
|
||||
connectTimeout?: number | undefined;
|
||||
|
||||
/**
|
||||
* Maximum time in seconds to wait when establishing a connection (alias
|
||||
* for connectionTimeout)
|
||||
* @deprecated Prefer {@link connectionTimeout}
|
||||
* @default 30
|
||||
*/
|
||||
connect_timeout?: number | undefined;
|
||||
|
||||
/**
|
||||
* Maximum lifetime in seconds of a connection
|
||||
* @default 0 (no maximum lifetime)
|
||||
*/
|
||||
maxLifetime?: number | undefined;
|
||||
|
||||
/**
|
||||
* Maximum lifetime in seconds of a connection (alias for maxLifetime)
|
||||
* @deprecated Prefer {@link maxLifetime}
|
||||
* @default 0 (no maximum lifetime)
|
||||
*/
|
||||
max_lifetime?: number | undefined;
|
||||
|
||||
/**
|
||||
* Whether to use TLS/SSL for the connection
|
||||
* @default false
|
||||
*/
|
||||
tls?: TLSOptions | boolean | undefined;
|
||||
|
||||
/**
|
||||
* Whether to use TLS/SSL for the connection (alias for tls)
|
||||
* @default false
|
||||
*/
|
||||
ssl?: TLSOptions | boolean | undefined;
|
||||
|
||||
/**
|
||||
* Unix domain socket path for connection
|
||||
* @default undefined
|
||||
*/
|
||||
path?: string | undefined;
|
||||
|
||||
/**
|
||||
* Callback executed when a connection attempt completes
|
||||
* Receives an Error on failure, or null on success.
|
||||
*/
|
||||
onconnect?: ((err: Error | null) => void) | undefined;
|
||||
|
||||
/**
|
||||
* Callback executed when a connection is closed
|
||||
* Receives the closing Error or null.
|
||||
*/
|
||||
onclose?: ((err: Error | null) => void) | undefined;
|
||||
|
||||
/**
|
||||
* Postgres client runtime configuration options
|
||||
*
|
||||
* @see https://www.postgresql.org/docs/current/runtime-config-client.html
|
||||
*/
|
||||
connection?: Record<string, string | boolean | number> | undefined;
|
||||
|
||||
/**
|
||||
* Maximum number of connections in the pool
|
||||
* @default 10
|
||||
*/
|
||||
max?: number | undefined;
|
||||
|
||||
/**
|
||||
* By default values outside i32 range are returned as strings. If this is
|
||||
* true, values outside i32 range are returned as BigInts.
|
||||
* @default false
|
||||
*/
|
||||
bigint?: boolean | undefined;
|
||||
|
||||
/**
|
||||
* Automatic creation of prepared statements
|
||||
* @default true
|
||||
*/
|
||||
prepare?: boolean | undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Configuration options for SQL client connection and behavior
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const config: Bun.SQL.Options = {
|
||||
* host: 'localhost',
|
||||
* port: 5432,
|
||||
* user: 'dbuser',
|
||||
* password: 'secretpass',
|
||||
* database: 'myapp',
|
||||
* idleTimeout: 30,
|
||||
* max: 20,
|
||||
* onconnect: (client) => {
|
||||
* console.log('Connected to database');
|
||||
* }
|
||||
* };
|
||||
* ```
|
||||
*/
|
||||
type Options = SQLiteOptions | PostgresOrMySQLOptions;
|
||||
|
||||
/**
|
||||
* Represents a SQL query that can be executed, with additional control
|
||||
* methods Extends Promise to allow for async/await usage
|
||||
*/
|
||||
interface Query<T> extends Promise<T> {
|
||||
/**
|
||||
* Indicates if the query is currently executing
|
||||
*/
|
||||
active: boolean;
|
||||
|
||||
/**
|
||||
* Indicates if the query has been cancelled
|
||||
*/
|
||||
cancelled: boolean;
|
||||
|
||||
/**
|
||||
* Cancels the executing query
|
||||
*/
|
||||
cancel(): Query<T>;
|
||||
|
||||
/**
|
||||
* Executes the query as a simple query, no parameters are allowed but can
|
||||
* execute multiple commands separated by semicolons
|
||||
*/
|
||||
simple(): Query<T>;
|
||||
|
||||
/**
|
||||
* Executes the query
|
||||
*/
|
||||
execute(): Query<T>;
|
||||
|
||||
/**
|
||||
* Returns the raw query result
|
||||
*/
|
||||
raw(): Query<T>;
|
||||
|
||||
/**
|
||||
* Returns only the values from the query result
|
||||
*/
|
||||
values(): Query<T>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Callback function type for transaction contexts
|
||||
* @param sql Function to execute SQL queries within the transaction
|
||||
*/
|
||||
type TransactionContextCallback<T> = ContextCallback<T, TransactionSQL>;
|
||||
|
||||
/**
|
||||
* Callback function type for savepoint contexts
|
||||
* @param sql Function to execute SQL queries within the savepoint
|
||||
*/
|
||||
type SavepointContextCallback<T> = ContextCallback<T, SavepointSQL>;
|
||||
|
||||
/**
|
||||
* SQL.Helper represents a parameter or serializable
|
||||
* value inside of a query.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const helper = sql(users, 'id');
|
||||
* await sql`insert into users ${helper}`;
|
||||
* ```
|
||||
*/
|
||||
interface Helper<T> {
|
||||
readonly value: T[];
|
||||
readonly columns: (keyof T)[];
|
||||
}
|
||||
}
|
||||
|
||||
interface SQL extends AsyncDisposable {
|
||||
/**
|
||||
* Executes a SQL query using template literals
|
||||
* @example
|
||||
* ```ts
|
||||
* const [user] = await sql<Users[]>`select * from users where id = ${1}`;
|
||||
* ```
|
||||
*/
|
||||
<T = any>(strings: TemplateStringsArray, ...values: unknown[]): SQL.Query<T>;
|
||||
|
||||
/**
|
||||
* Execute a SQL query using a string
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const users = await sql<User[]>`SELECT * FROM users WHERE id = ${1}`;
|
||||
* ```
|
||||
*/
|
||||
<T = any>(string: string): SQL.Query<T>;
|
||||
|
||||
/**
|
||||
* Helper function for inserting an object into a query
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Insert an object
|
||||
* const result = await sql`insert into users ${sql(users)} returning *`;
|
||||
*
|
||||
* // Or pick specific columns
|
||||
* const result = await sql`insert into users ${sql(users, "id", "name")} returning *`;
|
||||
*
|
||||
* // Or a single object
|
||||
* const result = await sql`insert into users ${sql(user)} returning *`;
|
||||
* ```
|
||||
*/
|
||||
<T extends { [Key in PropertyKey]: unknown }>(obj: T | T[] | readonly T[]): SQL.Helper<T>; // Contributor note: This is the same as the signature below with the exception of the columns and the Pick<T, Keys>
|
||||
|
||||
/**
|
||||
* Helper function for inserting an object into a query, supporting specific columns
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Insert an object
|
||||
* const result = await sql`insert into users ${sql(users)} returning *`;
|
||||
*
|
||||
* // Or pick specific columns
|
||||
* const result = await sql`insert into users ${sql(users, "id", "name")} returning *`;
|
||||
*
|
||||
* // Or a single object
|
||||
* const result = await sql`insert into users ${sql(user)} returning *`;
|
||||
* ```
|
||||
*/
|
||||
<T extends { [Key in PropertyKey]: unknown }, Keys extends keyof T = keyof T>(
|
||||
obj: T | T[] | readonly T[],
|
||||
...columns: readonly Keys[]
|
||||
): SQL.Helper<Pick<T, Keys>>; // Contributor note: This is the same as the signature above with the exception of this signature tracking keys
|
||||
|
||||
/**
|
||||
* Helper function for inserting any serializable value into a query
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const result = await sql`SELECT * FROM users WHERE id IN ${sql([1, 2, 3])}`;
|
||||
* ```
|
||||
*/
|
||||
<T>(value: T): SQL.Helper<T>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Main SQL client interface providing connection and transaction management
|
||||
*/
|
||||
class SQL {
|
||||
/**
|
||||
* Creates a new SQL client instance
|
||||
*
|
||||
* @param connectionString - The connection string for the SQL client
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const sql = new SQL("postgres://localhost:5432/mydb");
|
||||
* const sql = new SQL(new URL("postgres://localhost:5432/mydb"));
|
||||
* ```
|
||||
*/
|
||||
constructor(connectionString: string | URL);
|
||||
|
||||
/**
|
||||
* Creates a new SQL client instance with options
|
||||
*
|
||||
* @param connectionString - The connection string for the SQL client
|
||||
* @param options - The options for the SQL client
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const sql = new SQL("postgres://localhost:5432/mydb", { idleTimeout: 1000 });
|
||||
* ```
|
||||
*/
|
||||
constructor(
|
||||
connectionString: string | URL,
|
||||
options: Bun.__internal.DistributedOmit<SQL.Options, "url" | "filename">,
|
||||
);
|
||||
|
||||
/**
|
||||
* Creates a new SQL client instance with options
|
||||
*
|
||||
* @param options - The options for the SQL client
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const sql = new SQL({ url: "postgres://localhost:5432/mydb", idleTimeout: 1000 });
|
||||
* ```
|
||||
*/
|
||||
constructor(options?: SQL.Options);
|
||||
|
||||
/**
|
||||
* Current client options
|
||||
*/
|
||||
options: Bun.__internal.DistributedMerge<SQL.Options>;
|
||||
|
||||
/**
|
||||
* Commits a distributed transaction, also known as a prepared transaction in postgres or an XA transaction in MySQL
|
||||
*
|
||||
* @param name - The name of the distributed transaction
|
||||
*
|
||||
* @throws {Error} If the adapter does not support distributed transactions (e.g., SQLite)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* await sql.commitDistributed("my_distributed_transaction");
|
||||
* ```
|
||||
*/
|
||||
commitDistributed(name: string): Promise<void>;
|
||||
|
||||
/**
|
||||
* Rolls back a distributed transaction, also known as a prepared transaction in postgres or an XA transaction in MySQL
|
||||
*
|
||||
* @param name - The name of the distributed transaction
|
||||
*
|
||||
* @throws {Error} If the adapter does not support distributed transactions (e.g., SQLite)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* await sql.rollbackDistributed("my_distributed_transaction");
|
||||
* ```
|
||||
*/
|
||||
rollbackDistributed(name: string): Promise<void>;
|
||||
|
||||
/** Waits for the database connection to be established
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* await sql.connect();
|
||||
* ```
|
||||
*/
|
||||
connect(): Promise<SQL>;
|
||||
|
||||
/**
|
||||
* Closes the database connection with optional timeout in seconds. If the timeout is 0, it will close immediately; if it is not provided, it will wait for all queries to finish before closing.
|
||||
*
|
||||
* @param options - The options for the close
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* await sql.close({ timeout: 1 });
|
||||
* ```
|
||||
*/
|
||||
close(options?: { timeout?: number }): Promise<void>;
|
||||
|
||||
/**
|
||||
* Closes the database connection with optional timeout in seconds. If the timeout is 0, it will close immediately; if it is not provided, it will wait for all queries to finish before closing.
|
||||
* This is an alias of {@link SQL.close}
|
||||
*
|
||||
* @param options - The options for the close
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* await sql.end({ timeout: 1 });
|
||||
* ```
|
||||
*/
|
||||
end(options?: { timeout?: number }): Promise<void>;
|
||||
|
||||
/**
|
||||
* Flushes any pending operations
|
||||
*
|
||||
* @throws {Error} If the adapter does not support flushing (e.g., SQLite)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* sql.flush();
|
||||
* ```
|
||||
*/
|
||||
flush(): void;
|
||||
|
||||
/**
|
||||
* The reserve method pulls out a connection from the pool, and returns a client that wraps the single connection.
|
||||
*
|
||||
* This can be used for running queries on an isolated connection.
|
||||
* Calling reserve on a reserved SQL instance will return a new reserved connection, not the same connection (behavior matches the postgres package).
|
||||
*
|
||||
* @throws {Error} If the adapter does not support connection pooling (e.g., SQLite)
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const reserved = await sql.reserve();
|
||||
* await reserved`select * from users`;
|
||||
* await reserved.release();
|
||||
* // in a production scenario you would do something more like
|
||||
* const reserved = await sql.reserve();
|
||||
* try {
|
||||
* // ... queries
|
||||
* } finally {
|
||||
* await reserved.release();
|
||||
* }
|
||||
*
|
||||
* // Bun supports Symbol.dispose and Symbol.asyncDispose
|
||||
* // always release after context (safer)
|
||||
* using reserved = await sql.reserve()
|
||||
* await reserved`select * from users`
|
||||
* ```
|
||||
*/
|
||||
reserve(): Promise<ReservedSQL>;
|
||||
|
||||
/**
|
||||
* Begins a new transaction.
|
||||
*
|
||||
* Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.begin will resolve with the returned value from the callback function.
|
||||
* BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
|
||||
* @example
|
||||
* const [user, account] = await sql.begin(async sql => {
|
||||
* const [user] = await sql`
|
||||
* insert into users (
|
||||
* name
|
||||
* ) values (
|
||||
* 'Murray'
|
||||
* )
|
||||
* returning *
|
||||
* `
|
||||
* const [account] = await sql`
|
||||
* insert into accounts (
|
||||
* user_id
|
||||
* ) values (
|
||||
* ${ user.user_id }
|
||||
* )
|
||||
* returning *
|
||||
* `
|
||||
* return [user, account]
|
||||
* })
|
||||
*/
|
||||
begin<const T>(fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
|
||||
|
||||
/**
|
||||
* Begins a new transaction with options.
|
||||
*
|
||||
* Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.begin will resolve with the returned value from the callback function.
|
||||
* BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
|
||||
* @example
|
||||
* const [user, account] = await sql.begin("read write", async sql => {
|
||||
* const [user] = await sql`
|
||||
* insert into users (
|
||||
* name
|
||||
* ) values (
|
||||
* 'Murray'
|
||||
* )
|
||||
* returning *
|
||||
* `
|
||||
* const [account] = await sql`
|
||||
* insert into accounts (
|
||||
* user_id
|
||||
* ) values (
|
||||
* ${ user.user_id }
|
||||
* )
|
||||
* returning *
|
||||
* `
|
||||
* return [user, account]
|
||||
* })
|
||||
*/
|
||||
begin<const T>(options: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
|
||||
|
||||
/**
|
||||
* Alternative method to begin a transaction.
|
||||
*
|
||||
* Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.transaction will resolve with the returned value from the callback function.
|
||||
* BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
|
||||
* @alias begin
|
||||
* @example
|
||||
* const [user, account] = await sql.transaction(async sql => {
|
||||
* const [user] = await sql`
|
||||
* insert into users (
|
||||
* name
|
||||
* ) values (
|
||||
* 'Murray'
|
||||
* )
|
||||
* returning *
|
||||
* `
|
||||
* const [account] = await sql`
|
||||
* insert into accounts (
|
||||
* user_id
|
||||
* ) values (
|
||||
* ${ user.user_id }
|
||||
* )
|
||||
* returning *
|
||||
* `
|
||||
* return [user, account]
|
||||
* })
|
||||
*/
|
||||
transaction<const T>(fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
|
||||
|
||||
/**
|
||||
* Alternative method to begin a transaction with options
|
||||
* Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.transaction will resolve with the returned value from the callback function.
|
||||
* BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
|
||||
*
|
||||
* @alias {@link begin}
|
||||
*
|
||||
* @example
|
||||
* const [user, account] = await sql.transaction("read write", async sql => {
|
||||
* const [user] = await sql`
|
||||
* insert into users (
|
||||
* name
|
||||
* ) values (
|
||||
* 'Murray'
|
||||
* )
|
||||
* returning *
|
||||
* `
|
||||
* const [account] = await sql`
|
||||
* insert into accounts (
|
||||
* user_id
|
||||
* ) values (
|
||||
* ${ user.user_id }
|
||||
* )
|
||||
* returning *
|
||||
* `
|
||||
* return [user, account]
|
||||
* });
|
||||
*/
|
||||
transaction<const T>(options: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
|
||||
|
||||
/**
|
||||
* Begins a distributed transaction
|
||||
* Also known as Two-Phase Commit, in a distributed transaction, Phase 1 involves the coordinator preparing nodes by ensuring data is written and ready to commit, while Phase 2 finalizes with nodes committing or rolling back based on the coordinator's decision, ensuring durability and releasing locks.
|
||||
* In PostgreSQL and MySQL distributed transactions persist beyond the original session, allowing privileged users or coordinators to commit/rollback them, ensuring support for distributed transactions, recovery, and administrative tasks.
|
||||
* beginDistributed will automatically roll back if any exceptions are not caught, and you can commit or roll back later if everything goes well.
|
||||
* PostgreSQL natively supports distributed transactions using PREPARE TRANSACTION, while MySQL uses XA Transactions, and MSSQL also supports distributed/XA transactions. However, in MSSQL, distributed transactions are tied to the original session, the DTC coordinator, and the specific connection.
|
||||
* These transactions are automatically committed or rolled back following the same rules as regular transactions, with no option for manual intervention from other sessions; in MSSQL, distributed transactions are used to coordinate transactions using Linked Servers.
|
||||
*
|
||||
* @throws {Error} If the adapter does not support distributed transactions (e.g., SQLite)
|
||||
*
|
||||
* @example
|
||||
* await sql.beginDistributed("numbers", async sql => {
|
||||
* await sql`create table if not exists numbers (a int)`;
|
||||
* await sql`insert into numbers values(1)`;
|
||||
* });
|
||||
* // later you can call
|
||||
* await sql.commitDistributed("numbers");
|
||||
* // or await sql.rollbackDistributed("numbers");
|
||||
*/
|
||||
beginDistributed<const T>(
|
||||
name: string,
|
||||
fn: SQL.TransactionContextCallback<T>,
|
||||
): Promise<SQL.ContextCallbackResult<T>>;
|
||||
|
||||
/** Alternative method to begin a distributed transaction
|
||||
* @alias {@link beginDistributed}
|
||||
*/
|
||||
distributed<const T>(name: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
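A minimal sketch of how the `distributed` alias might be used, mirroring the `beginDistributed` example above (the transaction name and table are illustrative):
```ts
import { sql } from "bun";

// Equivalent to sql.beginDistributed("numbers", ...)
await sql.distributed("numbers", async sql => {
  await sql`create table if not exists numbers (a int)`;
  await sql`insert into numbers values (1)`;
});

// Finalize the prepared transaction later (possibly from another session)
await sql.commitDistributed("numbers");
```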
|
||||
|
||||
/**If you know what you're doing, you can use unsafe to pass any string you'd like.
|
||||
* Please note that this can lead to SQL injection if you're not careful.
|
||||
* You can also nest sql.unsafe within a safe sql expression. This is useful if only part of your query has unsafe elements.
|
||||
* @example
|
||||
* const result = await sql.unsafe(`select ${danger} from users where id = ${dragons}`)
|
||||
*/
|
||||
unsafe<T = any>(string: string, values?: any[]): SQL.Query<T>;
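A hedged sketch of the nesting described above: only the dynamic column list goes through `sql.unsafe`, while the user-supplied value stays a bound parameter (table and column names are illustrative):
```ts
import { sql } from "bun";

// Only the column list is inserted verbatim; the id stays a bound parameter.
const columns = "name, created_at";
const rows = await sql`select ${sql.unsafe(columns)} from users where id = ${1}`;
console.log(rows);
```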
|
||||
|
||||
/**
|
||||
* Reads a file and uses the contents as a query.
|
||||
* Optional parameters can be used if the file includes $1, $2, etc
|
||||
* @example
|
||||
* const result = await sql.file("query.sql", [1, 2, 3]);
|
||||
*/
|
||||
file<T = any>(filename: string, values?: any[]): SQL.Query<T>;
|
||||
}
|
||||
|
||||
/**
|
||||
* SQL client
|
||||
*/
|
||||
const sql: SQL;
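As a quick usage note, the default client is a tagged template and interpolated values are sent as bound parameters; a minimal sketch (table and column names are illustrative):
```ts
import { sql } from "bun";

const name = "Murray";
// Interpolated values are parameterized, not string-concatenated.
const users = await sql`select * from users where name = ${name}`;
console.log(users);
```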
|
||||
|
||||
/**
|
||||
* SQL client for PostgreSQL
|
||||
*
|
||||
* @deprecated Prefer {@link Bun.sql}
|
||||
*/
|
||||
const postgres: SQL;
|
||||
|
||||
/**
|
||||
* Represents a savepoint within a transaction
|
||||
*/
|
||||
interface SavepointSQL extends SQL {}
|
||||
}
|
||||
190
packages/bun-types/sqlite.d.ts
vendored
@@ -24,6 +24,66 @@
|
||||
* | `null` | `NULL` |
|
||||
*/
|
||||
declare module "bun:sqlite" {
|
||||
/**
|
||||
* Options for {@link Database}
|
||||
*/
|
||||
export interface DatabaseOptions {
|
||||
/**
|
||||
* Open the database as read-only (no write operations, no create).
|
||||
*
|
||||
* Equivalent to {@link constants.SQLITE_OPEN_READONLY}
|
||||
*/
|
||||
readonly?: boolean;
|
||||
|
||||
/**
|
||||
* Allow creating a new database
|
||||
*
|
||||
* Equivalent to {@link constants.SQLITE_OPEN_CREATE}
|
||||
*/
|
||||
create?: boolean;
|
||||
|
||||
/**
|
||||
* Open the database as read-write
|
||||
*
|
||||
* Equivalent to {@link constants.SQLITE_OPEN_READWRITE}
|
||||
*/
|
||||
readwrite?: boolean;
|
||||
|
||||
/**
|
||||
* When set to `true`, integers are returned as `bigint` types.
|
||||
*
|
||||
* When set to `false`, integers are returned as `number` types and truncated to 52 bits.
|
||||
*
|
||||
* @default false
|
||||
* @since v1.1.14
|
||||
*/
|
||||
safeIntegers?: boolean;
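A small sketch of the difference, assuming an in-memory database; with `safeIntegers: true` the value below comes back as a `bigint` rather than a lossy `number`:
```ts
import { Database } from "bun:sqlite";

const db = new Database(":memory:", { safeIntegers: true });
// 2^53 + 1 cannot be represented exactly as a JavaScript number
const row = db.query("select 9007199254740993 as n").get() as { n: bigint };
console.log(typeof row.n); // "bigint"
```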
|
||||
|
||||
/**
|
||||
* When set to `false` or `undefined`:
|
||||
* - Queries missing bound parameters will NOT throw an error
|
||||
* - Bound named parameters in JavaScript need to exactly match the SQL query.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const db = new Database(":memory:", { strict: false });
|
||||
* db.run("INSERT INTO foo (name) VALUES ($name)", { $name: "foo" });
|
||||
* ```
|
||||
*
|
||||
* When set to `true`:
|
||||
* - Queries missing bound parameters will throw an error
|
||||
* - Bound named parameters in JavaScript no longer need to be prefixed with `$`, `:`, or `@`. The SQL query will remain prefixed.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const db = new Database(":memory:", { strict: true });
|
||||
* db.run("INSERT INTO foo (name) VALUES ($name)", { name: "foo" });
|
||||
* ```
|
||||
* @since v1.1.14
|
||||
*/
|
||||
strict?: boolean;
|
||||
}
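With these declarations, the extracted `DatabaseOptions` interface can be passed straight to the constructor; a minimal sketch using a read-only handle (the file name is illustrative and must already exist):
```ts
import { Database, type DatabaseOptions } from "bun:sqlite";

const options: DatabaseOptions = { readonly: true, safeIntegers: true };
const db = new Database("mydb.sqlite", options);
```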
|
||||
|
||||
/**
|
||||
* A SQLite3 database
|
||||
*
|
||||
@@ -53,8 +113,6 @@ declare module "bun:sqlite" {
|
||||
* ```ts
|
||||
* const db = new Database("mydb.sqlite", {readonly: true});
|
||||
* ```
|
||||
*
|
||||
* @category Database
|
||||
*/
|
||||
export class Database implements Disposable {
|
||||
/**
|
||||
@@ -63,96 +121,19 @@ declare module "bun:sqlite" {
|
||||
* @param filename The filename of the database to open. Pass an empty string (`""`) or `":memory:"` or undefined for an in-memory database.
|
||||
* @param options defaults to `{readwrite: true, create: true}`. If a number, then it's treated as `SQLITE_OPEN_*` constant flags.
|
||||
*/
|
||||
constructor(
|
||||
filename?: string,
|
||||
options?:
|
||||
| number
|
||||
| {
|
||||
/**
|
||||
* Open the database as read-only (no write operations, no create).
|
||||
*
|
||||
* Equivalent to {@link constants.SQLITE_OPEN_READONLY}
|
||||
*/
|
||||
readonly?: boolean;
|
||||
/**
|
||||
* Allow creating a new database
|
||||
*
|
||||
* Equivalent to {@link constants.SQLITE_OPEN_CREATE}
|
||||
*/
|
||||
create?: boolean;
|
||||
/**
|
||||
* Open the database as read-write
|
||||
*
|
||||
* Equivalent to {@link constants.SQLITE_OPEN_READWRITE}
|
||||
*/
|
||||
readwrite?: boolean;
|
||||
|
||||
/**
|
||||
* When set to `true`, integers are returned as `bigint` types.
|
||||
*
|
||||
* When set to `false`, integers are returned as `number` types and truncated to 52 bits.
|
||||
*
|
||||
* @default false
|
||||
* @since v1.1.14
|
||||
*/
|
||||
safeIntegers?: boolean;
|
||||
|
||||
/**
|
||||
* When set to `false` or `undefined`:
|
||||
* - Queries missing bound parameters will NOT throw an error
|
||||
* - Bound named parameters in JavaScript need to exactly match the SQL query.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const db = new Database(":memory:", { strict: false });
|
||||
* db.run("INSERT INTO foo (name) VALUES ($name)", { $name: "foo" });
|
||||
* ```
|
||||
*
|
||||
* When set to `true`:
|
||||
* - Queries missing bound parameters will throw an error
|
||||
* - Bound named parameters in JavaScript no longer need to be `$`, `:`, or `@`. The SQL query will remain prefixed.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const db = new Database(":memory:", { strict: true });
|
||||
* db.run("INSERT INTO foo (name) VALUES ($name)", { name: "foo" });
|
||||
* ```
|
||||
* @since v1.1.14
|
||||
*/
|
||||
strict?: boolean;
|
||||
},
|
||||
);
|
||||
constructor(filename?: string, options?: number | DatabaseOptions);
|
||||
|
||||
/**
|
||||
* Open or create a SQLite3 database
|
||||
*
|
||||
* @param filename The filename of the database to open. Pass an empty string (`""`) or `":memory:"` or undefined for an in-memory database.
|
||||
* @param options defaults to `{readwrite: true, create: true}`. If a number, then it's treated as `SQLITE_OPEN_*` constant flags.
|
||||
*
|
||||
* This is an alias of `new Database()`
|
||||
*
|
||||
* See {@link Database}
|
||||
*/
|
||||
static open(
|
||||
filename: string,
|
||||
options?:
|
||||
| number
|
||||
| {
|
||||
/**
|
||||
* Open the database as read-only (no write operations, no create).
|
||||
*
|
||||
* Equivalent to {@link constants.SQLITE_OPEN_READONLY}
|
||||
*/
|
||||
readonly?: boolean;
|
||||
/**
|
||||
* Allow creating a new database
|
||||
*
|
||||
* Equivalent to {@link constants.SQLITE_OPEN_CREATE}
|
||||
*/
|
||||
create?: boolean;
|
||||
/**
|
||||
* Open the database as read-write
|
||||
*
|
||||
* Equivalent to {@link constants.SQLITE_OPEN_READWRITE}
|
||||
*/
|
||||
readwrite?: boolean;
|
||||
},
|
||||
): Database;
|
||||
static open(filename: string, options?: number | DatabaseOptions): Database;
|
||||
|
||||
/**
|
||||
* Execute a SQL query **without returning any results**.
|
||||
@@ -203,8 +184,11 @@ declare module "bun:sqlite" {
|
||||
* @returns `Database` instance
|
||||
*/
|
||||
run<ParamsType extends SQLQueryBindings[]>(sql: string, ...bindings: ParamsType[]): Changes;
|
||||
|
||||
/**
|
||||
* This is an alias of {@link Database.run}
|
||||
*
|
||||
* @deprecated Prefer {@link Database.run}
|
||||
*/
|
||||
exec<ParamsType extends SQLQueryBindings[]>(sql: string, ...bindings: ParamsType[]): Changes;
|
||||
|
||||
@@ -351,6 +335,16 @@ declare module "bun:sqlite" {
|
||||
*/
|
||||
static setCustomSQLite(path: string): boolean;
|
||||
|
||||
/**
|
||||
* Closes the database when using the explicit resource management proposal (`using`)
|
||||
*
|
||||
* @example
|
||||
* ```
|
||||
* using db = new Database("myapp.db");
|
||||
* doSomethingWithDatabase(db);
|
||||
* // Automatically closed when `db` goes out of scope
|
||||
* ```
|
||||
*/
|
||||
[Symbol.dispose](): void;
|
||||
|
||||
/**
|
||||
@@ -744,6 +738,30 @@ declare module "bun:sqlite" {
|
||||
*/
|
||||
values(...params: ParamsType): Array<Array<string | bigint | number | boolean | Uint8Array>>;
|
||||
|
||||
/**
|
||||
* Execute the prepared statement and return all results as arrays of
|
||||
* `Uint8Array`s.
|
||||
*
|
||||
* This is similar to `values()` but returns all values as Uint8Array
|
||||
* objects, regardless of their original SQLite type.
|
||||
*
|
||||
* @param params optional values to bind to the statement. If omitted, the
|
||||
* statement is run with the last bound values or no parameters if there are
|
||||
* none.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const stmt = db.prepare("SELECT * FROM foo WHERE bar = ?");
|
||||
*
|
||||
* stmt.raw("baz");
|
||||
* // => [[Uint8Array(24)]]
|
||||
*
|
||||
* stmt.raw();
|
||||
* // => [[Uint8Array(24)]]
|
||||
* ```
|
||||
*/
|
||||
raw(...params: ParamsType): Array<Array<Uint8Array | null>>;
|
||||
|
||||
/**
|
||||
* The names of the columns returned by the prepared statement.
|
||||
* @example
|
||||
|
||||
5
packages/bun-types/test-globals.d.ts
vendored
@@ -3,7 +3,7 @@
|
||||
// This file gets loaded by developers including the following triple slash directive:
|
||||
//
|
||||
// ```ts
|
||||
// /// <reference types="bun/test-globals" />
|
||||
// /// <reference types="bun-types/test-globals" />
|
||||
// ```
|
||||
|
||||
declare var test: typeof import("bun:test").test;
|
||||
@@ -19,3 +19,6 @@ declare var setDefaultTimeout: typeof import("bun:test").setDefaultTimeout;
|
||||
declare var mock: typeof import("bun:test").mock;
|
||||
declare var spyOn: typeof import("bun:test").spyOn;
|
||||
declare var jest: typeof import("bun:test").jest;
|
||||
declare var xit: typeof import("bun:test").xit;
|
||||
declare var xtest: typeof import("bun:test").xtest;
|
||||
declare var xdescribe: typeof import("bun:test").xdescribe;
|
||||
|
||||
51
packages/bun-types/test.d.ts
vendored
@@ -152,11 +152,41 @@ declare module "bun:test" {
|
||||
type SpiedSetter<T> = JestMock.SpiedSetter<T>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a spy on an object property or method
|
||||
*/
|
||||
export function spyOn<T extends object, K extends keyof T>(
|
||||
obj: T,
|
||||
methodOrPropertyValue: K,
|
||||
): Mock<Extract<T[K], (...args: any[]) => any>>;
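A minimal sketch of spying on a method (the object and method here are made up for illustration):
```ts
import { expect, spyOn, test } from "bun:test";

const calculator = { add: (a: number, b: number) => a + b };

test("add is spied on", () => {
  const spy = spyOn(calculator, "add");
  calculator.add(1, 2);
  expect(spy).toHaveBeenCalledWith(1, 2);
});
```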
|
||||
|
||||
/**
|
||||
* Vitest-compatible mocking utilities
|
||||
* Provides Vitest-style mocking API for easier migration from Vitest to Bun
|
||||
*/
|
||||
export const vi: {
|
||||
/**
|
||||
* Create a mock function
|
||||
*/
|
||||
fn: typeof jest.fn;
|
||||
/**
|
||||
* Create a spy on an object property or method
|
||||
*/
|
||||
spyOn: typeof spyOn;
|
||||
/**
|
||||
* Mock a module
|
||||
*/
|
||||
module: typeof mock.module;
|
||||
/**
|
||||
* Restore all mocks to their original implementation
|
||||
*/
|
||||
restoreAllMocks: typeof jest.restoreAllMocks;
|
||||
/**
|
||||
* Clear all mock state (calls, results, etc.) without restoring original implementation
|
||||
*/
|
||||
clearAllMocks: typeof jest.clearAllMocks;
|
||||
};
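A hedged sketch of the Vitest-style surface declared above; per these declarations, `vi.fn` and `vi.restoreAllMocks` simply delegate to their jest equivalents:
```ts
import { expect, test, vi } from "bun:test";

test("vi.fn delegates to jest.fn", () => {
  const double = vi.fn((x: number) => x * 2);
  expect(double(2)).toBe(4);
  expect(double).toHaveBeenCalledTimes(1);
  vi.restoreAllMocks();
});
```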
|
||||
|
||||
interface FunctionLike {
|
||||
readonly name: string;
|
||||
}
|
||||
@@ -262,6 +292,15 @@ declare module "bun:test" {
|
||||
* @param fn the function that defines the tests
|
||||
*/
|
||||
export const describe: Describe;
|
||||
/**
|
||||
* Skips a group of related tests.
|
||||
*
|
||||
* This is equivalent to calling `describe.skip()`.
|
||||
*
|
||||
* @param label the label for the tests
|
||||
* @param fn the function that defines the tests
|
||||
*/
|
||||
export const xdescribe: Describe;
|
||||
/**
|
||||
* Runs a function, once, before all the tests.
|
||||
*
|
||||
@@ -515,7 +554,17 @@ declare module "bun:test" {
|
||||
* @param fn the test function
|
||||
*/
|
||||
export const test: Test;
|
||||
export { test as it };
|
||||
export { test as it, xtest as xit };
|
||||
|
||||
/**
|
||||
* Skips a test.
|
||||
*
|
||||
* This is equivalent to calling `test.skip()`.
|
||||
*
|
||||
* @param label the label for the test
|
||||
* @param fn the test function
|
||||
*/
|
||||
export const xtest: Test;
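A minimal sketch; per the declarations above, `xtest` (and its `xit` alias) behaves like `test.skip`:
```ts
import { test, xtest } from "bun:test";

// Reported as skipped, never executed
xtest("not implemented yet", () => {
  throw new Error("never runs");
});

test.skip("equivalent form using test.skip", () => {});
```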
|
||||
|
||||
/**
|
||||
* Asserts that a value matches some criteria.
|
||||
|
||||
@@ -153,7 +153,7 @@ void us_internal_socket_context_unlink_connecting_socket(int ssl, struct us_sock
|
||||
}
|
||||
|
||||
/* We always add in the top, so we don't modify any s.next */
|
||||
void us_internal_socket_context_link_listen_socket(struct us_socket_context_t *context, struct us_listen_socket_t *ls) {
|
||||
void us_internal_socket_context_link_listen_socket(int ssl, struct us_socket_context_t *context, struct us_listen_socket_t *ls) {
|
||||
struct us_socket_t* s = &ls->s;
|
||||
s->context = context;
|
||||
s->next = (struct us_socket_t *) context->head_listen_sockets;
|
||||
@@ -162,7 +162,7 @@ void us_internal_socket_context_link_listen_socket(struct us_socket_context_t *c
|
||||
context->head_listen_sockets->s.prev = s;
|
||||
}
|
||||
context->head_listen_sockets = ls;
|
||||
us_socket_context_ref(0, context);
|
||||
us_socket_context_ref(ssl, context);
|
||||
}
|
||||
|
||||
void us_internal_socket_context_link_connecting_socket(int ssl, struct us_socket_context_t *context, struct us_connecting_socket_t *c) {
|
||||
@@ -179,7 +179,7 @@ void us_internal_socket_context_link_connecting_socket(int ssl, struct us_socket
|
||||
|
||||
|
||||
/* We always add in the top, so we don't modify any s.next */
|
||||
void us_internal_socket_context_link_socket(struct us_socket_context_t *context, struct us_socket_t *s) {
|
||||
void us_internal_socket_context_link_socket(int ssl, struct us_socket_context_t *context, struct us_socket_t *s) {
|
||||
s->context = context;
|
||||
s->next = context->head_sockets;
|
||||
s->prev = 0;
|
||||
@@ -187,7 +187,7 @@ void us_internal_socket_context_link_socket(struct us_socket_context_t *context,
|
||||
context->head_sockets->prev = s;
|
||||
}
|
||||
context->head_sockets = s;
|
||||
us_socket_context_ref(0, context);
|
||||
us_socket_context_ref(ssl, context);
|
||||
us_internal_enable_sweep_timer(context->loop);
|
||||
}
|
||||
|
||||
@@ -388,7 +388,7 @@ struct us_listen_socket_t *us_socket_context_listen(int ssl, struct us_socket_co
|
||||
s->flags.is_ipc = 0;
|
||||
s->next = 0;
|
||||
s->flags.allow_half_open = (options & LIBUS_SOCKET_ALLOW_HALF_OPEN);
|
||||
us_internal_socket_context_link_listen_socket(context, ls);
|
||||
us_internal_socket_context_link_listen_socket(ssl, context, ls);
|
||||
|
||||
ls->socket_ext_size = socket_ext_size;
|
||||
|
||||
@@ -423,7 +423,7 @@ struct us_listen_socket_t *us_socket_context_listen_unix(int ssl, struct us_sock
|
||||
s->flags.is_paused = 0;
|
||||
s->flags.is_ipc = 0;
|
||||
s->next = 0;
|
||||
us_internal_socket_context_link_listen_socket(context, ls);
|
||||
us_internal_socket_context_link_listen_socket(ssl, context, ls);
|
||||
|
||||
ls->socket_ext_size = socket_ext_size;
|
||||
|
||||
@@ -456,7 +456,7 @@ struct us_socket_t* us_socket_context_connect_resolved_dns(struct us_socket_cont
|
||||
socket->connect_state = NULL;
|
||||
socket->connect_next = NULL;
|
||||
|
||||
us_internal_socket_context_link_socket(context, socket);
|
||||
us_internal_socket_context_link_socket(0, context, socket);
|
||||
|
||||
return socket;
|
||||
}
|
||||
@@ -584,7 +584,7 @@ int start_connections(struct us_connecting_socket_t *c, int count) {
|
||||
flags->is_paused = 0;
|
||||
flags->is_ipc = 0;
|
||||
/* Link it into context so that timeout fires properly */
|
||||
us_internal_socket_context_link_socket(context, s);
|
||||
us_internal_socket_context_link_socket(0, context, s);
|
||||
|
||||
// TODO check this, specifically how it interacts with the SSL code
|
||||
// does this work when we create multiple sockets at once? will we need multiple SSL contexts?
|
||||
@@ -762,7 +762,7 @@ struct us_socket_t *us_socket_context_connect_unix(int ssl, struct us_socket_con
|
||||
connect_socket->flags.is_ipc = 0;
|
||||
connect_socket->connect_state = NULL;
|
||||
connect_socket->connect_next = NULL;
|
||||
us_internal_socket_context_link_socket(context, connect_socket);
|
||||
us_internal_socket_context_link_socket(ssl, context, connect_socket);
|
||||
|
||||
return connect_socket;
|
||||
}
|
||||
@@ -804,12 +804,9 @@ struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_con
|
||||
}
|
||||
|
||||
struct us_connecting_socket_t *c = s->connect_state;
|
||||
|
||||
struct us_socket_t *new_s = s;
|
||||
|
||||
if (ext_size != -1) {
|
||||
struct us_poll_t *pool_ref = &s->p;
|
||||
|
||||
new_s = (struct us_socket_t *) us_poll_resize(pool_ref, loop, sizeof(struct us_socket_t) + ext_size);
|
||||
if (c) {
|
||||
c->connecting_head = new_s;
|
||||
@@ -831,7 +828,7 @@ struct us_socket_t *us_socket_context_adopt_socket(int ssl, struct us_socket_con
|
||||
/* We manually ref/unref context to handle context life cycle with low-priority queue */
|
||||
us_socket_context_ref(ssl, context);
|
||||
} else {
|
||||
us_internal_socket_context_link_socket(context, new_s);
|
||||
us_internal_socket_context_link_socket(ssl, context, new_s);
|
||||
}
|
||||
/* We can safely unref the old context here, which can potentially be freed */
|
||||
us_socket_context_unref(ssl, old_context);
|
||||
|
||||
@@ -150,16 +150,12 @@ void us_internal_init_loop_ssl_data(us_loop_r loop);
|
||||
void us_internal_free_loop_ssl_data(us_loop_r loop);
|
||||
|
||||
/* Socket context related */
|
||||
void us_internal_socket_context_link_socket(us_socket_context_r context,
|
||||
us_socket_r s);
|
||||
void us_internal_socket_context_unlink_socket(int ssl,
|
||||
us_socket_context_r context, us_socket_r s);
|
||||
void us_internal_socket_context_link_socket(int ssl, us_socket_context_r context, us_socket_r s);
|
||||
void us_internal_socket_context_unlink_socket(int ssl, us_socket_context_r context, us_socket_r s);
|
||||
|
||||
void us_internal_socket_after_resolve(struct us_connecting_socket_t *s);
|
||||
void us_internal_socket_after_open(us_socket_r s, int error);
|
||||
struct us_internal_ssl_socket_t *
|
||||
us_internal_ssl_socket_close(us_internal_ssl_socket_r s, int code,
|
||||
void *reason);
|
||||
struct us_internal_ssl_socket_t *us_internal_ssl_socket_close(us_internal_ssl_socket_r s, int code, void *reason);
|
||||
|
||||
int us_internal_handle_dns_results(us_loop_r loop);
|
||||
|
||||
@@ -271,7 +267,7 @@ struct us_listen_socket_t {
|
||||
};
|
||||
|
||||
/* Listen sockets are kept in their own list */
|
||||
void us_internal_socket_context_link_listen_socket(
|
||||
void us_internal_socket_context_link_listen_socket(int ssl,
|
||||
us_socket_context_r context, struct us_listen_socket_t *s);
|
||||
void us_internal_socket_context_unlink_listen_socket(int ssl,
|
||||
us_socket_context_r context, struct us_listen_socket_t *s);
|
||||
@@ -288,8 +284,7 @@ struct us_socket_context_t {
|
||||
struct us_socket_t *iterator;
|
||||
struct us_socket_context_t *prev, *next;
|
||||
|
||||
struct us_socket_t *(*on_open)(struct us_socket_t *, int is_client, char *ip,
|
||||
int ip_length);
|
||||
struct us_socket_t *(*on_open)(struct us_socket_t *, int is_client, char *ip, int ip_length);
|
||||
struct us_socket_t *(*on_data)(struct us_socket_t *, char *data, int length);
|
||||
struct us_socket_t *(*on_fd)(struct us_socket_t *, int fd);
|
||||
struct us_socket_t *(*on_writable)(struct us_socket_t *);
|
||||
@@ -301,7 +296,6 @@ struct us_socket_context_t {
|
||||
struct us_connecting_socket_t *(*on_connect_error)(struct us_connecting_socket_t *, int code);
|
||||
struct us_socket_t *(*on_socket_connect_error)(struct us_socket_t *, int code);
|
||||
int (*is_low_prio)(struct us_socket_t *);
|
||||
|
||||
};
|
||||
|
||||
/* Internal SSL interface */
|
||||
|
||||
@@ -40,7 +40,6 @@ void us_internal_enable_sweep_timer(struct us_loop_t *loop) {
|
||||
us_timer_set(loop->data.sweep_timer, (void (*)(struct us_timer_t *)) sweep_timer_cb, LIBUS_TIMEOUT_GRANULARITY * 1000, LIBUS_TIMEOUT_GRANULARITY * 1000);
|
||||
Bun__internal_ensureDateHeaderTimerIsEnabled(loop);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
void us_internal_disable_sweep_timer(struct us_loop_t *loop) {
|
||||
@@ -183,7 +182,7 @@ void us_internal_handle_low_priority_sockets(struct us_loop_t *loop) {
|
||||
if (s->next) s->next->prev = 0;
|
||||
s->next = 0;
|
||||
|
||||
us_internal_socket_context_link_socket(s->context, s);
|
||||
us_internal_socket_context_link_socket(0, s->context, s);
|
||||
us_poll_change(&s->p, us_socket_context(0, s)->loop, us_poll_events(&s->p) | LIBUS_SOCKET_READABLE);
|
||||
|
||||
s->flags.low_prio_state = 2;
|
||||
@@ -340,7 +339,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
|
||||
/* We always use nodelay */
|
||||
bsd_socket_nodelay(client_fd, 1);
|
||||
|
||||
us_internal_socket_context_link_socket(listen_socket->s.context, s);
|
||||
us_internal_socket_context_link_socket(0, listen_socket->s.context, s);
|
||||
|
||||
listen_socket->s.context->on_open(s, 0, bsd_addr_get_ip(&addr), bsd_addr_get_ip_length(&addr));
|
||||
|
||||
@@ -364,7 +363,7 @@ void us_internal_dispatch_ready_poll(struct us_poll_t *p, int error, int eof, in
|
||||
/* Note: if we failed a write as a socket of one loop then adopted
|
||||
* to another loop, this will be wrong. Absurd case though */
|
||||
loop->data.last_write_failed = 0;
|
||||
|
||||
|
||||
s = s->context->on_writable(s);
|
||||
|
||||
if (!s || us_socket_is_closed(0, s)) {
|
||||
|
||||
@@ -329,7 +329,7 @@ struct us_socket_t *us_socket_from_fd(struct us_socket_context_t *ctx, int socke
|
||||
bsd_socket_nodelay(fd, 1);
|
||||
apple_no_sigpipe(fd);
|
||||
bsd_set_nonblocking(fd);
|
||||
us_internal_socket_context_link_socket(ctx, s);
|
||||
us_internal_socket_context_link_socket(0, ctx, s);
|
||||
|
||||
return s;
|
||||
#endif
|
||||
|
||||
@@ -298,6 +298,22 @@ public:
|
||||
return std::move(*this);
|
||||
}
|
||||
|
||||
/** Closes all connections to this server that are not currently sending a request or waiting for a response. Does not close the listen socket. */
|
||||
TemplatedApp &&closeIdle() {
|
||||
auto context = (struct us_socket_context_t *)this->httpContext;
|
||||
struct us_socket_t *s = context->head_sockets;
|
||||
while (s) {
|
||||
HttpResponseData<SSL> *httpResponseData = HttpResponse<SSL>::getHttpResponseDataS(s);
|
||||
httpResponseData->shouldCloseOnceIdle = true;
|
||||
struct us_socket_t *next = s->next;
|
||||
if (httpResponseData->isIdle) {
|
||||
us_socket_close(SSL, s, LIBUS_SOCKET_CLOSE_CODE_CLEAN_SHUTDOWN, 0);
|
||||
}
|
||||
s = next;
|
||||
}
|
||||
return std::move(*this);
|
||||
}
|
||||
|
||||
template <typename UserData>
|
||||
TemplatedApp &&ws(std::string_view pattern, WebSocketBehavior<UserData> &&behavior) {
|
||||
/* Don't compile if alignment rules cannot be satisfied */
|
||||
|
||||
@@ -386,6 +386,9 @@ public:
|
||||
/* We do not need to care for buffering here, write does that */
|
||||
return {0, true};
|
||||
}
|
||||
if (length == 0) {
|
||||
return {written, failed};
|
||||
}
|
||||
}
|
||||
|
||||
/* We should only return with new writes, not things written to cork already */
|
||||
|
||||
@@ -137,10 +137,6 @@ private:
|
||||
return (HttpContextData<SSL> *) us_socket_context_ext(SSL, getSocketContext());
|
||||
}
|
||||
|
||||
static HttpContextData<SSL> *getSocketContextDataS(us_socket_t *s) {
|
||||
return (HttpContextData<SSL> *) us_socket_context_ext(SSL, getSocketContext(s));
|
||||
}
|
||||
|
||||
/* Init the HttpContext by registering libusockets event handlers */
|
||||
HttpContext<SSL> *init() {
|
||||
|
||||
@@ -247,6 +243,7 @@ private:
|
||||
|
||||
/* Mark that we are inside the parser now */
|
||||
httpContextData->flags.isParsingHttp = true;
|
||||
httpResponseData->isIdle = false;
|
||||
// clients need to know the cursor after http parse, not servers!
|
||||
// how far did we read then? we need to know to continue with websocket parsing data? or?
|
||||
|
||||
@@ -398,6 +395,7 @@ private:
|
||||
/* Timeout on uncork failure */
|
||||
auto [written, failed] = ((AsyncSocket<SSL> *) returnedData)->uncork();
|
||||
if (written > 0 || failed) {
|
||||
httpResponseData->isIdle = true;
|
||||
/* All Http sockets timeout by this, and this behavior match the one in HttpResponse::cork */
|
||||
((HttpResponse<SSL> *) s)->resetTimeout();
|
||||
}
|
||||
@@ -642,6 +640,10 @@ public:
|
||||
}, priority);
|
||||
}
|
||||
|
||||
static HttpContextData<SSL> *getSocketContextDataS(us_socket_t *s) {
|
||||
return (HttpContextData<SSL> *) us_socket_context_ext(SSL, getSocketContext(s));
|
||||
}
|
||||
|
||||
/* Listen to port using this HttpContext */
|
||||
us_listen_socket_t *listen(const char *host, int port, int options) {
|
||||
int error = 0;
|
||||
|
||||
@@ -63,7 +63,6 @@ private:
|
||||
OnSocketClosedCallback onSocketClosed = nullptr;
|
||||
OnClientErrorCallback onClientError = nullptr;
|
||||
|
||||
HttpFlags flags;
|
||||
uint64_t maxHeaderSize = 0; // 0 means no limit
|
||||
|
||||
// TODO: SNI
|
||||
@@ -73,10 +72,8 @@ private:
|
||||
filterHandlers.clear();
|
||||
}
|
||||
|
||||
public:
|
||||
bool isAuthorized() const {
|
||||
return flags.isAuthorized;
|
||||
}
|
||||
public:
|
||||
HttpFlags flags;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
@@ -50,6 +50,11 @@ public:
|
||||
HttpResponseData<SSL> *getHttpResponseData() {
|
||||
return (HttpResponseData<SSL> *) Super::getAsyncSocketData();
|
||||
}
|
||||
|
||||
static HttpResponseData<SSL> *getHttpResponseDataS(us_socket_t *s) {
|
||||
return (HttpResponseData<SSL> *) us_socket_ext(SSL, s);
|
||||
}
|
||||
|
||||
void setTimeout(uint8_t seconds) {
|
||||
auto* data = getHttpResponseData();
|
||||
data->idleTimeout = seconds;
|
||||
@@ -132,7 +137,7 @@ public:
|
||||
|
||||
/* Terminating 0 chunk */
|
||||
Super::write("0\r\n\r\n", 5);
|
||||
httpResponseData->markDone();
|
||||
httpResponseData->markDone(this);
|
||||
|
||||
/* We need to check if we should close this socket here now */
|
||||
if (!Super::isCorked()) {
|
||||
@@ -198,7 +203,7 @@ public:
|
||||
|
||||
/* Remove onAborted function if we reach the end */
|
||||
if (httpResponseData->offset == totalSize) {
|
||||
httpResponseData->markDone();
|
||||
httpResponseData->markDone(this);
|
||||
|
||||
/* We need to check if we should close this socket here now */
|
||||
if (!Super::isCorked()) {
|
||||
|
||||
@@ -22,11 +22,15 @@
|
||||
#include "HttpParser.h"
|
||||
#include "AsyncSocketData.h"
|
||||
#include "ProxyParser.h"
|
||||
#include "HttpContext.h"
|
||||
|
||||
#include "MoveOnlyFunction.h"
|
||||
|
||||
namespace uWS {
|
||||
|
||||
template <bool SSL>
|
||||
struct HttpContext;
|
||||
|
||||
template <bool SSL>
|
||||
struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
|
||||
template <bool> friend struct HttpResponse;
|
||||
@@ -38,7 +42,7 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
|
||||
using OnDataCallback = void (*)(uWS::HttpResponse<SSL>* response, const char* chunk, size_t chunk_length, bool, void*);
|
||||
|
||||
/* When we are done with a response we mark it like so */
|
||||
void markDone() {
|
||||
void markDone(uWS::HttpResponse<SSL> *uwsRes) {
|
||||
onAborted = nullptr;
|
||||
/* Also remove onWritable so that we do not emit when draining behind the scenes. */
|
||||
onWritable = nullptr;
|
||||
@@ -50,6 +54,9 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
|
||||
|
||||
/* We are done with this request */
|
||||
this->state &= ~HttpResponseData<SSL>::HTTP_RESPONSE_PENDING;
|
||||
|
||||
HttpResponseData<SSL> *httpResponseData = uwsRes->getHttpResponseData();
|
||||
httpResponseData->isIdle = true;
|
||||
}
|
||||
|
||||
/* Caller of onWritable. It is possible onWritable calls markDone so we need to borrow it. */
|
||||
@@ -101,6 +108,8 @@ struct HttpResponseData : AsyncSocketData<SSL>, HttpParser {
|
||||
uint8_t state = 0;
|
||||
uint8_t idleTimeout = 10; // default HTTP_TIMEOUT 10 seconds
|
||||
bool fromAncientRequest = false;
|
||||
bool isIdle = true;
|
||||
bool shouldCloseOnceIdle = false;
|
||||
|
||||
|
||||
#ifdef UWS_WITH_PROXY
|
||||
|
||||
@@ -1,17 +1,21 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { spawn as nodeSpawn } from "node:child_process";
|
||||
import { chmodSync, cpSync, existsSync, mkdirSync, readFileSync } from "node:fs";
|
||||
import { basename, join, relative, resolve } from "node:path";
|
||||
import {
|
||||
formatAnnotationToHtml,
|
||||
getSecret,
|
||||
isCI,
|
||||
isWindows,
|
||||
parseAnnotations,
|
||||
printEnvironment,
|
||||
reportAnnotationToBuildKite,
|
||||
startGroup,
|
||||
} from "./utils.mjs";
|
||||
|
||||
if (globalThis.Bun) {
|
||||
await import("./glob-sources.mjs");
|
||||
}
|
||||
|
||||
// https://cmake.org/cmake/help/latest/manual/cmake.1.html#generate-a-project-buildsystem
|
||||
const generateFlags = [
|
||||
["-S", "string", "path to source directory"],
|
||||
@@ -214,14 +218,47 @@ function parseOptions(args, flags = []) {
|
||||
async function spawn(command, args, options, label) {
|
||||
const effectiveArgs = args.filter(Boolean);
|
||||
const description = [command, ...effectiveArgs].map(arg => (arg.includes(" ") ? JSON.stringify(arg) : arg)).join(" ");
|
||||
let env = options?.env;
|
||||
|
||||
console.log("$", description);
|
||||
|
||||
label ??= basename(command);
|
||||
|
||||
const pipe = process.env.CI === "true";
|
||||
|
||||
if (isBuildkite()) {
|
||||
if (process.env.BUN_LINK_ONLY && isWindows) {
|
||||
env ||= options?.env || { ...process.env };
|
||||
|
||||
// Pass signing secrets directly to the build process
|
||||
// The PowerShell signing script will handle certificate decoding
|
||||
env.SM_CLIENT_CERT_PASSWORD = getSecret("SM_CLIENT_CERT_PASSWORD", {
|
||||
redact: true,
|
||||
required: true,
|
||||
});
|
||||
env.SM_CLIENT_CERT_FILE = getSecret("SM_CLIENT_CERT_FILE", {
|
||||
redact: true,
|
||||
required: true,
|
||||
});
|
||||
env.SM_API_KEY = getSecret("SM_API_KEY", {
|
||||
redact: true,
|
||||
required: true,
|
||||
});
|
||||
env.SM_KEYPAIR_ALIAS = getSecret("SM_KEYPAIR_ALIAS", {
|
||||
redact: true,
|
||||
required: true,
|
||||
});
|
||||
env.SM_HOST = getSecret("SM_HOST", {
|
||||
redact: true,
|
||||
required: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const subprocess = nodeSpawn(command, effectiveArgs, {
|
||||
stdio: pipe ? "pipe" : "inherit",
|
||||
...options,
|
||||
env,
|
||||
});
|
||||
|
||||
let killedManually = false;
|
||||
|
||||
107
scripts/buildkite-slow-tests.js
Executable file
@@ -0,0 +1,107 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
import { readFileSync } from "fs";
|
||||
|
||||
function parseLogFile(filename) {
|
||||
const testDetails = new Map(); // Track individual attempts and total for each test
|
||||
let currentTest = null;
|
||||
let startTime = null;
|
||||
|
||||
// Pattern to match test group start: --- [90m[N/TOTAL][0m test/path
|
||||
// Note: there are escape sequences before _bk
|
||||
const startPattern = /_bk;t=(\d+).*?--- .*?\[90m\[(\d+)\/(\d+)\].*?\[0m (.+)/;
|
||||
|
||||
const content = readFileSync(filename, "utf-8");
|
||||
const lines = content.split("\n");
|
||||
|
||||
for (const line of lines) {
|
||||
const match = line.match(startPattern);
|
||||
if (match) {
|
||||
// If we have a previous test, calculate its duration
|
||||
if (currentTest && startTime) {
|
||||
const endTime = parseInt(match[1]);
|
||||
const duration = endTime - startTime;
|
||||
|
||||
// Extract attempt info - match the actual ANSI pattern
|
||||
const attemptMatch = currentTest.match(/\s+\x1b\[90m\[attempt #(\d+)\]\x1b\[0m$/);
|
||||
const cleanName = currentTest.replace(/\s+\x1b\[90m\[attempt #\d+\]\x1b\[0m$/, "").trim();
|
||||
const attemptNum = attemptMatch ? parseInt(attemptMatch[1]) : 1;
|
||||
|
||||
if (!testDetails.has(cleanName)) {
|
||||
testDetails.set(cleanName, { total: 0, attempts: [] });
|
||||
}
|
||||
|
||||
const testInfo = testDetails.get(cleanName);
|
||||
testInfo.total += duration;
|
||||
testInfo.attempts.push({ attempt: attemptNum, duration });
|
||||
}
|
||||
|
||||
// Start new test
|
||||
startTime = parseInt(match[1]);
|
||||
currentTest = match[4].trim();
|
||||
}
|
||||
}
|
||||
|
||||
// Convert to array and sort by total duration
|
||||
const testGroups = Array.from(testDetails.entries())
|
||||
.map(([name, info]) => ({
|
||||
name,
|
||||
totalDuration: info.total,
|
||||
attempts: info.attempts.sort((a, b) => a.attempt - b.attempt),
|
||||
}))
|
||||
.sort((a, b) => b.totalDuration - a.totalDuration);
|
||||
|
||||
return testGroups;
|
||||
}
|
||||
|
||||
function formatAttempts(attempts) {
|
||||
if (attempts.length <= 1) return "";
|
||||
|
||||
const attemptStrings = attempts.map(
|
||||
({ attempt, duration }) => `${(duration / 1000).toFixed(1)}s attempt #${attempt}`,
|
||||
);
|
||||
return ` [${attemptStrings.join(", ")}]`;
|
||||
}
|
||||
|
||||
if (process.argv.length !== 3) {
|
||||
console.log("Usage: bun parse_test_logs.js <log_file>");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const filename = process.argv[2];
|
||||
const testGroups = parseLogFile(filename);
|
||||
|
||||
const totalTime = testGroups.reduce((sum, group) => sum + group.totalDuration, 0) / 1000;
|
||||
const avgTime = testGroups.length > 0 ? totalTime / testGroups.length : 0;
|
||||
|
||||
console.log(
|
||||
`## Slowest Tests Analysis - ${testGroups.length} tests (${totalTime.toFixed(1)}s total, ${avgTime.toFixed(2)}s avg)`,
|
||||
);
|
||||
console.log("");
|
||||
|
||||
// Top 10 summary
|
||||
console.log("**Top 10 slowest tests:**");
|
||||
for (let i = 0; i < Math.min(10, testGroups.length); i++) {
|
||||
const { name, totalDuration, attempts } = testGroups[i];
|
||||
const durationSec = totalDuration / 1000;
|
||||
const testName = name.replace("test/", "").replace(".test.ts", "").replace(".test.js", "");
|
||||
const attemptInfo = formatAttempts(attempts);
|
||||
console.log(`- **${durationSec.toFixed(1)}s** ${testName}${attemptInfo}`);
|
||||
}
|
||||
|
||||
console.log("");
|
||||
|
||||
// Filter tests > 1 second
|
||||
const slowTests = testGroups.filter(test => test.totalDuration > 1000);
|
||||
|
||||
console.log("```");
|
||||
console.log(`All tests > 1s (${slowTests.length} tests):`);
|
||||
|
||||
for (let i = 0; i < slowTests.length; i++) {
|
||||
const { name, totalDuration, attempts } = slowTests[i];
|
||||
const durationSec = totalDuration / 1000;
|
||||
const attemptInfo = formatAttempts(attempts);
|
||||
console.log(`${(i + 1).toString().padStart(3)}. ${durationSec.toFixed(2).padStart(7)}s ${name}${attemptInfo}`);
|
||||
}
|
||||
|
||||
console.log("```");
|
||||
@@ -565,6 +565,7 @@ async function runTests() {
|
||||
};
|
||||
if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(testPath)) {
|
||||
env.BUN_JSC_validateExceptionChecks = "1";
|
||||
env.BUN_JSC_dumpSimulatedThrows = "1";
|
||||
}
|
||||
return runTest(title, async () => {
|
||||
const { ok, error, stdout, crashes } = await spawnBun(execPath, {
|
||||
@@ -1288,6 +1289,7 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {
|
||||
};
|
||||
if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(relative(cwd, absPath))) {
|
||||
env.BUN_JSC_validateExceptionChecks = "1";
|
||||
env.BUN_JSC_dumpSimulatedThrows = "1";
|
||||
}
|
||||
|
||||
const { ok, error, stdout, crashes } = await spawnBun(execPath, {
|
||||
|
||||
@@ -40,7 +40,25 @@ if ($args.Count -gt 0) {
|
||||
$commandArgs = @($args[1..($args.Count - 1)] | % {$_})
|
||||
}
|
||||
|
||||
Write-Host "$ $command $commandArgs"
|
||||
# Don't print the full command as it may contain sensitive information like certificates
|
||||
# Just show the command name and basic info
|
||||
$displayArgs = @()
|
||||
foreach ($arg in $commandArgs) {
|
||||
if ($arg -match "^-") {
|
||||
# Include flags
|
||||
$displayArgs += $arg
|
||||
} elseif ($arg -match "\.(mjs|js|ts|cmake|zig|cpp|c|h|exe)$") {
|
||||
# Include file names
|
||||
$displayArgs += $arg
|
||||
} elseif ($arg.Length -gt 100) {
|
||||
# Truncate long arguments (likely certificates or encoded data)
|
||||
$displayArgs += "[REDACTED]"
|
||||
} else {
|
||||
$displayArgs += $arg
|
||||
}
|
||||
}
|
||||
|
||||
Write-Host "$ $command $displayArgs"
|
||||
& $command $commandArgs
|
||||
exit $LASTEXITCODE
|
||||
}
|
||||
|
||||
@@ -41,7 +41,7 @@ fn createImportRecord(this: *HTMLScanner, input_path: []const u8, kind: ImportKi
|
||||
const record = ImportRecord{
|
||||
.path = fs.Path.init(try this.allocator.dupeZ(u8, path_to_use)),
|
||||
.kind = kind,
|
||||
.range = logger.Range.None,
|
||||
.range = .none,
|
||||
};
|
||||
|
||||
try this.import_records.push(this.allocator, record);
|
||||
@@ -56,9 +56,9 @@ pub fn onWriteHTML(_: *HTMLScanner, bytes: []const u8) void {
|
||||
pub fn onHTMLParseError(this: *HTMLScanner, message: []const u8) void {
|
||||
this.log.addError(
|
||||
this.source,
|
||||
logger.Loc.Empty,
|
||||
.none,
|
||||
message,
|
||||
) catch bun.outOfMemory();
|
||||
) catch |err| bun.handleOom(err);
|
||||
}
|
||||
|
||||
pub fn onTag(this: *HTMLScanner, _: *lol.Element, path: []const u8, url_attribute: []const u8, kind: ImportKind) void {
|
||||
@@ -222,7 +222,7 @@ pub fn HTMLProcessor(
|
||||
var builder = lol.HTMLRewriter.Builder.init();
|
||||
defer builder.deinit();
|
||||
|
||||
var selectors: std.BoundedArray(*lol.HTMLSelector, tag_handlers.len + if (visit_document_tags) 3 else 0) = .{};
|
||||
var selectors: bun.BoundedArray(*lol.HTMLSelector, tag_handlers.len + if (visit_document_tags) 3 else 0) = .{};
|
||||
defer for (selectors.slice()) |selector| {
|
||||
selector.deinit();
|
||||
};
|
||||
|
||||
@@ -6,6 +6,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
bytes: []const u8 = "",
|
||||
files: bun.StringArrayHashMap(File),
|
||||
entry_point_id: u32 = 0,
|
||||
compile_exec_argv: []const u8 = "",
|
||||
|
||||
// We never want to hit the filesystem for these files
|
||||
// We use the `/$bunfs/` prefix to indicate that it's a virtual path
|
||||
@@ -43,18 +44,27 @@ pub const StandaloneModuleGraph = struct {
|
||||
};
|
||||
}
|
||||
|
||||
pub fn isBunStandaloneFilePath(str: []const u8) bool {
|
||||
pub fn isBunStandaloneFilePathCanonicalized(str: []const u8) bool {
|
||||
return bun.strings.hasPrefixComptime(str, base_path) or
|
||||
(Environment.isWindows and bun.strings.hasPrefixComptime(str, base_public_path));
|
||||
}
|
||||
|
||||
pub fn isBunStandaloneFilePath(str: []const u8) bool {
|
||||
if (Environment.isWindows) {
|
||||
// On Windows, remove NT path prefixes before checking
|
||||
const canonicalized = strings.withoutNTPrefix(u8, str);
|
||||
return isBunStandaloneFilePathCanonicalized(canonicalized);
|
||||
}
|
||||
return isBunStandaloneFilePathCanonicalized(str);
|
||||
}
|
||||
|
||||
pub fn entryPoint(this: *const StandaloneModuleGraph) *File {
|
||||
return &this.files.values()[this.entry_point_id];
|
||||
}
|
||||
|
||||
// by normalized file path
|
||||
pub fn find(this: *const StandaloneModuleGraph, name: []const u8) ?*File {
|
||||
if (!isBunStandaloneFilePath(base_path)) {
|
||||
if (!isBunStandaloneFilePath(name)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -247,7 +257,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
};
|
||||
|
||||
const source_files = serialized.sourceFileNames();
|
||||
const slices = bun.default_allocator.alloc(?[]u8, source_files.len * 2) catch bun.outOfMemory();
|
||||
const slices = bun.handleOom(bun.default_allocator.alloc(?[]u8, source_files.len * 2));
|
||||
|
||||
const file_names: [][]const u8 = @ptrCast(slices[0..source_files.len]);
|
||||
const decompressed_contents_slice = slices[source_files.len..][0..source_files.len];
|
||||
@@ -279,6 +289,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
byte_count: usize = 0,
|
||||
modules_ptr: bun.StringPointer = .{},
|
||||
entry_point_id: u32 = 0,
|
||||
compile_exec_argv_ptr: bun.StringPointer = .{},
|
||||
};
|
||||
|
||||
const trailer = "\n---- Bun! ----\n";
|
||||
@@ -323,6 +334,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
.bytes = raw_bytes[0..offsets.byte_count],
|
||||
.files = modules,
|
||||
.entry_point_id = offsets.entry_point_id,
|
||||
.compile_exec_argv = sliceToZ(raw_bytes, offsets.compile_exec_argv_ptr),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -338,14 +350,14 @@ pub const StandaloneModuleGraph = struct {
|
||||
return bytes[ptr.offset..][0..ptr.length :0];
|
||||
}
|
||||
|
||||
pub fn toBytes(allocator: std.mem.Allocator, prefix: []const u8, output_files: []const bun.options.OutputFile, output_format: bun.options.Format) ![]u8 {
|
||||
pub fn toBytes(allocator: std.mem.Allocator, prefix: []const u8, output_files: []const bun.options.OutputFile, output_format: bun.options.Format, compile_exec_argv: []const u8) ![]u8 {
|
||||
var serialize_trace = bun.perf.trace("StandaloneModuleGraph.serialize");
|
||||
defer serialize_trace.end();
|
||||
|
||||
var entry_point_id: ?usize = null;
|
||||
var string_builder = bun.StringBuilder{};
|
||||
var module_count: usize = 0;
|
||||
for (output_files) |output_file| {
|
||||
for (output_files) |*output_file| {
|
||||
string_builder.countZ(output_file.dest_path);
|
||||
string_builder.countZ(prefix);
|
||||
if (output_file.value == .buffer) {
|
||||
@@ -379,6 +391,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
string_builder.cap += trailer.len;
|
||||
string_builder.cap += 16;
|
||||
string_builder.cap += @sizeOf(Offsets);
|
||||
string_builder.countZ(compile_exec_argv);
|
||||
|
||||
try string_builder.allocate(allocator);
|
||||
|
||||
@@ -391,7 +404,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
var source_map_arena = bun.ArenaAllocator.init(allocator);
|
||||
defer source_map_arena.deinit();
|
||||
|
||||
for (output_files) |output_file| {
|
||||
for (output_files) |*output_file| {
|
||||
if (!output_file.output_kind.isFileInStandaloneMode()) {
|
||||
continue;
|
||||
}
|
||||
@@ -463,6 +476,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
const offsets = Offsets{
|
||||
.entry_point_id = @as(u32, @truncate(entry_point_id.?)),
|
||||
.modules_ptr = string_builder.appendCount(std.mem.sliceAsBytes(modules.items)),
|
||||
.compile_exec_argv_ptr = string_builder.appendCountZ(compile_exec_argv),
|
||||
.byte_count = string_builder.len,
|
||||
};
|
||||
|
||||
@@ -487,15 +501,28 @@ pub const StandaloneModuleGraph = struct {
|
||||
|
||||
const page_size = std.heap.page_size_max;
|
||||
|
||||
pub const InjectOptions = struct {
|
||||
windows_hide_console: bool = false,
|
||||
pub const InjectOptions = bun.options.WindowsOptions;
|
||||
|
||||
pub const CompileResult = union(enum) {
|
||||
success: void,
|
||||
error_message: []const u8,
|
||||
|
||||
pub fn fail(msg: []const u8) CompileResult {
|
||||
return .{ .error_message = msg };
|
||||
}
|
||||
|
||||
pub fn deinit(this: *const @This()) void {
|
||||
if (this.* == .error_message) {
|
||||
bun.default_allocator.free(this.error_message);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
pub fn inject(bytes: []const u8, self_exe: [:0]const u8, inject_options: InjectOptions, target: *const CompileTarget) bun.FileDescriptor {
|
||||
var buf: bun.PathBuffer = undefined;
|
||||
var zname: [:0]const u8 = bun.span(bun.fs.FileSystem.instance.tmpname("bun-build", &buf, @as(u64, @bitCast(std.time.milliTimestamp()))) catch |err| {
|
||||
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to get temporary file name: {s}", .{@errorName(err)});
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
});
|
||||
|
||||
const cleanup = struct {
|
||||
@@ -525,7 +552,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
|
||||
bun.copyFile(in, out).unwrap() catch |err| {
|
||||
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to copy bun executable into temporary file: {s}", .{@errorName(err)});
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
const file = bun.sys.openFileAtWindows(
|
||||
bun.invalid_fd,
|
||||
@@ -537,7 +564,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
},
|
||||
).unwrap() catch |e| {
|
||||
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to open temporary file to copy bun into\n{}", .{e});
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
|
||||
break :brk file;
|
||||
@@ -580,7 +607,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
std.fs.path.sep_str,
|
||||
zname,
|
||||
&.{0},
|
||||
}) catch bun.outOfMemory();
|
||||
}) catch |e| bun.handleOom(e);
|
||||
zname = zname_z[0..zname_z.len -| 1 :0];
|
||||
continue;
|
||||
}
|
||||
@@ -591,7 +618,8 @@ pub const StandaloneModuleGraph = struct {
|
||||
}
|
||||
|
||||
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to open temporary file to copy bun into\n{}", .{err});
|
||||
Global.exit(1);
|
||||
// No fd to cleanup yet, just return error
|
||||
return bun.invalid_fd;
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -613,7 +641,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
|
||||
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to open bun executable to copy from as read-only\n{}", .{err});
|
||||
cleanup(zname, fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -625,8 +653,9 @@ pub const StandaloneModuleGraph = struct {
|
||||
bun.copyFile(self_fd, fd).unwrap() catch |err| {
|
||||
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to copy bun executable into temporary file: {s}", .{@errorName(err)});
|
||||
cleanup(zname, fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
|
||||
break :brk fd;
|
||||
};
|
||||
|
||||
@@ -636,18 +665,18 @@ pub const StandaloneModuleGraph = struct {
|
||||
if (input_result.err) |err| {
|
||||
Output.prettyErrorln("Error reading standalone module graph: {}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
}
|
||||
var macho_file = bun.macho.MachoFile.init(bun.default_allocator, input_result.bytes.items, bytes.len) catch |err| {
|
||||
Output.prettyErrorln("Error initializing standalone module graph: {}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
defer macho_file.deinit();
|
||||
macho_file.writeSection(bytes) catch |err| {
|
||||
Output.prettyErrorln("Error writing standalone module graph: {}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
input_result.bytes.deinit();
|
||||
|
||||
@@ -655,7 +684,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
.err => |err| {
|
||||
Output.prettyErrorln("Error seeking to start of temporary file: {}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
@@ -663,19 +692,19 @@ pub const StandaloneModuleGraph = struct {
|
||||
var file = bun.sys.File{ .handle = cloned_executable_fd };
|
||||
const writer = file.writer();
|
||||
const BufferedWriter = std.io.BufferedWriter(512 * 1024, @TypeOf(writer));
|
||||
var buffered_writer = bun.default_allocator.create(BufferedWriter) catch bun.outOfMemory();
|
||||
var buffered_writer = bun.handleOom(bun.default_allocator.create(BufferedWriter));
|
||||
buffered_writer.* = .{
|
||||
.unbuffered_writer = writer,
|
||||
};
|
||||
macho_file.buildAndSign(buffered_writer.writer()) catch |err| {
|
||||
Output.prettyErrorln("Error writing standalone module graph: {}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
buffered_writer.flush() catch |err| {
|
||||
Output.prettyErrorln("Error flushing standalone module graph: {}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
if (comptime !Environment.isWindows) {
|
||||
_ = bun.c.fchmod(cloned_executable_fd.native(), 0o777);
|
||||
@@ -687,18 +716,18 @@ pub const StandaloneModuleGraph = struct {
|
||||
if (input_result.err) |err| {
|
||||
Output.prettyErrorln("Error reading standalone module graph: {}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
}
|
||||
var pe_file = bun.pe.PEFile.init(bun.default_allocator, input_result.bytes.items) catch |err| {
|
||||
Output.prettyErrorln("Error initializing PE file: {}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
defer pe_file.deinit();
|
||||
pe_file.addBunSection(bytes) catch |err| {
|
||||
Output.prettyErrorln("Error adding Bun section to PE file: {}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
input_result.bytes.deinit();
|
||||
|
||||
@@ -706,7 +735,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
.err => |err| {
|
||||
Output.prettyErrorln("Error seeking to start of temporary file: {}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
@@ -716,7 +745,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
pe_file.write(writer) catch |err| {
|
||||
Output.prettyErrorln("Error writing PE file: {}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
// Set executable permissions when running on POSIX hosts, even for Windows targets
|
||||
if (comptime !Environment.isWindows) {
|
||||
@@ -730,7 +759,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
total_byte_count = bytes.len + 8 + (Syscall.setFileOffsetToEndWindows(cloned_executable_fd).unwrap() catch |err| {
|
||||
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to seek to end of temporary file\n{}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
});
|
||||
} else {
|
||||
const seek_position = @as(u64, @intCast(brk: {
|
||||
@@ -739,7 +768,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
.err => |err| {
|
||||
Output.prettyErrorln("{}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -766,7 +795,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
},
|
||||
);
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
@@ -779,8 +808,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
.err => |err| {
|
||||
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to write to temporary file\n{}", .{err});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
|
||||
Global.exit(1);
|
||||
return bun.invalid_fd;
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -795,12 +823,42 @@ pub const StandaloneModuleGraph = struct {
|
||||
},
|
||||
}
|
||||
|
||||
if (Environment.isWindows and inject_options.windows_hide_console) {
|
||||
if (Environment.isWindows and inject_options.hide_console) {
|
||||
bun.windows.editWin32BinarySubsystem(.{ .handle = cloned_executable_fd }, .windows_gui) catch |err| {
|
||||
Output.err(err, "failed to disable console on executable", .{});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
}
|
||||
|
||||
Global.exit(1);
|
||||
// Set Windows icon and/or metadata if any options are provided (single operation)
|
||||
if (Environment.isWindows and (inject_options.icon != null or
|
||||
inject_options.title != null or
|
||||
inject_options.publisher != null or
|
||||
inject_options.version != null or
|
||||
inject_options.description != null or
|
||||
inject_options.copyright != null))
|
||||
{
|
||||
var zname_buf: bun.OSPathBuffer = undefined;
|
||||
const zname_w = bun.strings.toWPathNormalized(&zname_buf, zname) catch |err| {
|
||||
Output.err(err, "failed to resolve executable path", .{});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
|
||||
// Single call to set all Windows metadata at once
|
||||
bun.windows.rescle.setWindowsMetadata(
|
||||
zname_w.ptr,
|
||||
inject_options.icon,
|
||||
inject_options.title,
|
||||
inject_options.publisher,
|
||||
inject_options.version,
|
||||
inject_options.description,
|
||||
inject_options.copyright,
|
||||
) catch |err| {
|
||||
Output.err(err, "failed to set Windows metadata on executable", .{});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
return bun.invalid_fd;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -816,7 +874,43 @@ pub const StandaloneModuleGraph = struct {
|
||||
var needs_download: bool = true;
|
||||
const dest_z = target.exePath(&exe_path_buf, version_str, env, &needs_download);
|
||||
if (needs_download) {
|
||||
try target.downloadToPath(env, allocator, dest_z);
|
||||
target.downloadToPath(env, allocator, dest_z) catch |err| {
|
||||
// For CLI, provide detailed error messages and exit
|
||||
switch (err) {
|
||||
error.TargetNotFound => {
|
||||
Output.errGeneric(
|
||||
\\Does this target and version of Bun exist?
|
||||
\\
|
||||
\\404 downloading {} from npm registry
|
||||
, .{target.*});
|
||||
},
|
||||
error.NetworkError => {
|
||||
Output.errGeneric(
|
||||
\\Failed to download cross-compilation target.
|
||||
\\
|
||||
\\Network error downloading {} from npm registry
|
||||
, .{target.*});
|
||||
},
|
||||
error.InvalidResponse => {
|
||||
Output.errGeneric(
|
||||
\\Failed to verify the integrity of the downloaded tarball.
|
||||
\\
|
||||
\\The downloaded content for {} appears to be corrupted
|
||||
, .{target.*});
|
||||
},
|
||||
error.ExtractionFailed => {
|
||||
Output.errGeneric(
|
||||
\\Failed to extract the downloaded tarball.
|
||||
\\
|
||||
\\Could not extract executable for {}
|
||||
, .{target.*});
|
||||
},
|
||||
else => {
|
||||
Output.errGeneric("Failed to download {}: {s}", .{ target.*, @errorName(err) });
|
||||
},
|
||||
}
|
||||
return error.DownloadFailed;
|
||||
};
|
||||
}
|
||||
|
||||
return try allocator.dupeZ(u8, dest_z);
|
@@ -831,86 +925,174 @@ pub const StandaloneModuleGraph = struct {
        outfile: []const u8,
        env: *bun.DotEnv.Loader,
        output_format: bun.options.Format,
        windows_hide_console: bool,
        windows_icon: ?[]const u8,
    ) !void {
        const bytes = try toBytes(allocator, module_prefix, output_files, output_format);
        if (bytes.len == 0) return;
        windows_options: bun.options.WindowsOptions,
        compile_exec_argv: []const u8,
        self_exe_path: ?[]const u8,
    ) !CompileResult {
        const bytes = toBytes(allocator, module_prefix, output_files, output_format, compile_exec_argv) catch |err| {
            return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to generate module graph bytes: {s}", .{@errorName(err)}) catch "failed to generate module graph bytes");
        };
        if (bytes.len == 0) return CompileResult.fail("no output files to bundle");
        defer allocator.free(bytes);

        const fd = inject(
            bytes,
            if (target.isDefault())
                bun.selfExePath() catch |err| {
                    Output.err(err, "failed to get self executable path", .{});
                    Global.exit(1);
                }
            else
                download(allocator, target, env) catch |err| {
                    Output.err(err, "failed to download cross-compiled bun executable", .{});
                    Global.exit(1);
                },
            .{ .windows_hide_console = windows_hide_console },
            target,
        );
        bun.debugAssert(fd.kind == .system);

        if (Environment.isWindows) {
            var outfile_buf: bun.OSPathBuffer = undefined;
            const outfile_slice = brk: {
                const outfile_w = bun.strings.toWPathNormalized(&outfile_buf, std.fs.path.basenameWindows(outfile));
                bun.assert(outfile_w.ptr == &outfile_buf);
                const outfile_buf_u16 = bun.reinterpretSlice(u16, &outfile_buf);
                outfile_buf_u16[outfile_w.len] = 0;
                break :brk outfile_buf_u16[0..outfile_w.len :0];
        var free_self_exe = false;
        const self_exe = if (self_exe_path) |path| brk: {
            free_self_exe = true;
            break :brk bun.handleOom(allocator.dupeZ(u8, path));
        } else if (target.isDefault())
            bun.selfExePath() catch |err| {
                return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to get self executable path: {s}", .{@errorName(err)}) catch "failed to get self executable path");
            }
        else blk: {
            var exe_path_buf: bun.PathBuffer = undefined;
            var version_str_buf: [1024]u8 = undefined;
            const version_str = std.fmt.bufPrintZ(&version_str_buf, "{}", .{target}) catch {
                return CompileResult.fail("failed to format target version string");
            };
            var needs_download: bool = true;
            const dest_z = target.exePath(&exe_path_buf, version_str, env, &needs_download);

            bun.windows.moveOpenedFileAtLoose(fd, .fromStdDir(root_dir), outfile_slice, true).unwrap() catch |err| {
                if (err == error.EISDIR) {
                    Output.errGeneric("{} is a directory. Please choose a different --outfile or delete the directory", .{bun.fmt.utf16(outfile_slice)});
                } else {
                    Output.err(err, "failed to move executable to result path", .{});
                }

                _ = bun.windows.deleteOpenedFile(fd);

                Global.exit(1);
            };
            fd.close();

            if (windows_icon) |icon_utf8| {
                var icon_buf: bun.OSPathBuffer = undefined;
                const icon = bun.strings.toWPathNormalized(&icon_buf, icon_utf8);
                bun.windows.rescle.setIcon(outfile_slice, icon) catch {
                    Output.warn("Failed to set executable icon", .{});
            if (needs_download) {
                target.downloadToPath(env, allocator, dest_z) catch |err| {
                    const msg = switch (err) {
                        error.TargetNotFound => std.fmt.allocPrint(allocator, "Target platform '{}' is not available for download. Check if this version of Bun supports this target.", .{target}) catch "Target platform not available for download",
                        error.NetworkError => std.fmt.allocPrint(allocator, "Network error downloading executable for '{}'. Check your internet connection and proxy settings.", .{target}) catch "Network error downloading executable",
                        error.InvalidResponse => std.fmt.allocPrint(allocator, "Downloaded file for '{}' appears to be corrupted. Please try again.", .{target}) catch "Downloaded file is corrupted",
                        error.ExtractionFailed => std.fmt.allocPrint(allocator, "Failed to extract executable for '{}'. The download may be incomplete.", .{target}) catch "Failed to extract downloaded executable",
                        error.UnsupportedTarget => std.fmt.allocPrint(allocator, "Target '{}' is not supported", .{target}) catch "Unsupported target",
                        else => std.fmt.allocPrint(allocator, "Failed to download '{}': {s}", .{ target, @errorName(err) }) catch "Download failed",
                    };
                    return CompileResult.fail(msg);
                };
            }
            return;

            free_self_exe = true;
            break :blk bun.handleOom(allocator.dupeZ(u8, dest_z));
        };

        defer if (free_self_exe) {
            allocator.free(self_exe);
        };

        var fd = inject(
            bytes,
            self_exe,
            windows_options,
            target,
        );
        defer if (fd != bun.invalid_fd) fd.close();
        bun.debugAssert(fd.kind == .system);

        if (Environment.isPosix) {
            // Set executable permissions (0o755 = rwxr-xr-x) - makes it executable for owner, readable/executable for group and others
            _ = Syscall.fchmod(fd, 0o755);
        }

        if (Environment.isWindows) {
            // Get the current path of the temp file
            var temp_buf: bun.PathBuffer = undefined;
            const temp_path = bun.getFdPath(fd, &temp_buf) catch |err| {
                return CompileResult.fail(std.fmt.allocPrint(allocator, "Failed to get temp file path: {s}", .{@errorName(err)}) catch "Failed to get temp file path");
            };

            // Build the absolute destination path
            // On Windows, we need an absolute path for MoveFileExW
            // Get the current working directory and join with outfile
            var cwd_buf: bun.PathBuffer = undefined;
            const cwd_path = bun.getcwd(&cwd_buf) catch |err| {
                return CompileResult.fail(std.fmt.allocPrint(allocator, "Failed to get current directory: {s}", .{@errorName(err)}) catch "Failed to get current directory");
            };
            const dest_path = if (std.fs.path.isAbsolute(outfile))
                outfile
            else
                bun.path.joinAbsString(cwd_path, &[_][]const u8{outfile}, .auto);

            // Convert paths to Windows UTF-16
            var temp_buf_w: bun.OSPathBuffer = undefined;
            var dest_buf_w: bun.OSPathBuffer = undefined;
            const temp_w = bun.strings.toWPathNormalized(&temp_buf_w, temp_path);
            const dest_w = bun.strings.toWPathNormalized(&dest_buf_w, dest_path);

            // Ensure null termination
            const temp_buf_u16 = bun.reinterpretSlice(u16, &temp_buf_w);
            const dest_buf_u16 = bun.reinterpretSlice(u16, &dest_buf_w);
            temp_buf_u16[temp_w.len] = 0;
            dest_buf_u16[dest_w.len] = 0;

            // Close the file handle before moving (Windows requires this)
            fd.close();
            fd = bun.invalid_fd;

            // Move the file using MoveFileExW
            if (bun.windows.kernel32.MoveFileExW(temp_buf_u16[0..temp_w.len :0].ptr, dest_buf_u16[0..dest_w.len :0].ptr, bun.windows.MOVEFILE_COPY_ALLOWED | bun.windows.MOVEFILE_REPLACE_EXISTING | bun.windows.MOVEFILE_WRITE_THROUGH) == bun.windows.FALSE) {
                const err = bun.windows.Win32Error.get();
                if (err.toSystemErrno()) |sys_err| {
                    if (sys_err == .EISDIR) {
                        return CompileResult.fail(std.fmt.allocPrint(allocator, "{s} is a directory. Please choose a different --outfile or delete the directory", .{outfile}) catch "outfile is a directory");
                    } else {
                        return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to move executable to {s}: {s}", .{ dest_path, @tagName(sys_err) }) catch "failed to move executable");
                    }
                } else {
                    return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to move executable to {s}", .{dest_path}) catch "failed to move executable");
                }
            }

            // Set Windows icon and/or metadata using unified function
            if (windows_options.icon != null or
                windows_options.title != null or
                windows_options.publisher != null or
                windows_options.version != null or
                windows_options.description != null or
                windows_options.copyright != null)
            {
                // The file has been moved to dest_path
                bun.windows.rescle.setWindowsMetadata(
                    dest_buf_u16[0..dest_w.len :0].ptr,
                    windows_options.icon,
                    windows_options.title,
                    windows_options.publisher,
                    windows_options.version,
                    windows_options.description,
                    windows_options.copyright,
                ) catch |err| {
                    return CompileResult.fail(std.fmt.allocPrint(allocator, "Failed to set Windows metadata: {s}", .{@errorName(err)}) catch "Failed to set Windows metadata");
                };
            }
            return .success;
        }

        var buf: bun.PathBuffer = undefined;
        const temp_location = bun.getFdPath(fd, &buf) catch |err| {
            Output.prettyErrorln("<r><red>error<r><d>:<r> failed to get path for fd: {s}", .{@errorName(err)});
            Global.exit(1);
            return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to get path for fd: {s}", .{@errorName(err)}) catch "failed to get path for file descriptor");
        };
        const temp_posix = std.posix.toPosixPath(temp_location) catch |err| {
            return CompileResult.fail(std.fmt.allocPrint(allocator, "path too long: {s}", .{@errorName(err)}) catch "path too long");
        };
        const outfile_basename = std.fs.path.basename(outfile);
        const outfile_posix = std.posix.toPosixPath(outfile_basename) catch |err| {
            return CompileResult.fail(std.fmt.allocPrint(allocator, "outfile name too long: {s}", .{@errorName(err)}) catch "outfile name too long");
        };

        bun.sys.moveFileZWithHandle(
            fd,
            bun.FD.cwd(),
            bun.sliceTo(&(try std.posix.toPosixPath(temp_location)), 0),
            bun.sliceTo(&temp_posix, 0),
            .fromStdDir(root_dir),
            bun.sliceTo(&(try std.posix.toPosixPath(std.fs.path.basename(outfile))), 0),
            bun.sliceTo(&outfile_posix, 0),
        ) catch |err| {
            if (err == error.IsDir or err == error.EISDIR) {
                Output.prettyErrorln("<r><red>error<r><d>:<r> {} is a directory. Please choose a different --outfile or delete the directory", .{bun.fmt.quote(outfile)});
            } else {
                Output.prettyErrorln("<r><red>error<r><d>:<r> failed to rename {s} to {s}: {s}", .{ temp_location, outfile, @errorName(err) });
            }
            _ = Syscall.unlink(
                &(try std.posix.toPosixPath(temp_location)),
            );
            fd.close();
            fd = bun.invalid_fd;

            Global.exit(1);
            _ = Syscall.unlink(&temp_posix);

            if (err == error.IsDir or err == error.EISDIR) {
                return CompileResult.fail(std.fmt.allocPrint(allocator, "{s} is a directory. Please choose a different --outfile or delete the directory", .{outfile}) catch "outfile is a directory");
            } else {
                return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to rename {s} to {s}: {s}", .{ temp_location, outfile, @errorName(err) }) catch "failed to rename file");
            }
        };

        return .success;
    }

    pub fn fromExecutable(allocator: std.mem.Allocator) !?StandaloneModuleGraph {
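In the rewritten function above, CLI-style `Output.prettyErrorln` plus `Global.exit(1)` paths give way to returning `CompileResult.fail(...)`, and every `std.fmt.allocPrint` that builds a detailed message falls back to a static string if the allocation itself fails. A rough sketch of that shape, with a hypothetical `CompileResult` union that only mirrors what the diff implies:

```zig
const std = @import("std");

// Hypothetical CompileResult; Bun's real type likely carries more information.
const CompileResult = union(enum) {
    success,
    failure: []const u8,

    fn fail(msg: []const u8) CompileResult {
        return .{ .failure = msg };
    }
};

// Report a failure to the caller instead of printing and exiting. The
// allocPrint fallback keeps the error path intact even if formatting the
// detailed message itself runs out of memory.
fn moveExecutable(allocator: std.mem.Allocator, dest_path: []const u8, simulate_failure: bool) CompileResult {
    if (simulate_failure) {
        return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to move executable to {s}", .{dest_path}) catch "failed to move executable");
    }
    return .success;
}

pub fn main() void {
    switch (moveExecutable(std.heap.page_allocator, "./dist/app", true)) {
        .success => std.debug.print("ok\n", .{}),
        .failure => |msg| std.debug.print("compile failed: {s}\n", .{msg}),
    }
}
```

Returning a result value rather than exiting presumably lets a non-CLI caller reuse the same compile path and decide for itself how to surface the failure.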
@@ -1196,7 +1378,7 @@ pub const StandaloneModuleGraph = struct {
        const compressed_file = compressed_codes[@intCast(index)].slice(this.map.bytes);
        const size = bun.zstd.getDecompressedSize(compressed_file);

        const bytes = bun.default_allocator.alloc(u8, size) catch bun.outOfMemory();
        const bytes = bun.handleOom(bun.default_allocator.alloc(u8, size));
        const result = bun.zstd.decompress(bytes, compressed_file);

        if (result == .err) {
@@ -1316,7 +1498,6 @@ const w = std.os.windows;

const bun = @import("bun");
const Environment = bun.Environment;
const Global = bun.Global;
const Output = bun.Output;
const SourceMap = bun.sourcemap;
const StringPointer = bun.StringPointer;

@@ -322,7 +322,7 @@ fn appendFileAssumeCapacity(
    const watchlist_id = this.watchlist.len;

    const file_path_: string = if (comptime clone_file_path)
        bun.asByteSlice(this.allocator.dupeZ(u8, file_path) catch bun.outOfMemory())
        bun.asByteSlice(bun.handleOom(this.allocator.dupeZ(u8, file_path)))
    else
        file_path;

@@ -409,7 +409,7 @@ fn appendDirectoryAssumeCapacity(
    };

    const file_path_: string = if (comptime clone_file_path)
        bun.asByteSlice(this.allocator.dupeZ(u8, file_path) catch bun.outOfMemory())
        bun.asByteSlice(bun.handleOom(this.allocator.dupeZ(u8, file_path)))
    else
        file_path;

@@ -529,7 +529,7 @@ pub fn appendFileMaybeLock(
            }
        }
    }
    this.watchlist.ensureUnusedCapacity(this.allocator, 1 + @as(usize, @intCast(@intFromBool(parent_watch_item == null)))) catch bun.outOfMemory();
    bun.handleOom(this.watchlist.ensureUnusedCapacity(this.allocator, 1 + @as(usize, @intCast(@intFromBool(parent_watch_item == null)))));

    if (autowatch_parent_dir) {
        parent_watch_item = parent_watch_item orelse switch (this.appendDirectoryAssumeCapacity(dir_fd, parent_dir, parent_dir_hash, clone_file_path)) {
@@ -595,7 +595,7 @@ pub fn addDirectory(
        return .{ .result = @truncate(idx) };
    }

    this.watchlist.ensureUnusedCapacity(this.allocator, 1) catch bun.outOfMemory();
    bun.handleOom(this.watchlist.ensureUnusedCapacity(this.allocator, 1));

    return this.appendDirectoryAssumeCapacity(fd, file_path, hash, clone_file_path);
}

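Several of the hunks above swap the older `... catch bun.outOfMemory()` idiom for `bun.handleOom(...)`, wrapping the whole allocating expression instead of appending a `catch`. A small sketch of that pattern using hypothetical local stand-ins (not Bun's real helpers, whose details may differ):

```zig
const std = @import("std");

// Hypothetical stand-in for bun.outOfMemory(): abort the process on OOM.
fn outOfMemory() noreturn {
    std.debug.panic("out of memory", .{});
}

// Hypothetical stand-in for bun.handleOom(): unwrap a value whose only
// possible error is OutOfMemory, aborting if the allocation failed.
fn handleOom(result: error{OutOfMemory}![:0]u8) [:0]u8 {
    return result catch outOfMemory();
}

pub fn main() void {
    const allocator = std.heap.page_allocator;
    // Old style seen in the hunks:  allocator.dupeZ(u8, path) catch outOfMemory()
    const old_style = allocator.dupeZ(u8, "/tmp/example") catch outOfMemory();
    defer allocator.free(old_style);
    // New style:  handleOom(allocator.dupeZ(u8, path))
    const new_style = handleOom(allocator.dupeZ(u8, "/tmp/example"));
    defer allocator.free(new_style);
    std.debug.print("{s} {s}\n", .{ old_style, new_style });
}
```

Both spell the same abort-on-OOM behavior; the wrapper form keeps the allocation call readable when it is nested inside a larger expression.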
Some files were not shown because too many files have changed in this diff.