Compare commits

..

3 Commits

Author SHA1 Message Date
autofix-ci[bot]
652e411338 [autofix.ci] apply automated fixes 2025-08-14 04:29:50 +00:00
Claude Bot
d7bf3a6ceb Add Node.js behavior comparison and additional test case
Enhanced the regression test to document the difference between Node.js
and Bun error messages, and added a test case to ensure custom
prepareStackTrace functions continue to work as expected.
2025-08-14 04:26:16 +00:00
Claude Bot
43e2b32989 Fix Error.prepareStackTrace crash when called with insufficient arguments
Added parameter validation to prevent segmentation fault when
Error.prepareStackTrace() is called without the required second
parameter (callSites array).

Previously, calling Error.prepareStackTrace(error) without the second
parameter would cause a segmentation fault at address 0x4. Now it
properly throws a TypeError with the message "Second argument must
be an array of call sites".

This matches the expected Node.js/V8 behavior where prepareStackTrace
requires both error and callSites parameters.

Fixes #21815

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-08-14 04:23:34 +00:00
468 changed files with 13150 additions and 118175 deletions

View File

@@ -434,17 +434,11 @@ function getBuildEnv(target, options) {
* @param {PipelineOptions} options
* @returns {string}
*/
function getBuildCommand(target, options, label) {
function getBuildCommand(target, options) {
const { profile } = target;
const buildProfile = profile || "release";
if (target.os === "windows" && label === "build-bun") {
// Only sign release builds, not canary builds (DigiCert charges per signature)
const enableSigning = !options.canary ? " -DENABLE_WINDOWS_CODESIGNING=ON" : "";
return `bun run build:${buildProfile}${enableSigning}`;
}
return `bun run build:${buildProfile}`;
const label = profile || "release";
return `bun run build:${label}`;
}
/**
@@ -540,7 +534,7 @@ function getLinkBunStep(platform, options) {
BUN_LINK_ONLY: "ON",
...getBuildEnv(platform, options),
},
command: `${getBuildCommand(platform, options, "build-bun")} --target bun`,
command: `${getBuildCommand(platform, options)} --target bun`,
};
}

View File

@@ -1,464 +0,0 @@
# Windows Code Signing Script for Bun
# Uses DigiCert KeyLocker for Authenticode signing
# Native PowerShell implementation - no path translation issues
#
# Usage: sign.ps1 -BunProfileExe <path> -BunExe <path>
# Requires env vars: SM_API_KEY, SM_CLIENT_CERT_PASSWORD, SM_KEYPAIR_ALIAS,
# SM_HOST, SM_CLIENT_CERT_FILE (base64 blob or file path).

# Paths to the two executables that must be signed.
param(
    [Parameter(Mandatory=$true)]
    [string]$BunProfileExe,
    [Parameter(Mandatory=$true)]
    [string]$BunExe
)

# Fail fast on any error; suppress progress bars (speeds up WebClient/CI output).
$ErrorActionPreference = "Stop"
$ProgressPreference = "SilentlyContinue"
# Console logging helpers: each writes a tagged, color-coded line to the host.
function Log-Info {
    param([string]$Message)
    Write-Host -ForegroundColor Cyan "[INFO] $Message"
}
function Log-Success {
    param([string]$Message)
    Write-Host -ForegroundColor Green "[SUCCESS] $Message"
}
function Log-Error {
    param([string]$Message)
    Write-Host -ForegroundColor Red "[ERROR] $Message"
}
# Debug output is opt-in: printed only when $env:DEBUG is "true" or "1".
function Log-Debug {
    param([string]$Message)
    $debugEnabled = ($env:DEBUG -eq "true") -or ($env:DEBUG -eq "1")
    if ($debugEnabled) {
        Write-Host -ForegroundColor Gray "[DEBUG] $Message"
    }
}
# Load Visual Studio environment if not already loaded
# Presence of $env:VSINSTALLDIR is used as the "already loaded" signal.
# Locates VS via vswhere (newest install, prereleases allowed), falling back
# to scanning the VS 2022 install root, then dot-sources Launch-VsDevShell.ps1
# for an amd64 host/target. Throws if VS cannot be found, and refuses to run
# in a 32-bit-targeting shell.
function Ensure-VSEnvironment {
    if ($null -eq $env:VSINSTALLDIR) {
        Log-Info "Loading Visual Studio environment..."
        $vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe"
        if (!(Test-Path $vswhere)) {
            throw "Command not found: vswhere (did you install Visual Studio?)"
        }
        # Newest (possibly prerelease) installation path, or $null if none.
        $vsDir = & $vswhere -prerelease -latest -property installationPath
        if ($null -eq $vsDir) {
            # Fallback: any edition directory under the VS 2022 root.
            $vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory -ErrorAction SilentlyContinue
            if ($null -eq $vsDir) {
                throw "Visual Studio directory not found."
            }
            $vsDir = $vsDir.FullName
        }
        # Push/Pop around the dev-shell script — presumably because
        # Launch-VsDevShell.ps1 may change the current directory; verify.
        Push-Location $vsDir
        try {
            $vsShell = Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1"
            . $vsShell -Arch amd64 -HostArch amd64
        } finally {
            Pop-Location
        }
        Log-Success "Visual Studio environment loaded"
    }
    # Signing requires 64-bit tooling; bail out of an x86-targeting shell.
    if ($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
        throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported."
    }
}
# Check for required environment variables
# Verifies that every DigiCert/KeyLocker secret this script needs is present
# and non-empty; throws with the full list of missing names otherwise.
function Check-Environment {
    Log-Info "Checking environment variables..."
    $requiredNames = @(
        "SM_API_KEY",
        "SM_CLIENT_CERT_PASSWORD",
        "SM_KEYPAIR_ALIAS",
        "SM_HOST",
        "SM_CLIENT_CERT_FILE"
    )
    $missing = @()
    foreach ($key in $requiredNames) {
        $value = [Environment]::GetEnvironmentVariable($key)
        if ([string]::IsNullOrEmpty($value)) {
            $missing += $key
        } else {
            # Log only the length, never the secret itself.
            Log-Debug "$key is set (length: $($value.Length))"
        }
    }
    if ($missing.Count -gt 0) {
        throw "Missing required environment variables: $($missing -join ', ')"
    }
    Log-Success "All required environment variables are present"
}
# Setup certificate
# Resolve $env:SM_CLIENT_CERT_FILE into a usable .p12 file on disk.
# The variable may hold either base64-encoded certificate bytes (typical for a
# CI secret) or a path to an existing file. Base64 decode is attempted first;
# on any failure the value is treated as a file path. On success the env var
# always points at a real file, and a temp file (if created) is recorded in
# $global:TEMP_CERT_PATH so Cleanup can remove it later.
function Setup-Certificate {
    Log-Info "Setting up certificate..."
    # Always try to decode as base64 first
    # If it fails, then treat as file path
    try {
        Log-Info "Attempting to decode certificate as base64..."
        Log-Debug "Input string length: $($env:SM_CLIENT_CERT_FILE.Length) characters"
        # Random suffix avoids collisions between concurrent runs.
        $tempCertPath = Join-Path $env:TEMP "digicert_cert_$(Get-Random).p12"
        # Try to decode as base64 (throws on invalid input, landing in catch)
        $certBytes = [System.Convert]::FromBase64String($env:SM_CLIENT_CERT_FILE)
        [System.IO.File]::WriteAllBytes($tempCertPath, $certBytes)
        # Validate the decoded certificate size
        $fileSize = (Get-Item $tempCertPath).Length
        if ($fileSize -lt 100) {
            throw "Decoded certificate too small: $fileSize bytes (expected >100 bytes)"
        }
        # Update environment to point to file (smctl reads SM_CLIENT_CERT_FILE)
        $env:SM_CLIENT_CERT_FILE = $tempCertPath
        Log-Success "Certificate decoded and written to: $tempCertPath"
        Log-Debug "Decoded certificate file size: $fileSize bytes"
        # Register cleanup
        $global:TEMP_CERT_PATH = $tempCertPath
    } catch {
        # If base64 decode fails, check if it's a file path
        Log-Info "Base64 decode failed, checking if it's a file path..."
        Log-Debug "Decode error: $_"
        if (Test-Path $env:SM_CLIENT_CERT_FILE) {
            $fileSize = (Get-Item $env:SM_CLIENT_CERT_FILE).Length
            # Validate file size
            if ($fileSize -lt 100) {
                throw "Certificate file too small: $fileSize bytes at $env:SM_CLIENT_CERT_FILE (possibly corrupted)"
            }
            Log-Info "Using certificate file: $env:SM_CLIENT_CERT_FILE"
            Log-Debug "Certificate file size: $fileSize bytes"
        } else {
            throw "SM_CLIENT_CERT_FILE is neither valid base64 nor an existing file: $env:SM_CLIENT_CERT_FILE"
        }
    }
}
# Install DigiCert KeyLocker tools
# Ensure smctl.exe (the DigiCert KeyLocker CLI) is installed under
# C:\BuildTools\DigiCert, downloading and silently installing its MSI when
# missing. Prepends the install directory to PATH and returns the full path
# to smctl.exe. Throws if download, installation, or tool lookup fails.
function Install-KeyLocker {
    Log-Info "Setting up DigiCert KeyLocker tools..."
    # Define our controlled installation directory
    $installDir = "C:\BuildTools\DigiCert"
    $smctlPath = Join-Path $installDir "smctl.exe"
    # Check if already installed in our controlled location
    if (Test-Path $smctlPath) {
        Log-Success "KeyLocker tools already installed at: $smctlPath"
        # Add to PATH if not already there
        if ($env:PATH -notlike "*$installDir*") {
            $env:PATH = "$installDir;$env:PATH"
            Log-Info "Added to PATH: $installDir"
        }
        return $smctlPath
    }
    Log-Info "Installing KeyLocker tools to: $installDir"
    # Create the installation directory if it doesn't exist
    if (!(Test-Path $installDir)) {
        Log-Info "Creating installation directory: $installDir"
        try {
            New-Item -ItemType Directory -Path $installDir -Force | Out-Null
            Log-Success "Created directory: $installDir"
        } catch {
            throw "Failed to create directory $installDir : $_"
        }
    }
    # Download MSI installer (URL points at Bun's CI asset host)
    $msiUrl = "https://bun-ci-assets.bun.sh/Keylockertools-windows-x64.msi"
    $msiPath = Join-Path $env:TEMP "Keylockertools-windows-x64.msi"
    Log-Info "Downloading MSI from: $msiUrl"
    Log-Info "Downloading to: $msiPath"
    try {
        # Remove existing MSI if present
        if (Test-Path $msiPath) {
            Remove-Item $msiPath -Force
            Log-Debug "Removed existing MSI file"
        }
        # Download with progress tracking
        $webClient = New-Object System.Net.WebClient
        $webClient.DownloadFile($msiUrl, $msiPath)
        if (!(Test-Path $msiPath)) {
            throw "MSI download failed - file not found"
        }
        $fileSize = (Get-Item $msiPath).Length
        Log-Success "MSI downloaded successfully (size: $fileSize bytes)"
    } catch {
        throw "Failed to download MSI: $_"
    }
    # Install MSI
    Log-Info "Installing MSI..."
    Log-Debug "MSI path: $msiPath"
    Log-Debug "File exists: $(Test-Path $msiPath)"
    Log-Debug "File size: $((Get-Item $msiPath).Length) bytes"
    # Check if running as administrator (used for diagnostics logging only)
    $isAdmin = ([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole]::Administrator)
    Log-Info "Running as administrator: $isAdmin"
    # Install MSI silently to our controlled directory
    $arguments = @(
        "/i", "`"$msiPath`"",
        "/quiet",
        "/norestart",
        "TARGETDIR=`"$installDir`"",
        "INSTALLDIR=`"$installDir`"",
        "ACCEPT_EULA=1",
        "ADDLOCAL=ALL"
    )
    Log-Debug "Running: msiexec.exe $($arguments -join ' ')"
    Log-Info "Installing to: $installDir"
    $process = Start-Process -FilePath "msiexec.exe" -ArgumentList $arguments -Wait -PassThru -NoNewWindow
    if ($process.ExitCode -ne 0) {
        Log-Error "MSI installation failed with exit code: $($process.ExitCode)"
        # Try to get error details from event log (last minute of MsiInstaller events)
        try {
            $events = Get-WinEvent -LogName "Application" -MaxEvents 10 |
            Where-Object { $_.ProviderName -eq "MsiInstaller" -and $_.TimeCreated -gt (Get-Date).AddMinutes(-1) }
            foreach ($event in $events) {
                Log-Debug "MSI Event: $($event.Message)"
            }
        } catch {
            Log-Debug "Could not retrieve MSI installation events"
        }
        throw "MSI installation failed with exit code: $($process.ExitCode)"
    }
    Log-Success "MSI installation completed"
    # Wait for installation to complete
    Start-Sleep -Seconds 2
    # Verify smctl.exe exists in our controlled location
    if (Test-Path $smctlPath) {
        Log-Success "KeyLocker tools installed successfully at: $smctlPath"
        # Add to PATH
        $env:PATH = "$installDir;$env:PATH"
        Log-Info "Added to PATH: $installDir"
        return $smctlPath
    }
    # If not in our expected location, check if it installed somewhere in the directory
    $found = Get-ChildItem -Path $installDir -Filter "smctl.exe" -Recurse -ErrorAction SilentlyContinue |
    Select-Object -First 1
    if ($found) {
        Log-Success "Found smctl.exe at: $($found.FullName)"
        $smctlDir = $found.DirectoryName
        $env:PATH = "$smctlDir;$env:PATH"
        return $found.FullName
    }
    throw "KeyLocker tools installation succeeded but smctl.exe not found in $installDir"
}
# Configure KeyLocker
# Verify smctl runs, save the API key / cert password into the OS credential
# store, run a health check, and sync certificates into the Windows cert
# store. A failing health check is logged but tolerated (it is known to be
# flaky while signing still works); any other failure throws.
#
# Fix: removed three no-op self-assignments ($env:SM_HOST = $env:SM_HOST,
# etc.) — they had no effect since the variables were already set by the
# caller/Setup-Certificate. Logging of those values is preserved.
function Configure-KeyLocker {
    param([string]$SmctlPath)
    Log-Info "Configuring KeyLocker..."
    # Verify smctl is accessible
    try {
        $version = & $SmctlPath --version 2>&1
        Log-Debug "smctl version: $version"
    } catch {
        throw "Failed to run smctl: $_"
    }
    # Configure KeyLocker credentials and environment
    Log-Info "Configuring KeyLocker credentials..."
    try {
        # Save credentials (API key and password) into the OS store
        Log-Info "Saving credentials to OS store..."
        $saveOutput = & $SmctlPath credentials save $env:SM_API_KEY $env:SM_CLIENT_CERT_PASSWORD 2>&1 | Out-String
        Log-Debug "Credentials save output: $saveOutput"
        if ($saveOutput -like "*Credentials saved*") {
            Log-Success "Credentials saved successfully"
        }
        # smctl reads SM_HOST / SM_API_KEY / SM_CLIENT_CERT_FILE straight from
        # the environment; they are already set (Setup-Certificate points
        # SM_CLIENT_CERT_FILE at the decoded file), so nothing to assign here.
        Log-Info "Setting KeyLocker environment variables..."
        Log-Debug "SM_HOST: $env:SM_HOST"
        Log-Debug "SM_CLIENT_CERT_FILE: $env:SM_CLIENT_CERT_FILE"
        # Run health check
        Log-Info "Running KeyLocker health check..."
        $healthOutput = & $SmctlPath healthcheck 2>&1 | Out-String
        Log-Debug "Health check output: $healthOutput"
        if ($healthOutput -like "*Healthy*" -or $healthOutput -like "*SUCCESS*" -or $LASTEXITCODE -eq 0) {
            Log-Success "KeyLocker health check passed"
        } else {
            Log-Error "Health check failed: $healthOutput"
            # Don't throw here, sometimes healthcheck is flaky but signing still works
        }
        # Sync certificates to Windows certificate store
        Log-Info "Syncing certificates to Windows store..."
        $syncOutput = & $SmctlPath windows certsync 2>&1 | Out-String
        Log-Debug "Certificate sync output: $syncOutput"
        if ($syncOutput -like "*success*" -or $syncOutput -like "*synced*" -or $LASTEXITCODE -eq 0) {
            Log-Success "Certificates synced to Windows store"
        } else {
            Log-Info "Certificate sync output: $syncOutput"
        }
    } catch {
        throw "Failed to configure KeyLocker: $_"
    }
}
# Sign an executable
# Authenticode-sign a single executable via smctl, using the keypair named by
# $env:SM_KEYPAIR_ALIAS. Files that already carry a valid signature are left
# untouched. The new signature is verified with Get-AuthenticodeSignature;
# throws if the file is missing, signing fails, or verification fails.
function Sign-Executable {
    param(
        [string]$ExePath,
        [string]$SmctlPath
    )
    if (!(Test-Path $ExePath)) {
        throw "Executable not found: $ExePath"
    }
    $fileName = Split-Path $ExePath -Leaf
    Log-Info "Signing $fileName..."
    Log-Debug "Full path: $ExePath"
    Log-Debug "File size: $((Get-Item $ExePath).Length) bytes"
    # Check if already signed — skip to avoid pointless re-signing
    $existingSig = Get-AuthenticodeSignature $ExePath
    if ($existingSig.Status -eq "Valid") {
        Log-Info "$fileName is already signed by: $($existingSig.SignerCertificate.Subject)"
        Log-Info "Skipping re-signing"
        return
    }
    # Sign the executable using smctl
    try {
        # smctl sign command with keypair-alias
        $signArgs = @(
            "sign",
            "--keypair-alias", $env:SM_KEYPAIR_ALIAS,
            "--input", $ExePath,
            "--verbose"
        )
        Log-Debug "Running: $SmctlPath $($signArgs -join ' ')"
        $signOutput = & $SmctlPath $signArgs 2>&1 | Out-String
        if ($LASTEXITCODE -ne 0) {
            Log-Error "Signing output: $signOutput"
            throw "Signing failed with exit code: $LASTEXITCODE"
        }
        Log-Debug "Signing output: $signOutput"
        Log-Success "Signing command completed"
    } catch {
        throw "Failed to sign $fileName : $_"
    }
    # Verify signature independently of smctl's own exit code
    $newSig = Get-AuthenticodeSignature $ExePath
    if ($newSig.Status -eq "Valid") {
        Log-Success "$fileName signed successfully"
        Log-Info "Signed by: $($newSig.SignerCertificate.Subject)"
        Log-Info "Thumbprint: $($newSig.SignerCertificate.Thumbprint)"
        Log-Info "Valid from: $($newSig.SignerCertificate.NotBefore) to $($newSig.SignerCertificate.NotAfter)"
    } else {
        throw "$fileName signature verification failed: $($newSig.Status) - $($newSig.StatusMessage)"
    }
}
# Cleanup function
# Deletes the temporary decoded certificate file, if Setup-Certificate
# created one. Failures are logged, never thrown (runs from finally).
function Cleanup {
    $certPath = $global:TEMP_CERT_PATH
    if (-not $certPath) {
        return
    }
    if (-not (Test-Path $certPath)) {
        return
    }
    try {
        Remove-Item $certPath -Force
        Log-Info "Cleaned up temporary certificate"
    } catch {
        Log-Error "Failed to cleanup temporary certificate: $_"
    }
}
# Main execution
# Orchestrates the full signing flow: VS env -> secret validation -> cert
# setup -> KeyLocker install/config -> sign both binaries. Exits 0 on
# success, 1 on any failure; the temp certificate is always removed in
# the finally block.
try {
    Write-Host "========================================" -ForegroundColor Cyan
    Write-Host " Windows Code Signing for Bun" -ForegroundColor Cyan
    Write-Host "========================================" -ForegroundColor Cyan
    # Ensure we're in a VS environment
    Ensure-VSEnvironment
    # Check environment variables
    Check-Environment
    # Setup certificate
    Setup-Certificate
    # Install and configure KeyLocker
    $smctlPath = Install-KeyLocker
    Configure-KeyLocker -SmctlPath $smctlPath
    # Sign both executables
    Sign-Executable -ExePath $BunProfileExe -SmctlPath $smctlPath
    Sign-Executable -ExePath $BunExe -SmctlPath $smctlPath
    Write-Host "========================================" -ForegroundColor Green
    Write-Host " Code signing completed successfully!" -ForegroundColor Green
    Write-Host "========================================" -ForegroundColor Green
    exit 0
} catch {
    Log-Error "Code signing failed: $_"
    exit 1
} finally {
    Cleanup
}

View File

@@ -1,24 +0,0 @@
# Auto-label workflow: adds the 'claude' label to pull requests opened by
# the 'robobun' bot account, so bot-authored PRs are easy to filter.
# NOTE(review): source indentation was flattened; structure reconstructed
# per standard GitHub Actions schema — confirm against the original file.
name: Auto-label Claude PRs
on:
  pull_request:
    types: [opened]
jobs:
  auto-label:
    # Only PRs authored by the robobun account trigger the labeling step.
    if: github.event.pull_request.user.login == 'robobun'
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: write  # needed to add labels via the Issues API
    steps:
      - name: Add claude label to PRs from robobun
        uses: actions/github-script@v7
        with:
          script: |
            github.rest.issues.addLabels({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              labels: ['claude']
            });

View File

@@ -13,55 +13,23 @@ on:
jobs:
claude:
if: |
github.repository == 'oven-sh/bun' &&
(
(github.event_name == 'issue_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) ||
(github.event_name == 'pull_request_review_comment' && (github.event.comment.author_association == 'MEMBER' || github.event.comment.author_association == 'OWNER' || github.event.comment.author_association == 'COLLABORATOR')) ||
(github.event_name == 'pull_request_review' && (github.event.review.author_association == 'MEMBER' || github.event.review.author_association == 'OWNER' || github.event.review.author_association == 'COLLABORATOR')) ||
(github.event_name == 'issues' && (github.event.issue.author_association == 'MEMBER' || github.event.issue.author_association == 'OWNER' || github.event.issue.author_association == 'COLLABORATOR'))
) &&
(github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
(github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
(github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
runs-on: claude
env:
IS_SANDBOX: 1
container:
image: localhost:5000/claude-bun:latest
options: --privileged --user 1000:1000
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
steps:
- name: Checkout repository
working-directory: /workspace/bun
run: |
git config --global user.email "claude-bot@bun.sh" && \
git config --global user.name "Claude Bot" && \
git config --global url."git@github.com:".insteadOf "https://github.com/" && \
git config --global url."git@github.com:".insteadOf "http://github.com/" && \
git config --global --add safe.directory /workspace/bun && \
git config --global push.default current && \
git config --global pull.rebase true && \
git config --global init.defaultBranch main && \
git config --global core.editor "vim" && \
git config --global color.ui auto && \
git config --global fetch.prune true && \
git config --global diff.colorMoved zebra && \
git config --global merge.conflictStyle diff3 && \
git config --global rerere.enabled true && \
git config --global core.autocrlf input
git fetch origin ${{ github.event.pull_request.head.sha }}
git checkout ${{ github.event.pull_request.head.ref }}
git reset --hard origin/${{ github.event.pull_request.head.ref }}
uses: actions/checkout@v4
with:
fetch-depth: 1
- name: Run Claude Code
id: claude
# TODO: switch this out once they merge their v1
uses: km-anthropic/claude-code-action@v1-dev
uses: anthropics/claude-code-action@beta
with:
timeout_minutes: "180"
claude_args: |
--dangerously-skip-permissions
--system-prompt "You are working on the Bun codebase"
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}

3
.vscode/launch.json generated vendored
View File

@@ -22,9 +22,6 @@
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
// "BUN_JSC_validateExceptionChecks": "1",
// "BUN_JSC_dumpSimulatedThrows": "1",
// "BUN_JSC_unexpectedExceptionStackTraceLimit": "20",
},
"console": "internalConsole",
"sourceMap": {

View File

@@ -168,5 +168,5 @@
"WebKit/WebInspectorUI": true,
},
"git.detectSubmodules": false,
"bun.test.customScript": "./build/debug/bun-debug test",
"bun.test.customScript": "./build/debug/bun-debug test"
}

View File

@@ -43,12 +43,7 @@ Tests use Bun's Jest-compatible test runner with proper test fixtures:
```typescript
import { test, expect } from "bun:test";
import {
bunEnv,
bunExe,
normalizeBunSnapshot,
tempDirWithFiles,
} from "harness";
import { bunEnv, bunExe, tempDirWithFiles } from "harness";
test("my feature", async () => {
// Create temp directory with test files
@@ -61,7 +56,6 @@ test("my feature", async () => {
cmd: [bunExe(), "index.js"],
env: bunEnv,
cwd: dir,
stderr: "pipe",
});
const [stdout, stderr, exitCode] = await Promise.all([
@@ -71,14 +65,11 @@ test("my feature", async () => {
]);
expect(exitCode).toBe(0);
// Prefer snapshot tests over expect(stdout).toBe("hello\n");
expect(normalizeBunSnapshot(stdout, dir)).toMatchInlineSnapshot(`"hello"`);
expect(stdout).toBe("hello\n");
});
```
- Always use `port: 0`. Do not hardcode ports. Do not use your own random port number function.
- Use `normalizeBunSnapshot` to normalize snapshot output of the test.
- NEVER write tests that check for no "panic" or "uncaught exception" or similar in the test output. That is NOT a valid test.
## Code Architecture

2046
Makefile Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -15,13 +15,11 @@
"eventemitter3": "^5.0.0",
"execa": "^8.0.1",
"fast-glob": "3.3.1",
"fastify": "^5.0.0",
"fdir": "^6.1.0",
"mitata": "^1.0.25",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"string-width": "7.1.0",
"strip-ansi": "^7.1.0",
"tinycolor2": "^1.6.0",
"zx": "^7.2.3",
},
@@ -95,18 +93,6 @@
"@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.14.54", "", { "os": "linux", "cpu": "none" }, "sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw=="],
"@fastify/ajv-compiler": ["@fastify/ajv-compiler@4.0.2", "", { "dependencies": { "ajv": "^8.12.0", "ajv-formats": "^3.0.1", "fast-uri": "^3.0.0" } }, "sha512-Rkiu/8wIjpsf46Rr+Fitd3HRP+VsxUFDDeag0hs9L0ksfnwx2g7SPQQTFL0E8Qv+rfXzQOxBJnjUB9ITUDjfWQ=="],
"@fastify/error": ["@fastify/error@4.2.0", "", {}, "sha512-RSo3sVDXfHskiBZKBPRgnQTtIqpi/7zhJOEmAxCiBcM7d0uwdGdxLlsCaLzGs8v8NnxIRlfG0N51p5yFaOentQ=="],
"@fastify/fast-json-stringify-compiler": ["@fastify/fast-json-stringify-compiler@5.0.3", "", { "dependencies": { "fast-json-stringify": "^6.0.0" } }, "sha512-uik7yYHkLr6fxd8hJSZ8c+xF4WafPK+XzneQDPU+D10r5X19GW8lJcom2YijX2+qtFF1ENJlHXKFM9ouXNJYgQ=="],
"@fastify/forwarded": ["@fastify/forwarded@3.0.0", "", {}, "sha512-kJExsp4JCms7ipzg7SJ3y8DwmePaELHxKYtg+tZow+k0znUTf3cb+npgyqm8+ATZOdmfgfydIebPDWM172wfyA=="],
"@fastify/merge-json-schemas": ["@fastify/merge-json-schemas@0.2.1", "", { "dependencies": { "dequal": "^2.0.3" } }, "sha512-OA3KGBCy6KtIvLf8DINC5880o5iBlDX4SxzLQS8HorJAbqluzLRn80UXU0bxZn7UOFhFgpRJDasfwn9nG4FG4A=="],
"@fastify/proxy-addr": ["@fastify/proxy-addr@5.0.0", "", { "dependencies": { "@fastify/forwarded": "^3.0.0", "ipaddr.js": "^2.1.0" } }, "sha512-37qVVA1qZ5sgH7KpHkkC4z9SK6StIsIcOmpjvMPXNb3vx2GQxhZocogVYbr2PbbeLCQxYIPDok307xEvRZOzGA=="],
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.1.1", "", { "dependencies": { "@jridgewell/set-array": "^1.0.0", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w=="],
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.0", "", {}, "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w=="],
@@ -157,20 +143,10 @@
"@types/which": ["@types/which@3.0.3", "", {}, "sha512-2C1+XoY0huExTbs8MQv1DuS5FS86+SEjdM9F/+GS61gg5Hqbtj8ZiDSx8MfWcyei907fIPbfPGCOrNUTnVHY1g=="],
"abstract-logging": ["abstract-logging@2.0.1", "", {}, "sha512-2BjRTZxTPvheOvGbBslFSYOUkr+SjPtOnrLP33f+VIWLzezQpZcqVg7ja3L4dBXmzzgwT+a029jRx5PCi3JuiA=="],
"ajv": ["ajv@8.17.1", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", "require-from-string": "^2.0.2" } }, "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g=="],
"ajv-formats": ["ajv-formats@3.0.1", "", { "dependencies": { "ajv": "^8.0.0" } }, "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ=="],
"ansi-regex": ["ansi-regex@6.0.1", "", {}, "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA=="],
"ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],
"atomic-sleep": ["atomic-sleep@1.0.0", "", {}, "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ=="],
"avvio": ["avvio@9.1.0", "", { "dependencies": { "@fastify/error": "^4.0.0", "fastq": "^1.17.1" } }, "sha512-fYASnYi600CsH/j9EQov7lECAniYiBFiiAtBNuZYLA2leLe9qOvZzqYHFjtIj6gD2VMoMLP14834LFWvr4IfDw=="],
"benchmark": ["benchmark@2.1.4", "", { "dependencies": { "lodash": "^4.17.4", "platform": "^1.3.3" } }, "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ=="],
"braces": ["braces@3.0.2", "", { "dependencies": { "fill-range": "^7.0.1" } }, "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A=="],
@@ -191,16 +167,12 @@
"convert-source-map": ["convert-source-map@1.9.0", "", {}, "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A=="],
"cookie": ["cookie@1.0.2", "", {}, "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="],
"cross-spawn": ["cross-spawn@7.0.3", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w=="],
"data-uri-to-buffer": ["data-uri-to-buffer@4.0.1", "", {}, "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A=="],
"debug": ["debug@4.3.4", "", { "dependencies": { "ms": "2.1.2" } }, "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ=="],
"dequal": ["dequal@2.0.3", "", {}, "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA=="],
"dir-glob": ["dir-glob@3.0.1", "", { "dependencies": { "path-type": "^4.0.0" } }, "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA=="],
"duplexer": ["duplexer@0.1.2", "", {}, "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg=="],
@@ -261,22 +233,10 @@
"execa": ["execa@8.0.1", "", { "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^8.0.1", "human-signals": "^5.0.0", "is-stream": "^3.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^5.1.0", "onetime": "^6.0.0", "signal-exit": "^4.1.0", "strip-final-newline": "^3.0.0" } }, "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg=="],
"fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="],
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
"fast-glob": ["fast-glob@3.3.1", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.4" } }, "sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg=="],
"fast-json-stringify": ["fast-json-stringify@6.0.1", "", { "dependencies": { "@fastify/merge-json-schemas": "^0.2.0", "ajv": "^8.12.0", "ajv-formats": "^3.0.1", "fast-uri": "^3.0.0", "json-schema-ref-resolver": "^2.0.0", "rfdc": "^1.2.0" } }, "sha512-s7SJE83QKBZwg54dIbD5rCtzOBVD43V1ReWXXYqBgwCwHLYAAT0RQc/FmrQglXqWPpz6omtryJQOau5jI4Nrvg=="],
"fast-querystring": ["fast-querystring@1.1.2", "", { "dependencies": { "fast-decode-uri-component": "^1.0.1" } }, "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg=="],
"fast-redact": ["fast-redact@3.5.0", "", {}, "sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A=="],
"fast-uri": ["fast-uri@3.0.6", "", {}, "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw=="],
"fastify": ["fastify@5.5.0", "", { "dependencies": { "@fastify/ajv-compiler": "^4.0.0", "@fastify/error": "^4.0.0", "@fastify/fast-json-stringify-compiler": "^5.0.0", "@fastify/proxy-addr": "^5.0.0", "abstract-logging": "^2.0.1", "avvio": "^9.0.0", "fast-json-stringify": "^6.0.0", "find-my-way": "^9.0.0", "light-my-request": "^6.0.0", "pino": "^9.0.0", "process-warning": "^5.0.0", "rfdc": "^1.3.1", "secure-json-parse": "^4.0.0", "semver": "^7.6.0", "toad-cache": "^3.7.0" } }, "sha512-ZWSWlzj3K/DcULCnCjEiC2zn2FBPdlZsSA/pnPa/dbUfLvxkD/Nqmb0XXMXLrWkeM4uQPUvjdJpwtXmTfriXqw=="],
"fastq": ["fastq@1.15.0", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw=="],
"fdir": ["fdir@6.1.0", "", { "peerDependencies": { "picomatch": "2.x" } }, "sha512-274qhz5PxNnA/fybOu6apTCUnM0GnO3QazB6VH+oag/7DQskdYq8lm07ZSm90kEQuWYH5GvjAxGruuHrEr0bcg=="],
@@ -285,8 +245,6 @@
"fill-range": ["fill-range@7.0.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ=="],
"find-my-way": ["find-my-way@9.3.0", "", { "dependencies": { "fast-deep-equal": "^3.1.3", "fast-querystring": "^1.0.0", "safe-regex2": "^5.0.0" } }, "sha512-eRoFWQw+Yv2tuYlK2pjFS2jGXSxSppAs3hSQjfxVKxM5amECzIgYYc1FEI8ZmhSh/Ig+FrKEz43NLRKJjYCZVg=="],
"formdata-polyfill": ["formdata-polyfill@4.0.10", "", { "dependencies": { "fetch-blob": "^3.1.2" } }, "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g=="],
"from": ["from@0.1.7", "", {}, "sha512-twe20eF1OxVxp/ML/kq2p1uc6KvFK/+vs8WjEbeKmV2He22MKm7YF2ANIt+EOqhJ5L3K/SuuPhk0hWQDjOM23g=="],
@@ -315,8 +273,6 @@
"ignore": ["ignore@5.3.0", "", {}, "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg=="],
"ipaddr.js": ["ipaddr.js@2.2.0", "", {}, "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA=="],
"is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="],
"is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
@@ -333,16 +289,10 @@
"jsesc": ["jsesc@2.5.2", "", { "bin": { "jsesc": "bin/jsesc" } }, "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA=="],
"json-schema-ref-resolver": ["json-schema-ref-resolver@2.0.1", "", { "dependencies": { "dequal": "^2.0.3" } }, "sha512-HG0SIB9X4J8bwbxCbnd5FfPEbcXAJYTi1pBJeP/QPON+w8ovSME8iRG+ElHNxZNX2Qh6eYn1GdzJFS4cDFfx0Q=="],
"json-schema-traverse": ["json-schema-traverse@1.0.0", "", {}, "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug=="],
"json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
"jsonfile": ["jsonfile@6.1.0", "", { "dependencies": { "universalify": "^2.0.0" }, "optionalDependencies": { "graceful-fs": "^4.1.6" } }, "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ=="],
"light-my-request": ["light-my-request@6.6.0", "", { "dependencies": { "cookie": "^1.0.1", "process-warning": "^4.0.0", "set-cookie-parser": "^2.6.0" } }, "sha512-CHYbu8RtboSIoVsHZ6Ye4cj4Aw/yg2oAFimlF7mNvfDV192LR7nDiKtSIfCuLT7KokPSTn/9kfVLm5OGN0A28A=="],
"lodash": ["lodash@4.17.21", "", {}, "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="],
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
@@ -373,8 +323,6 @@
"npm-run-path": ["npm-run-path@5.2.0", "", { "dependencies": { "path-key": "^4.0.0" } }, "sha512-W4/tgAXFqFA0iL7fk0+uQ3g7wkL8xJmx3XdK0VGb4cHW//eZTtKGvFBBoRKVTpY7n6ze4NL9ly7rgXcHufqXKg=="],
"on-exit-leak-free": ["on-exit-leak-free@2.1.2", "", {}, "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA=="],
"onetime": ["onetime@6.0.0", "", { "dependencies": { "mimic-fn": "^4.0.0" } }, "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ=="],
"path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],
@@ -387,50 +335,24 @@
"picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
"pino": ["pino@9.9.0", "", { "dependencies": { "atomic-sleep": "^1.0.0", "fast-redact": "^3.1.1", "on-exit-leak-free": "^2.1.0", "pino-abstract-transport": "^2.0.0", "pino-std-serializers": "^7.0.0", "process-warning": "^5.0.0", "quick-format-unescaped": "^4.0.3", "real-require": "^0.2.0", "safe-stable-stringify": "^2.3.1", "sonic-boom": "^4.0.1", "thread-stream": "^3.0.0" }, "bin": { "pino": "bin.js" } }, "sha512-zxsRIQG9HzG+jEljmvmZupOMDUQ0Jpj0yAgE28jQvvrdYTlEaiGwelJpdndMl/MBuRr70heIj83QyqJUWaU8mQ=="],
"pino-abstract-transport": ["pino-abstract-transport@2.0.0", "", { "dependencies": { "split2": "^4.0.0" } }, "sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw=="],
"pino-std-serializers": ["pino-std-serializers@7.0.0", "", {}, "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA=="],
"platform": ["platform@1.3.6", "", {}, "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg=="],
"process-warning": ["process-warning@5.0.0", "", {}, "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA=="],
"ps-tree": ["ps-tree@1.2.0", "", { "dependencies": { "event-stream": "=3.3.4" }, "bin": { "ps-tree": "./bin/ps-tree.js" } }, "sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA=="],
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
"quick-format-unescaped": ["quick-format-unescaped@4.0.4", "", {}, "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg=="],
"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
"react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="],
"real-require": ["real-require@0.2.0", "", {}, "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg=="],
"require-from-string": ["require-from-string@2.0.2", "", {}, "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw=="],
"ret": ["ret@0.5.0", "", {}, "sha512-I1XxrZSQ+oErkRR4jYbAyEEu2I0avBvvMM5JN+6EBprOGRCs63ENqZ3vjavq8fBw2+62G5LF5XelKwuJpcvcxw=="],
"reusify": ["reusify@1.0.4", "", {}, "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw=="],
"rfdc": ["rfdc@1.4.1", "", {}, "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA=="],
"run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="],
"safe-regex2": ["safe-regex2@5.0.0", "", { "dependencies": { "ret": "~0.5.0" } }, "sha512-YwJwe5a51WlK7KbOJREPdjNrpViQBI3p4T50lfwPuDhZnE3XGVTlGvi+aolc5+RvxDD6bnUmjVsU9n1eboLUYw=="],
"safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="],
"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],
"secure-json-parse": ["secure-json-parse@4.0.0", "", {}, "sha512-dxtLJO6sc35jWidmLxo7ij+Eg48PM/kleBsxpC8QJE0qJICe+KawkDQmvCMZUr9u7WKVHgMW6vy3fQ7zMiFZMA=="],
"semver": ["semver@6.3.0", "", { "bin": { "semver": "./bin/semver.js" } }, "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw=="],
"set-cookie-parser": ["set-cookie-parser@2.7.1", "", {}, "sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ=="],
"shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="],
"shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="],
@@ -441,12 +363,8 @@
"slash": ["slash@4.0.0", "", {}, "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew=="],
"sonic-boom": ["sonic-boom@4.2.0", "", { "dependencies": { "atomic-sleep": "^1.0.0" } }, "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww=="],
"split": ["split@0.3.3", "", { "dependencies": { "through": "2" } }, "sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA=="],
"split2": ["split2@4.2.0", "", {}, "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg=="],
"stream-combiner": ["stream-combiner@0.0.4", "", { "dependencies": { "duplexer": "~0.1.1" } }, "sha512-rT00SPnTVyRsaSz5zgSPma/aHSOic5U1prhYdRy5HS2kTZviFpmDgzilbtsJsxiroqACmayynDN/9VzIbX5DOw=="],
"string-width": ["string-width@7.1.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw=="],
@@ -457,8 +375,6 @@
"supports-color": ["supports-color@5.5.0", "", { "dependencies": { "has-flag": "^3.0.0" } }, "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow=="],
"thread-stream": ["thread-stream@3.1.0", "", { "dependencies": { "real-require": "^0.2.0" } }, "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A=="],
"through": ["through@2.3.8", "", {}, "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg=="],
"tinycolor2": ["tinycolor2@1.6.0", "", {}, "sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw=="],
@@ -467,8 +383,6 @@
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
"toad-cache": ["toad-cache@3.7.0", "", {}, "sha512-/m8M+2BJUpoJdgAHoG+baCwBT+tf2VraSfkBgl0Y00qIWt41DJ8R5B8nsEw0I58YwF5IZH6z24/2TobDKnqSWw=="],
"undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
"universalify": ["universalify@2.0.1", "", {}, "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw=="],
@@ -493,14 +407,8 @@
"ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
"avvio/fastq": ["fastq@1.19.1", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="],
"cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
"fastify/semver": ["semver@7.7.2", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA=="],
"light-my-request/process-warning": ["process-warning@4.0.1", "", {}, "sha512-3c2LzQ3rY9d0hc1emcsHhfT9Jwz0cChib/QN89oME2R451w5fy3f0afAhERFZAwrbDU43wk12d0ORBpDVME50Q=="],
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
"ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],

View File

@@ -18,7 +18,6 @@
"react": "^18.3.1",
"react-dom": "^18.3.1",
"string-width": "7.1.0",
"strip-ansi": "^7.1.0",
"tinycolor2": "^1.6.0",
"zx": "^7.2.3"
},

View File

@@ -1,77 +0,0 @@
// Benchmark for string fast path optimization in postMessage with Workers
import { bench, run } from "mitata";
import { Worker, isMainThread, parentPort } from "node:worker_threads";
// Test strings of different sizes
// Payload sizes: 11 chars, 14 * 1024 = 14,336 chars, and
// 14 * 1024 * 256 = 3,670,016 chars (~3.5 MiB).
const strings = {
small: "Hello world",
// Buffer.alloc(size, fill) repeats the fill pattern up to `size` bytes;
// toString() then yields a flat (non-rope) string of that length.
medium: Buffer.alloc("Hello World!!!".length * 1024, "Hello World!!!").toString(),
large: Buffer.alloc("Hello World!!!".length * 1024 * 256, "Hello World!!!").toString(),
};
// Worker handle; assigned by createWorker() before the benchmarks run.
let worker;
// Shared counter: the worker atomically increments slot 0 once per message
// it receives, so the main thread can verify delivery afterwards.
let receivedCount = new Int32Array(new SharedArrayBuffer(4));
// Messages posted from the main thread; compared against receivedCount[0].
let sentCount = 0;
// Spawns the benchmark worker from an inline (eval'd) ES module and stores it
// in the module-level `worker`. The worker receives the shared counter via
// workerData and bumps it once per incoming message.
function createWorker() {
const workerCode = `
import { parentPort, workerData } from "node:worker_threads";
let int = workerData;
parentPort?.on("message", data => {
Atomics.add(int, 0, 1);
});
`;
worker = new Worker(workerCode, { eval: true, workerData: receivedCount });
// NOTE(review): the worker source above never posts back, so this handler
// appears to be dead code — confirm before removing.
worker.on("message", confirmationId => {});
worker.on("error", error => {
console.error("Worker error:", error);
});
}
// Initialize worker before running benchmarks
createWorker();
function fmt(int) {
if (int < 1000) {
return `${int} chars`;
}
if (int < 100000) {
return `${(int / 1024) | 0} KB`;
}
return `${(int / 1024 / 1024) | 0} MB`;
}
// Benchmark postMessage with pure strings (uses fast path)
// Each invocation counts one sent message so delivery can be verified below.
bench("postMessage(" + fmt(strings.small.length) + " string)", async () => {
sentCount++;
worker.postMessage(strings.small);
});
bench("postMessage(" + fmt(strings.medium.length) + " string)", async () => {
sentCount++;
worker.postMessage(strings.medium);
});
bench("postMessage(" + fmt(strings.large.length) + " string)", async () => {
sentCount++;
worker.postMessage(strings.large);
});
await run();
// Wait before checking the counter — presumably to let the worker drain its
// message queue; TODO confirm 5s suffices for the largest payloads.
await new Promise(resolve => setTimeout(resolve, 5000));
// Every message posted must have been counted by the worker.
if (receivedCount[0] !== sentCount) {
// NOTE(review): message interpolates <received> then <sent>; the wording
// "Expected <received> to equal <sent>" may be reversed — confirm intent.
throw new Error("Expected " + receivedCount[0] + " to equal " + sentCount);
}
// Cleanup worker
worker?.terminate();

View File

@@ -1,56 +0,0 @@
// Benchmark for string fast path optimization in postMessage and structuredClone
import { bench, run } from "mitata";

// Test payloads of increasing size. The split/join round-trip forces each
// repeated string into a flat (non-rope) representation.
const strings = {
  small: "Hello world",
  medium: "Hello World!!!".repeat(1024).split("").join(""),
  large: "Hello World!!!".repeat(1024).repeat(1024).split("").join(""),
};

// Describe the experiment up front.
for (const line of [
  "String fast path benchmark",
  "Comparing pure strings (fast path) vs objects containing strings (traditional)",
  "For structuredClone, pure strings should have constant time regardless of size.",
  "",
]) {
  console.log(line);
}

// Pure strings exercise the fast path — one registration per size.
for (const size of ["small", "medium", "large"]) {
  bench(`structuredClone ${size} string (fast path)`, () => {
    structuredClone(strings[size]);
  });
}

// Wrapping the string in an object forces the traditional clone path.
for (const size of ["small", "medium", "large"]) {
  bench(`structuredClone object with ${size} string`, () => {
    structuredClone({ str: strings[size] });
  });
}

// Repeated-clone variants: 100 clones per benchmark invocation.
bench("structuredClone 100 small strings", () => {
  for (let i = 0; i < 100; i++) {
    structuredClone(strings.small);
  }
});
bench("structuredClone 100 small objects", () => {
  for (let i = 0; i < 100; i++) {
    structuredClone({ str: strings.small });
  }
});

await run();

View File

@@ -1,37 +0,0 @@
// Benchmark Bun.stripANSI against the npm strip-ansi implementation.
import npmStripAnsi from "strip-ansi";
import { bench, run } from "../runner.mjs";

// Prefer the native implementation when present, unless FORCE_NPM opts out.
const bunStripANSI = process.env.FORCE_NPM ? null : globalThis?.Bun?.stripANSI;
const stripANSI = bunStripANSI || npmStripAnsi;

// Locale-aware thousands separators for the benchmark names.
const numberFormat = new Intl.NumberFormat();
const withSeparators = n => numberFormat.format(n);

// Each case: [payload, short description used in the bench name].
const cases = [
  ["hello world", "no-ansi"],
  ["\x1b[31mred\x1b[39m", "ansi"],
  ["a".repeat(1024 * 16), "long-no-ansi"],
  ["\x1b[31mred\x1b[39m".repeat(1024 * 16), "long-ansi"],
];

// Width of the longest formatted length, for right-aligned char counts.
const padWidth = withSeparators(Math.max(...cases.map(([text]) => text.length))).length;
// Which implementation is under test (loop-invariant).
const implName = bunStripANSI ? "Bun.stripANSI" : "npm/strip-ansi";

for (const [text, description] of cases) {
  const name = `${implName} ${withSeparators(text.length).padStart(padWidth, " ")} chars ${description}`;
  bench(name, () => {
    stripANSI(text);
  });
  // The numbers are only meaningful if both implementations agree.
  if (bunStripANSI && bunStripANSI(text) !== npmStripAnsi(text)) {
    throw new Error("strip-ansi mismatch");
  }
}

await run();

View File

@@ -6,7 +6,6 @@
"devDependencies": {
"@lezer/common": "^1.2.3",
"@lezer/cpp": "^1.1.3",
"@types/bun": "workspace:*",
"bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
"esbuild": "^0.21.4",
"mitata": "^0.1.11",
@@ -40,8 +39,8 @@
},
},
"overrides": {
"bun-types": "workspace:packages/bun-types",
"@types/bun": "workspace:packages/@types/bun",
"bun-types": "workspace:packages/bun-types",
},
"packages": {
"@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.21.5", "", { "os": "aix", "cpu": "ppc64" }, "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ=="],
@@ -148,7 +147,7 @@
"@octokit/webhooks-types": ["@octokit/webhooks-types@7.6.1", "", {}, "sha512-S8u2cJzklBC0FgTwWVLaM8tMrDuDMVE4xiTK4EYXM9GntyvrdbSoxqDQa+Fh57CCNApyIpyeqPhhFEmHPfrXgw=="],
"@sentry/types": ["@sentry/types@7.120.4", "", {}, "sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q=="],
"@sentry/types": ["@sentry/types@7.120.3", "", {}, "sha512-C4z+3kGWNFJ303FC+FxAd4KkHvxpNFYAFN8iMIgBwJdpIl25KZ8Q/VdGn0MLLUEHNLvjob0+wvwlcRBBNLXOow=="],
"@types/aws-lambda": ["@types/aws-lambda@8.10.152", "", {}, "sha512-soT/c2gYBnT5ygwiHPmd9a1bftj462NWVk2tKCc1PYHSIacB2UwbTS2zYG4jzag1mRDuzg/OjtxQjQ2NKRB6Rw=="],
@@ -160,9 +159,9 @@
"@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],
"@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],
"@types/node": ["@types/node@24.1.0", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-ut5FthK5moxFKH2T1CUOC6ctR67rQRvvHdFLCD2Ql6KXmMuCrjsSsRI9UsLCm9M18BMwClv4pn327UvB7eeO1w=="],
"@types/react": ["@types/react@19.1.10", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-EhBeSYX0Y6ye8pNebpKrwFJq7BoQ8J5SO6NlvNwwHjSj6adXJViPQrKlsyPw7hLBLvckEMO1yxeGdR82YBBlDg=="],
"@types/react": ["@types/react@19.1.8", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-AwAfQ2Wa5bCx9WP8nZL2uMZWod7J7/JSplxbTmBQ5ms6QpqNYm672H0Vu9ZVKVngQ+ii4R/byguVEUZQyeg44g=="],
"aggregate-error": ["aggregate-error@3.1.0", "", { "dependencies": { "clean-stack": "^2.0.0", "indent-string": "^4.0.0" } }, "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA=="],
@@ -312,7 +311,7 @@
"uglify-js": ["uglify-js@3.19.3", "", { "bin": { "uglifyjs": "bin/uglifyjs" } }, "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ=="],
"undici-types": ["undici-types@7.10.0", "", {}, "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag=="],
"undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="],
"universal-github-app-jwt": ["universal-github-app-jwt@1.2.0", "", { "dependencies": { "@types/jsonwebtoken": "^9.0.0", "jsonwebtoken": "^9.0.2" } }, "sha512-dncpMpnsKBk0eetwfN8D8OUHGfiDhhJ+mtsbMl+7PfW7mYjiH8LIcqRmYMtzYLgSh47HjfdBtrBwIQ/gizKR3g=="],
@@ -334,6 +333,8 @@
"@octokit/webhooks/@octokit/webhooks-methods": ["@octokit/webhooks-methods@4.1.0", "", {}, "sha512-zoQyKw8h9STNPqtm28UGOYFE7O6D4Il8VJwhAtMHFt2C4L0VQT1qGKLeefUOqHNs1mNRYSadVv7x0z8U2yyeWQ=="],
"bun-tracestrings/typescript": ["typescript@5.8.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ=="],
"camel-case/no-case": ["no-case@2.3.2", "", { "dependencies": { "lower-case": "^1.1.1" } }, "sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ=="],
"change-case/camel-case": ["camel-case@4.1.2", "", { "dependencies": { "pascal-case": "^3.1.2", "tslib": "^2.0.3" } }, "sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw=="],

View File

@@ -57,23 +57,6 @@ else()
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif()
# Windows Code Signing Option
if(WIN32)
# Opt-in switch: signing stays OFF by default so local/dev builds never need
# DigiCert KeyLocker credentials.
optionx(ENABLE_WINDOWS_CODESIGNING BOOL "Enable Windows code signing with DigiCert KeyLocker" DEFAULT OFF)
if(ENABLE_WINDOWS_CODESIGNING)
message(STATUS "Windows code signing: ENABLED")
# Check for required environment variables
# Missing credentials only WARN at configure time; the hard failure, if any,
# surfaces when signing actually runs.
if(NOT DEFINED ENV{SM_API_KEY})
message(WARNING "SM_API_KEY not set - code signing may fail")
endif()
if(NOT DEFINED ENV{SM_CLIENT_CERT_FILE})
message(WARNING "SM_CLIENT_CERT_FILE not set - code signing may fail")
endif()
endif()
endif()
if(LINUX)
if(EXISTS "/etc/alpine-release")
set(DEFAULT_ABI "musl")

View File

@@ -42,7 +42,6 @@ src/bun.js/bindings/DOMURL.cpp
src/bun.js/bindings/DOMWrapperWorld.cpp
src/bun.js/bindings/DoubleFormatter.cpp
src/bun.js/bindings/EncodeURIComponent.cpp
src/bun.js/bindings/EncodingTables.cpp
src/bun.js/bindings/ErrorCode.cpp
src/bun.js/bindings/ErrorStackFrame.cpp
src/bun.js/bindings/ErrorStackTrace.cpp
@@ -193,16 +192,7 @@ src/bun.js/bindings/ServerRouteList.cpp
src/bun.js/bindings/spawn.cpp
src/bun.js/bindings/SQLClient.cpp
src/bun.js/bindings/sqlite/JSSQLStatement.cpp
src/bun.js/bindings/stripANSI.cpp
src/bun.js/bindings/Strong.cpp
src/bun.js/bindings/TextCodec.cpp
src/bun.js/bindings/TextCodecCJK.cpp
src/bun.js/bindings/TextCodecReplacement.cpp
src/bun.js/bindings/TextCodecSingleByte.cpp
src/bun.js/bindings/TextCodecUserDefined.cpp
src/bun.js/bindings/TextCodecWrapper.cpp
src/bun.js/bindings/TextEncoding.cpp
src/bun.js/bindings/TextEncodingRegistry.cpp
src/bun.js/bindings/Uint8Array.cpp
src/bun.js/bindings/Undici.cpp
src/bun.js/bindings/URLDecomposition.cpp

View File

@@ -65,12 +65,6 @@ src/js/internal/linkedlist.ts
src/js/internal/primordials.js
src/js/internal/promisify.ts
src/js/internal/shared.ts
src/js/internal/sql/errors.ts
src/js/internal/sql/mysql.ts
src/js/internal/sql/postgres.ts
src/js/internal/sql/query.ts
src/js/internal/sql/shared.ts
src/js/internal/sql/sqlite.ts
src/js/internal/stream.promises.ts
src/js/internal/stream.ts
src/js/internal/streams/add-abort-signal.ts

View File

@@ -6,6 +6,7 @@ src/bun.js/api/Glob.classes.ts
src/bun.js/api/h2.classes.ts
src/bun.js/api/html_rewriter.classes.ts
src/bun.js/api/JSBundler.classes.ts
src/bun.js/api/postgres.classes.ts
src/bun.js/api/ResumableSink.classes.ts
src/bun.js/api/S3Client.classes.ts
src/bun.js/api/S3Stat.classes.ts
@@ -14,7 +15,6 @@ src/bun.js/api/Shell.classes.ts
src/bun.js/api/ShellArgs.classes.ts
src/bun.js/api/sockets.classes.ts
src/bun.js/api/sourcemap.classes.ts
src/bun.js/api/sql.classes.ts
src/bun.js/api/streams.classes.ts
src/bun.js/api/valkey.classes.ts
src/bun.js/api/zlib.classes.ts

View File

@@ -98,11 +98,6 @@ src/bun.js/api/bun/spawn.zig
src/bun.js/api/bun/spawn/stdio.zig
src/bun.js/api/bun/ssl_wrapper.zig
src/bun.js/api/bun/subprocess.zig
src/bun.js/api/bun/subprocess/Readable.zig
src/bun.js/api/bun/subprocess/ResourceUsage.zig
src/bun.js/api/bun/subprocess/StaticPipeWriter.zig
src/bun.js/api/bun/subprocess/SubprocessPipeReader.zig
src/bun.js/api/bun/subprocess/Writable.zig
src/bun.js/api/bun/udp_socket.zig
src/bun.js/api/bun/x509.zig
src/bun.js/api/BunObject.zig
@@ -135,7 +130,6 @@ src/bun.js/api/server/StaticRoute.zig
src/bun.js/api/server/WebSocketServerContext.zig
src/bun.js/api/streams.classes.zig
src/bun.js/api/Timer.zig
src/bun.js/api/Timer/DateHeaderTimer.zig
src/bun.js/api/Timer/EventLoopTimer.zig
src/bun.js/api/Timer/ImmediateObject.zig
src/bun.js/api/Timer/TimeoutObject.zig
@@ -197,7 +191,6 @@ src/bun.js/bindings/SourceProvider.zig
src/bun.js/bindings/SourceType.zig
src/bun.js/bindings/static_export.zig
src/bun.js/bindings/SystemError.zig
src/bun.js/bindings/TextCodec.zig
src/bun.js/bindings/URL.zig
src/bun.js/bindings/URLSearchParams.zig
src/bun.js/bindings/VM.zig
@@ -286,81 +279,6 @@ src/bun.js/test/diff_format.zig
src/bun.js/test/diff/diff_match_patch.zig
src/bun.js/test/diff/printDiff.zig
src/bun.js/test/expect.zig
src/bun.js/test/expect/toBe.zig
src/bun.js/test/expect/toBeArray.zig
src/bun.js/test/expect/toBeArrayOfSize.zig
src/bun.js/test/expect/toBeBoolean.zig
src/bun.js/test/expect/toBeCloseTo.zig
src/bun.js/test/expect/toBeDate.zig
src/bun.js/test/expect/toBeDefined.zig
src/bun.js/test/expect/toBeEmpty.zig
src/bun.js/test/expect/toBeEmptyObject.zig
src/bun.js/test/expect/toBeEven.zig
src/bun.js/test/expect/toBeFalse.zig
src/bun.js/test/expect/toBeFalsy.zig
src/bun.js/test/expect/toBeFinite.zig
src/bun.js/test/expect/toBeFunction.zig
src/bun.js/test/expect/toBeGreaterThan.zig
src/bun.js/test/expect/toBeGreaterThanOrEqual.zig
src/bun.js/test/expect/toBeInstanceOf.zig
src/bun.js/test/expect/toBeInteger.zig
src/bun.js/test/expect/toBeLessThan.zig
src/bun.js/test/expect/toBeLessThanOrEqual.zig
src/bun.js/test/expect/toBeNaN.zig
src/bun.js/test/expect/toBeNegative.zig
src/bun.js/test/expect/toBeNil.zig
src/bun.js/test/expect/toBeNull.zig
src/bun.js/test/expect/toBeNumber.zig
src/bun.js/test/expect/toBeObject.zig
src/bun.js/test/expect/toBeOdd.zig
src/bun.js/test/expect/toBeOneOf.zig
src/bun.js/test/expect/toBePositive.zig
src/bun.js/test/expect/toBeString.zig
src/bun.js/test/expect/toBeSymbol.zig
src/bun.js/test/expect/toBeTrue.zig
src/bun.js/test/expect/toBeTruthy.zig
src/bun.js/test/expect/toBeTypeOf.zig
src/bun.js/test/expect/toBeUndefined.zig
src/bun.js/test/expect/toBeValidDate.zig
src/bun.js/test/expect/toBeWithin.zig
src/bun.js/test/expect/toContain.zig
src/bun.js/test/expect/toContainAllKeys.zig
src/bun.js/test/expect/toContainAllValues.zig
src/bun.js/test/expect/toContainAnyKeys.zig
src/bun.js/test/expect/toContainAnyValues.zig
src/bun.js/test/expect/toContainEqual.zig
src/bun.js/test/expect/toContainKey.zig
src/bun.js/test/expect/toContainKeys.zig
src/bun.js/test/expect/toContainValue.zig
src/bun.js/test/expect/toContainValues.zig
src/bun.js/test/expect/toEndWith.zig
src/bun.js/test/expect/toEqual.zig
src/bun.js/test/expect/toEqualIgnoringWhitespace.zig
src/bun.js/test/expect/toHaveBeenCalled.zig
src/bun.js/test/expect/toHaveBeenCalledOnce.zig
src/bun.js/test/expect/toHaveBeenCalledTimes.zig
src/bun.js/test/expect/toHaveBeenCalledWith.zig
src/bun.js/test/expect/toHaveBeenLastCalledWith.zig
src/bun.js/test/expect/toHaveBeenNthCalledWith.zig
src/bun.js/test/expect/toHaveLastReturnedWith.zig
src/bun.js/test/expect/toHaveLength.zig
src/bun.js/test/expect/toHaveNthReturnedWith.zig
src/bun.js/test/expect/toHaveProperty.zig
src/bun.js/test/expect/toHaveReturned.zig
src/bun.js/test/expect/toHaveReturnedTimes.zig
src/bun.js/test/expect/toHaveReturnedWith.zig
src/bun.js/test/expect/toInclude.zig
src/bun.js/test/expect/toIncludeRepeated.zig
src/bun.js/test/expect/toMatch.zig
src/bun.js/test/expect/toMatchInlineSnapshot.zig
src/bun.js/test/expect/toMatchObject.zig
src/bun.js/test/expect/toMatchSnapshot.zig
src/bun.js/test/expect/toSatisfy.zig
src/bun.js/test/expect/toStartWith.zig
src/bun.js/test/expect/toStrictEqual.zig
src/bun.js/test/expect/toThrow.zig
src/bun.js/test/expect/toThrowErrorMatchingInlineSnapshot.zig
src/bun.js/test/expect/toThrowErrorMatchingSnapshot.zig
src/bun.js/test/jest.zig
src/bun.js/test/pretty_format.zig
src/bun.js/test/snapshot.zig
@@ -730,7 +648,6 @@ src/install/PackageManager/patchPackage.zig
src/install/PackageManager/processDependencyList.zig
src/install/PackageManager/ProgressStrings.zig
src/install/PackageManager/runTasks.zig
src/install/PackageManager/security_scanner.zig
src/install/PackageManager/updatePackageJSONAndInstall.zig
src/install/PackageManager/UpdateRequest.zig
src/install/PackageManager/WorkspacePackageJSONCache.zig
@@ -792,11 +709,10 @@ src/Progress.zig
src/ptr.zig
src/ptr/Cow.zig
src/ptr/CowSlice.zig
src/ptr/meta.zig
src/ptr/owned.zig
src/ptr/owned/maybe.zig
src/ptr/owned/meta.zig
src/ptr/ref_count.zig
src/ptr/shared.zig
src/ptr/tagged_pointer.zig
src/ptr/weak_ptr.zig
src/renamer.zig
@@ -884,63 +800,30 @@ src/sourcemap/JSSourceMap.zig
src/sourcemap/LineOffsetTable.zig
src/sourcemap/sourcemap.zig
src/sourcemap/VLQ.zig
src/sql/mysql.zig
src/sql/mysql/AuthMethod.zig
src/sql/mysql/Capabilities.zig
src/sql/mysql/ConnectionState.zig
src/sql/mysql/MySQLConnection.zig
src/sql/mysql/MySQLContext.zig
src/sql/mysql/MySQLQuery.zig
src/sql/mysql/MySQLRequest.zig
src/sql/mysql/MySQLStatement.zig
src/sql/mysql/MySQLTypes.zig
src/sql/mysql/protocol/AnyMySQLError.zig
src/sql/mysql/protocol/Auth.zig
src/sql/mysql/protocol/AuthSwitchRequest.zig
src/sql/mysql/protocol/AuthSwitchResponse.zig
src/sql/mysql/protocol/CharacterSet.zig
src/sql/mysql/protocol/ColumnDefinition41.zig
src/sql/mysql/protocol/CommandType.zig
src/sql/mysql/protocol/DecodeBinaryValue.zig
src/sql/mysql/protocol/EncodeInt.zig
src/sql/mysql/protocol/EOFPacket.zig
src/sql/mysql/protocol/ErrorPacket.zig
src/sql/mysql/protocol/HandshakeResponse41.zig
src/sql/mysql/protocol/HandshakeV10.zig
src/sql/mysql/protocol/LocalInfileRequest.zig
src/sql/mysql/protocol/NewReader.zig
src/sql/mysql/protocol/NewWriter.zig
src/sql/mysql/protocol/OKPacket.zig
src/sql/mysql/protocol/PacketHeader.zig
src/sql/mysql/protocol/PacketType.zig
src/sql/mysql/protocol/PreparedStatement.zig
src/sql/mysql/protocol/Query.zig
src/sql/mysql/protocol/ResultSet.zig
src/sql/mysql/protocol/ResultSetHeader.zig
src/sql/mysql/protocol/Signature.zig
src/sql/mysql/protocol/StackReader.zig
src/sql/mysql/protocol/StmtPrepareOKPacket.zig
src/sql/mysql/SSLMode.zig
src/sql/mysql/StatusFlags.zig
src/sql/mysql/TLSStatus.zig
src/sql/postgres.zig
src/sql/postgres/AnyPostgresError.zig
src/sql/postgres/AuthenticationState.zig
src/sql/postgres/CommandTag.zig
src/sql/postgres/ConnectionFlags.zig
src/sql/postgres/Data.zig
src/sql/postgres/DataCell.zig
src/sql/postgres/DebugSocketMonitorReader.zig
src/sql/postgres/DebugSocketMonitorWriter.zig
src/sql/postgres/ObjectIterator.zig
src/sql/postgres/PostgresCachedStructure.zig
src/sql/postgres/PostgresProtocol.zig
src/sql/postgres/PostgresRequest.zig
src/sql/postgres/PostgresSQLConnection.zig
src/sql/postgres/PostgresSQLContext.zig
src/sql/postgres/PostgresSQLQuery.zig
src/sql/postgres/PostgresSQLQueryResultMode.zig
src/sql/postgres/PostgresSQLStatement.zig
src/sql/postgres/PostgresTypes.zig
src/sql/postgres/protocol/ArrayList.zig
src/sql/postgres/protocol/Authentication.zig
src/sql/postgres/protocol/BackendKeyData.zig
src/sql/postgres/protocol/Close.zig
src/sql/postgres/protocol/ColumnIdentifier.zig
src/sql/postgres/protocol/CommandComplete.zig
src/sql/postgres/protocol/CopyData.zig
src/sql/postgres/protocol/CopyFail.zig
@@ -973,6 +856,7 @@ src/sql/postgres/protocol/StartupMessage.zig
src/sql/postgres/protocol/TransactionStatusIndicator.zig
src/sql/postgres/protocol/WriteWrap.zig
src/sql/postgres/protocol/zHelpers.zig
src/sql/postgres/QueryBindingIterator.zig
src/sql/postgres/SASL.zig
src/sql/postgres/Signature.zig
src/sql/postgres/SocketMonitor.zig
@@ -987,14 +871,6 @@ src/sql/postgres/types/json.zig
src/sql/postgres/types/numeric.zig
src/sql/postgres/types/PostgresString.zig
src/sql/postgres/types/Tag.zig
src/sql/shared/CachedStructure.zig
src/sql/shared/ColumnIdentifier.zig
src/sql/shared/ConnectionFlags.zig
src/sql/shared/Data.zig
src/sql/shared/ObjectIterator.zig
src/sql/shared/QueryBindingIterator.zig
src/sql/shared/SQLDataCell.zig
src/sql/shared/SQLQueryResultMode.zig
src/StandaloneModuleGraph.zig
src/StaticHashMap.zig
src/string.zig

View File

@@ -1205,7 +1205,6 @@ if(NOT BUN_CPP_ONLY)
endif()
if(bunStrip)
# First, strip bun-profile.exe to create bun.exe
register_command(
TARGET
${bun}
@@ -1226,48 +1225,6 @@ if(NOT BUN_CPP_ONLY)
OUTPUTS
${BUILD_PATH}/${bunStripExe}
)
# Then sign both executables on Windows
if(WIN32 AND ENABLE_WINDOWS_CODESIGNING)
set(SIGN_SCRIPT "${CMAKE_SOURCE_DIR}/.buildkite/scripts/sign-windows.ps1")
# Verify signing script exists
if(NOT EXISTS "${SIGN_SCRIPT}")
message(FATAL_ERROR "Windows signing script not found: ${SIGN_SCRIPT}")
endif()
# Use PowerShell for Windows code signing (native Windows, no path issues)
find_program(POWERSHELL_EXECUTABLE
NAMES pwsh.exe powershell.exe
PATHS
"C:/Program Files/PowerShell/7"
"C:/Program Files (x86)/PowerShell/7"
"C:/Windows/System32/WindowsPowerShell/v1.0"
DOC "Path to PowerShell executable"
)
if(NOT POWERSHELL_EXECUTABLE)
set(POWERSHELL_EXECUTABLE "powershell.exe")
endif()
message(STATUS "Using PowerShell executable: ${POWERSHELL_EXECUTABLE}")
# Sign both bun-profile.exe and bun.exe after stripping
register_command(
TARGET
${bun}
TARGET_PHASE
POST_BUILD
COMMENT
"Code signing bun-profile.exe and bun.exe with DigiCert KeyLocker"
COMMAND
"${POWERSHELL_EXECUTABLE}" "-NoProfile" "-ExecutionPolicy" "Bypass" "-File" "${SIGN_SCRIPT}" "-BunProfileExe" "${BUILD_PATH}/${bunExe}" "-BunExe" "${BUILD_PATH}/${bunStripExe}"
CWD
${CMAKE_SOURCE_DIR}
SOURCES
${BUILD_PATH}/${bunStripExe}
)
endif()
endif()
# somehow on some Linux systems we need to disable ASLR for ASAN-instrumented binaries to run

View File

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION 53385bda2d2270223ac66f7b021a4aec3dd6df75)
set(WEBKIT_VERSION 684d4551ce5f62683476409d7402424e0f6eafb5)
endif()
string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)

View File

@@ -1,20 +1,20 @@
Bun provides native bindings for working with SQL databases through a unified Promise-based API that supports both PostgreSQL and SQLite. The interface is designed to be simple and performant, using tagged template literals for queries and offering features like connection pooling, transactions, and prepared statements.
Bun provides native bindings for working with PostgreSQL databases with a modern, Promise-based API. The interface is designed to be simple and performant, using tagged template literals for queries and offering features like connection pooling, transactions, and prepared statements.
```ts
import { sql, SQL } from "bun";
import { sql } from "bun";
// PostgreSQL (default)
const users = await sql`
SELECT * FROM users
WHERE active = ${true}
LIMIT ${10}
`;
// With a SQLite db
const sqlite = new SQL("sqlite://myapp.db");
const results = await sqlite`
SELECT * FROM users
WHERE active = ${1}
// Select with multiple conditions
const activeUsers = await sql`
SELECT *
FROM users
WHERE active = ${true}
AND age >= ${18}
`;
```
@@ -44,115 +44,6 @@ const results = await sqlite`
{% /features %}
## Database Support
Bun.SQL provides a unified API for multiple database systems:
### PostgreSQL
PostgreSQL is used when:
- The connection string doesn't match SQLite patterns (it's the fallback adapter)
- The connection string explicitly uses `postgres://` or `postgresql://` protocols
- No connection string is provided and environment variables point to PostgreSQL
```ts
import { sql } from "bun";
// Uses PostgreSQL if DATABASE_URL is not set or is a PostgreSQL URL
await sql`SELECT ...`;
import { SQL } from "bun";
const pg = new SQL("postgres://user:pass@localhost:5432/mydb");
await pg`SELECT ...`;
```
### SQLite
SQLite support is now built into Bun.SQL, providing the same tagged template literal interface as PostgreSQL:
```ts
import { SQL } from "bun";
// In-memory database
const memory = new SQL(":memory:");
const memory2 = new SQL("sqlite://:memory:");
// File-based database
const db = new SQL("sqlite://myapp.db");
// Using options object
const db2 = new SQL({
adapter: "sqlite",
filename: "./data/app.db",
});
// For simple filenames, specify adapter explicitly
const db3 = new SQL("myapp.db", { adapter: "sqlite" });
```
<details>
<summary>SQLite Connection String Formats</summary>
SQLite accepts various URL formats for connection strings:
```ts
// Standard sqlite:// protocol
new SQL("sqlite://path/to/database.db");
new SQL("sqlite:path/to/database.db"); // Without slashes
// file:// protocol (also recognized as SQLite)
new SQL("file://path/to/database.db");
new SQL("file:path/to/database.db");
// Special :memory: database
new SQL(":memory:");
new SQL("sqlite://:memory:");
new SQL("file://:memory:");
// Relative and absolute paths
new SQL("sqlite://./local.db"); // Relative to current directory
new SQL("sqlite://../parent/db.db"); // Parent directory
new SQL("sqlite:///absolute/path.db"); // Absolute path
// With query parameters
new SQL("sqlite://data.db?mode=ro"); // Read-only mode
new SQL("sqlite://data.db?mode=rw"); // Read-write mode (no create)
new SQL("sqlite://data.db?mode=rwc"); // Read-write-create mode (default)
```
**Note:** Simple filenames without a protocol (like `"myapp.db"`) require explicitly specifying `{ adapter: "sqlite" }` to avoid ambiguity with PostgreSQL.
</details>
<details>
<summary>SQLite-Specific Options</summary>
SQLite databases support additional configuration options:
```ts
const db = new SQL({
adapter: "sqlite",
filename: "app.db",
// SQLite-specific options
readonly: false, // Open in read-only mode
create: true, // Create database if it doesn't exist
readwrite: true, // Open for reading and writing
// Additional Bun:sqlite options
strict: true, // Enable strict mode
safeIntegers: false, // Use JavaScript numbers for integers
});
```
Query parameters in the URL are parsed to set these options:
- `?mode=ro``readonly: true`
- `?mode=rw``readonly: false, create: false`
- `?mode=rwc``readonly: false, create: true` (default)
</details>
### Inserting data
You can pass JavaScript values directly to the SQL template literal and escaping will be handled for you.
@@ -360,55 +251,14 @@ await query;
## Database Environment Variables
`sql` connection parameters can be configured using environment variables. The client checks these variables in a specific order of precedence and automatically detects the database type based on the connection string format.
`sql` connection parameters can be configured using environment variables. The client checks these variables in a specific order of precedence.
### Automatic Database Detection
When using `Bun.sql()` without arguments or `new SQL()` with a connection string, the adapter is automatically detected based on the URL format. SQLite becomes the default adapter in these cases:
#### SQLite Auto-Detection
SQLite is automatically selected when the connection string matches these patterns:
- `:memory:` - In-memory database
- `sqlite://...` - SQLite protocol URLs
- `sqlite:...` - SQLite protocol without slashes
- `file://...` - File protocol URLs
- `file:...` - File protocol without slashes
```ts
// These all use SQLite automatically (no adapter needed)
const sql1 = new SQL(":memory:");
const sql2 = new SQL("sqlite://app.db");
const sql3 = new SQL("file://./database.db");
// Works with DATABASE_URL environment variable
DATABASE_URL=":memory:" bun run app.js
DATABASE_URL="sqlite://myapp.db" bun run app.js
DATABASE_URL="file://./data/app.db" bun run app.js
```
#### PostgreSQL Auto-Detection
PostgreSQL is the default for all other connection strings:
```bash
# PostgreSQL is detected for these patterns
DATABASE_URL="postgres://user:pass@localhost:5432/mydb" bun run app.js
DATABASE_URL="postgresql://user:pass@localhost:5432/mydb" bun run app.js
# Or any URL that doesn't match SQLite patterns
DATABASE_URL="localhost:5432/mydb" bun run app.js
```
### PostgreSQL Environment Variables
The following environment variables can be used to define the PostgreSQL connection:
The following environment variables can be used to define the connection URL:
| Environment Variable | Description |
| --------------------------- | ------------------------------------------ |
| `POSTGRES_URL` | Primary connection URL for PostgreSQL |
| `DATABASE_URL` | Alternative connection URL (auto-detected) |
| `DATABASE_URL` | Alternative connection URL |
| `PGURL` | Alternative connection URL |
| `PG_URL` | Alternative connection URL |
| `TLS_POSTGRES_DATABASE_URL` | SSL/TLS-enabled connection URL |
@@ -424,19 +274,6 @@ If no connection URL is provided, the system checks for the following individual
| `PGPASSWORD` | - | (empty) | Database password |
| `PGDATABASE` | - | username | Database name |
### SQLite Environment Variables
SQLite connections can be configured via `DATABASE_URL` when it contains a SQLite-compatible URL:
```bash
# These are all recognized as SQLite
DATABASE_URL=":memory:"
DATABASE_URL="sqlite://./app.db"
DATABASE_URL="file:///absolute/path/to/db.sqlite"
```
**Note:** PostgreSQL-specific environment variables (`POSTGRES_URL`, `PGHOST`, etc.) are ignored when using SQLite.
## Runtime Preconnection
Bun can preconnect to PostgreSQL at startup to improve performance by establishing database connections before your application code runs. This is useful for reducing connection latency on the first database query.
@@ -456,18 +293,16 @@ The `--sql-preconnect` flag will automatically establish a PostgreSQL connection
## Connection Options
You can configure your database connection manually by passing options to the SQL constructor. Options vary depending on the database adapter:
### PostgreSQL Options
You can configure your database connection manually by passing options to the SQL constructor:
```ts
import { SQL } from "bun";
const db = new SQL({
// Connection details (adapter is auto-detected as PostgreSQL)
// Required
url: "postgres://user:pass@localhost:5432/dbname",
// Alternative connection parameters
// Optional configuration
hostname: "localhost",
port: 5432,
database: "myapp",
@@ -495,53 +330,14 @@ const db = new SQL({
// Callbacks
onconnect: client => {
console.log("Connected to PostgreSQL");
console.log("Connected to database");
},
onclose: client => {
console.log("PostgreSQL connection closed");
console.log("Connection closed");
},
});
```
### SQLite Options
```ts
import { SQL } from "bun";
const db = new SQL({
// Required for SQLite
adapter: "sqlite",
filename: "./data/app.db", // or ":memory:" for in-memory database
// SQLite-specific access modes
readonly: false, // Open in read-only mode
create: true, // Create database if it doesn't exist
readwrite: true, // Allow read and write operations
// SQLite data handling
strict: true, // Enable strict mode for better type safety
safeIntegers: false, // Use BigInt for integers exceeding JS number range
// Callbacks
onconnect: client => {
console.log("SQLite database opened");
},
onclose: client => {
console.log("SQLite database closed");
},
});
```
<details>
<summary>SQLite Connection Notes</summary>
- **Connection Pooling**: SQLite doesn't use connection pooling as it's a file-based database. Each `SQL` instance represents a single connection.
- **Transactions**: SQLite supports nested transactions through savepoints, similar to PostgreSQL.
- **Concurrent Access**: SQLite handles concurrent access through file locking. Use WAL mode for better concurrency.
- **Memory Databases**: Using `:memory:` creates a temporary database that exists only for the connection lifetime.
</details>
## Dynamic passwords
When clients need to use alternative authentication schemes such as access tokens or connections to databases with rotating passwords, provide either a synchronous or asynchronous function that will resolve the dynamic password value at connection time.
@@ -557,66 +353,11 @@ const sql = new SQL(url, {
});
```
## SQLite-Specific Features
### Query Execution
SQLite executes queries synchronously, unlike PostgreSQL which uses asynchronous I/O. However, the API remains consistent using Promises:
```ts
const sqlite = new SQL("sqlite://app.db");
// Works the same as PostgreSQL, but executes synchronously under the hood
const users = await sqlite`SELECT * FROM users`;
// Parameters work identically
const user = await sqlite`SELECT * FROM users WHERE id = ${userId}`;
```
### SQLite Pragmas
You can use PRAGMA statements to configure SQLite behavior:
```ts
const sqlite = new SQL("sqlite://app.db");
// Enable foreign keys
await sqlite`PRAGMA foreign_keys = ON`;
// Set journal mode to WAL for better concurrency
await sqlite`PRAGMA journal_mode = WAL`;
// Check integrity
const integrity = await sqlite`PRAGMA integrity_check`;
```
### Data Type Differences
SQLite has a more flexible type system than PostgreSQL:
```ts
// SQLite stores data in 5 storage classes: NULL, INTEGER, REAL, TEXT, BLOB
const sqlite = new SQL("sqlite://app.db");
// SQLite is more lenient with types
await sqlite`
CREATE TABLE flexible (
id INTEGER PRIMARY KEY,
data TEXT, -- Can store numbers as strings
value NUMERIC, -- Can store integers, reals, or text
blob BLOB -- Binary data
)
`;
// JavaScript values are automatically converted
await sqlite`INSERT INTO flexible VALUES (${1}, ${"text"}, ${123.45}, ${Buffer.from("binary")})`;
```
## Transactions
To start a new transaction, use `sql.begin`. This method works for both PostgreSQL and SQLite. For PostgreSQL, it reserves a dedicated connection from the pool. For SQLite, it begins a transaction on the single connection.
To start a new transaction, use `sql.begin`. This method reserves a dedicated connection for the duration of the transaction and provides a scoped `sql` instance to use within the callback function. Once the callback completes, `sql.begin` resolves with the return value of the callback.
The `BEGIN` command is sent automatically, including any optional configurations you specify. If an error occurs during the transaction, a `ROLLBACK` is triggered to ensure the process continues smoothly.
The `BEGIN` command is sent automatically, including any optional configurations you specify. If an error occurs during the transaction, a `ROLLBACK` is triggered to release the reserved connection and ensure the process continues smoothly.
### Basic Transactions
@@ -811,34 +552,9 @@ Note that disabling prepared statements may impact performance for queries that
## Error Handling
The client provides typed errors for different failure scenarios. Errors are database-specific and extend from base error classes:
The client provides typed errors for different failure scenarios:
### Error Classes
```ts
import { SQL } from "bun";
try {
await sql`SELECT * FROM users`;
} catch (error) {
if (error instanceof SQL.PostgresError) {
// PostgreSQL-specific error
console.log(error.code); // PostgreSQL error code
console.log(error.detail); // Detailed error message
console.log(error.hint); // Helpful hint from PostgreSQL
} else if (error instanceof SQL.SQLiteError) {
// SQLite-specific error
console.log(error.code); // SQLite error code (e.g., "SQLITE_CONSTRAINT")
console.log(error.errno); // SQLite error number
console.log(error.byteOffset); // Byte offset in SQL statement (if available)
} else if (error instanceof SQL.SQLError) {
// Generic SQL error (base class)
console.log(error.message);
}
}
```
### PostgreSQL Connection Errors
### Connection Errors
| Connection Errors | Description |
| --------------------------------- | ---------------------------------------------------- |
@@ -903,50 +619,6 @@ try {
| `ERR_POSTGRES_UNSAFE_TRANSACTION` | Unsafe transaction operation detected |
| `ERR_POSTGRES_INVALID_TRANSACTION_STATE` | Invalid transaction state |
### SQLite-Specific Errors
SQLite errors provide error codes and numbers that correspond to SQLite's standard error codes:
<details>
<summary>Common SQLite Error Codes</summary>
| Error Code | errno | Description |
| ------------------- | ----- | ---------------------------------------------------- |
| `SQLITE_CONSTRAINT` | 19 | Constraint violation (UNIQUE, CHECK, NOT NULL, etc.) |
| `SQLITE_BUSY` | 5 | Database is locked |
| `SQLITE_LOCKED` | 6 | Table in the database is locked |
| `SQLITE_READONLY` | 8 | Attempt to write to a readonly database |
| `SQLITE_IOERR` | 10 | Disk I/O error |
| `SQLITE_CORRUPT` | 11 | Database disk image is malformed |
| `SQLITE_FULL` | 13 | Database or disk is full |
| `SQLITE_CANTOPEN` | 14 | Unable to open database file |
| `SQLITE_PROTOCOL` | 15 | Database lock protocol error |
| `SQLITE_SCHEMA` | 17 | Database schema has changed |
| `SQLITE_TOOBIG` | 18 | String or BLOB exceeds size limit |
| `SQLITE_MISMATCH` | 20 | Data type mismatch |
| `SQLITE_MISUSE` | 21 | Library used incorrectly |
| `SQLITE_AUTH` | 23 | Authorization denied |
Example error handling:
```ts
const sqlite = new SQL("sqlite://app.db");
try {
await sqlite`INSERT INTO users (id, name) VALUES (1, 'Alice')`;
await sqlite`INSERT INTO users (id, name) VALUES (1, 'Bob')`; // Duplicate ID
} catch (error) {
if (error instanceof SQL.SQLiteError) {
if (error.code === "SQLITE_CONSTRAINT") {
console.log("Constraint violation:", error.message);
// Handle unique constraint violation
}
}
}
```
</details>
## Numbers and BigInt
Bun's SQL client includes special handling for large numbers that exceed the range of a 53-bit integer. Here's how it works:
@@ -980,6 +652,7 @@ There's still some things we haven't finished yet.
- Connection preloading via `--db-preconnect` Bun CLI flag
- MySQL support: [we're working on it](https://github.com/oven-sh/bun/pull/15274)
- SQLite support: planned, but not started. Ideally, we implement it natively instead of wrapping `bun:sqlite`.
- Column name transforms (e.g. `snake_case` to `camelCase`). This is mostly blocked on a unicode-aware implementation of changing the case in C++ using WebKit's `WTF::String`.
- Column type transforms

View File

@@ -772,65 +772,6 @@ console.log(obj); // => { foo: "bar" }
Internally, [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone) and [`postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage) serialize and deserialize the same way. This exposes the underlying [HTML Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm) to JavaScript as an ArrayBuffer.
## `Bun.stripANSI()` ~6-57x faster `strip-ansi` alternative
`Bun.stripANSI(text: string): string`
Strip ANSI escape codes from a string. This is useful for removing colors and formatting from terminal output.
```ts
const coloredText = "\u001b[31mHello\u001b[0m \u001b[32mWorld\u001b[0m";
const plainText = Bun.stripANSI(coloredText);
console.log(plainText); // => "Hello World"
// Works with various ANSI codes
const formatted = "\u001b[1m\u001b[4mBold and underlined\u001b[0m";
console.log(Bun.stripANSI(formatted)); // => "Bold and underlined"
```
`Bun.stripANSI` is significantly faster than the popular [`strip-ansi`](https://www.npmjs.com/package/strip-ansi) npm package:
```js
> bun bench/snippets/strip-ansi.mjs
cpu: Apple M3 Max
runtime: bun 1.2.21 (arm64-darwin)
benchmark avg (min … max) p75 / p99
------------------------------------------------------- ----------
Bun.stripANSI 11 chars no-ansi 8.13 ns/iter 8.27 ns
(7.45 ns … 33.59 ns) 10.29 ns
Bun.stripANSI 13 chars ansi 51.68 ns/iter 52.51 ns
(46.16 ns … 113.71 ns) 57.71 ns
Bun.stripANSI 16,384 chars long-no-ansi 298.39 ns/iter 305.44 ns
(281.50 ns … 331.65 ns) 320.70 ns
Bun.stripANSI 212,992 chars long-ansi 227.65 µs/iter 234.50 µs
(216.46 µs … 401.92 µs) 262.25 µs
```
```js
> node bench/snippets/strip-ansi.mjs
cpu: Apple M3 Max
runtime: node 24.6.0 (arm64-darwin)
benchmark avg (min … max) p75 / p99
-------------------------------------------------------- ---------
npm/strip-ansi 11 chars no-ansi 466.79 ns/iter 468.67 ns
(454.08 ns … 570.67 ns) 543.67 ns
npm/strip-ansi 13 chars ansi 546.77 ns/iter 550.23 ns
(532.74 ns … 651.08 ns) 590.35 ns
npm/strip-ansi 16,384 chars long-no-ansi 4.85 µs/iter 4.89 µs
(4.71 µs … 5.00 µs) 4.98 µs
npm/strip-ansi 212,992 chars long-ansi 1.36 ms/iter 1.38 ms
(1.27 ms … 1.73 ms) 1.49 ms
```
## `estimateShallowMemoryUsageOf` in `bun:jsc`
The `estimateShallowMemoryUsageOf` function returns a best-effort estimate of the memory usage of an object in bytes, excluding the memory usage of properties or other objects it references. For accurate per-object memory usage, use `Bun.generateHeapSnapshot`.

View File

@@ -1,5 +1,5 @@
{% callout %}
**🚧** — The `Worker` API is still experimental (particularly for terminating workers). We are actively working on improving this.
**🚧** — The `Worker` API is still experimental and should not be considered ready for production.
{% /callout %}
[`Worker`](https://developer.mozilla.org/en-US/docs/Web/API/Worker) lets you start and communicate with a new JavaScript instance running on a separate thread while sharing I/O resources with the main thread.

View File

@@ -1,81 +0,0 @@
Bun's package manager can scan packages for security vulnerabilities before installation, helping protect your applications from supply chain attacks and known vulnerabilities.
## Quick Start
Configure a security scanner in your `bunfig.toml`:
```toml
[install.security]
scanner = "@acme/bun-security-scanner"
```
When configured, Bun will:
- Scan all packages before installation
- Display security warnings and advisories
- Cancel installation if critical vulnerabilities are found
- Automatically disable auto-install for security
## How It Works
Security scanners analyze packages during `bun install`, `bun add`, and other package operations. They can detect:
- Known security vulnerabilities (CVEs)
- Malicious packages
- License compliance issues
- ...and more!
### Security Levels
Scanners report issues at two severity levels:
- **`fatal`** - Installation stops immediately, exits with non-zero code
- **`warn`** - In interactive terminals, prompts to continue; in CI, exits immediately
## Using Pre-built Scanners
Many security companies publish Bun security scanners as npm packages that you can install and use immediately.
### Installing a Scanner
Install a security scanner from npm:
```bash
$ bun add -d @acme/bun-security-scanner
```
> **Note:** Consult your security scanner's documentation for their specific package name and installation instructions. Most scanners will be installed with `bun add`.
### Configuring the Scanner
After installation, configure it in your `bunfig.toml`:
```toml
[install.security]
scanner = "@acme/bun-security-scanner"
```
### Enterprise Configuration
Some enterprise scanners might support authentication and/or configuration through environment variables:
```bash
# This might go in ~/.bashrc, for example
export SECURITY_API_KEY="your-api-key"
# The scanner will now use these credentials automatically
bun install
```
Consult your security scanner's documentation to learn which environment variables to set and if any additional configuration is required.
### Authoring your own scanner
For a complete example with tests and CI setup, see the official template:
[github.com/oven-sh/security-scanner-template](https://github.com/oven-sh/security-scanner-template)
## Related
- [Configuration (bunfig.toml)](/docs/runtime/bunfig#installsecurityscanner)
- [Package Manager](/docs/install)
- [Security Scanner Template](https://github.com/oven-sh/security-scanner-template)

View File

@@ -496,62 +496,6 @@ Whether to generate a non-Bun lockfile alongside `bun.lock`. (A `bun.lock` will
print = "yarn"
```
### `install.security.scanner`
Configure a security scanner to scan packages for vulnerabilities before installation.
First, install a security scanner from npm:
```bash
$ bun add -d @acme/bun-security-scanner
```
Then configure it in your `bunfig.toml`:
```toml
[install.security]
scanner = "@acme/bun-security-scanner"
```
When a security scanner is configured:
- Auto-install is automatically disabled for security
- Packages are scanned before installation
- Installation is cancelled if fatal issues are found
- Security warnings are displayed during installation
Learn more about [using and writing security scanners](/docs/install/security).
### `install.linker`
Configure the default linker strategy. Default `"hoisted"`.
For complete documentation refer to [Package manager > Isolated installs](https://bun.com/docs/install/isolated).
```toml
[install]
linker = "hoisted"
```
Valid values are:
{% table %}
- Value
- Description
---
- `"hoisted"`
- Link dependencies in a shared `node_modules` directory.
---
- `"isolated"`
- Link dependencies inside each package installation.
{% /table %}
<!-- ## Debugging -->
<!--

View File

@@ -532,74 +532,6 @@ Hello World! pwd=C:\Users\Demo
Bun Shell is a small programming language in Bun that is implemented in Zig. It includes a handwritten lexer, parser, and interpreter. Unlike bash, zsh, and other shells, Bun Shell runs operations concurrently.
## Security in the Bun shell
By design, the Bun shell _does not invoke a system shell_ (like `/bin/sh`) and
is instead a re-implementation of bash that runs in the same Bun process,
designed with security in mind.
When parsing command arguments, it treats all _interpolated variables_ as single, literal strings.
This protects the Bun shell against **command injection**:
```js
import { $ } from "bun";
const userInput = "my-file.txt; rm -rf /";
// SAFE: `userInput` is treated as a single quoted string
await $`ls ${userInput}`;
```
In the above example, `userInput` is treated as a single string. This causes
the `ls` command to try to read the contents of a single directory named
"my-file; rm -rf /".
### Security considerations
While command injection is prevented by default, developers are still
responsible for security in certain scenarios.
Similar to the `Bun.spawn` or `node:child_process.exec()` APIs, you can intentionally
execute a command which spawns a new shell (e.g. `bash -c`) with arguments.
When you do this, you hand off control, and Bun's built-in protections no
longer apply to the string interpreted by that new shell.
```js
import { $ } from "bun";
const userInput = "world; touch /tmp/pwned";
// UNSAFE: You have explicitly started a new shell process with `bash -c`.
// This new shell will execute the `touch` command. Any user input
// passed this way must be rigorously sanitized.
await $`bash -c "echo ${userInput}"`;
```
### Argument injection
The Bun shell cannot know how an external command interprets its own
command-line arguments. An attacker can supply input that the target program
recognizes as one of its own options or flags, leading to unintended behavior.
```js
import { $ } from "bun";
// Malicious input formatted as a Git command-line flag
const branch = "--upload-pack=echo pwned";
// UNSAFE: While Bun safely passes the string as a single argument,
// the `git` program itself sees and acts upon the malicious flag.
await $`git ls-remote origin ${branch}`;
```
{% callout %}
**Recommendation** — As is best practice in every language, always sanitize
user-provided input before passing it as an argument to an external command.
The responsibility for validating arguments rests with your application code.
{% /callout %}
## Credits
Large parts of this API were inspired by [zx](https://github.com/google/zx), [dax](https://github.com/dsherret/dax), and [bnx](https://github.com/wobsoriano/bnx). Thank you to the authors of those projects.

View File

@@ -7,10 +7,9 @@
"./packages/@types/bun"
],
"devDependencies": {
"bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
"@lezer/common": "^1.2.3",
"@lezer/cpp": "^1.1.3",
"@types/bun": "workspace:*",
"bun-tracestrings": "github:oven-sh/bun.report#912ca63e26c51429d3e6799aa2a6ab079b188fd8",
"esbuild": "^0.21.4",
"mitata": "^0.1.11",
"peechy": "0.4.34",
@@ -49,9 +48,6 @@
"css-properties": "bun run src/css/properties/generate_properties.ts",
"uv-posix-stubs": "bun run src/bun.js/bindings/libuv/generate_uv_posix_stubs.ts",
"bump": "bun ./scripts/bump.ts",
"jsc:build": "bun ./scripts/build-jsc.ts release",
"jsc:build:debug": "bun ./scripts/build-jsc.ts debug",
"jsc:build:lto": "bun ./scripts/build-jsc.ts lto",
"typecheck": "tsc --noEmit && cd test && bun run typecheck",
"fmt": "bun run prettier",
"fmt:cpp": "bun run clang-format",

View File

@@ -14,6 +14,7 @@
* This module aliases `globalThis.Bun`.
*/
declare module "bun" {
type DistributedOmit<T, K extends PropertyKey> = T extends T ? Omit<T, K> : never;
type PathLike = string | NodeJS.TypedArray | ArrayBufferLike | URL;
type ArrayBufferView<TArrayBuffer extends ArrayBufferLike = ArrayBufferLike> =
| NodeJS.TypedArray<TArrayBuffer>
@@ -67,31 +68,39 @@ declare module "bun" {
? T
: Otherwise // Not defined in lib dom (or anywhere else), so no conflict. We can safely use our own definition
: Otherwise; // Lib dom not loaded anyway, so no conflict. We can safely use our own definition
/**
* Like Omit, but correctly distributes over unions. Most useful for removing
* properties from union options objects, like {@link Bun.SQL.Options}
*
* @example
* ```ts
* type X = Bun.DistributedOmit<{type?: 'a', url?: string} | {type?: 'b', flag?: boolean}, "url">
* // `{type?: 'a'} | {type?: 'b', flag?: boolean}` (Omit applied to each union item instead of entire type)
*
* type Y = Omit<{type?: 'a', url?: string} | {type?: 'b', flag?: boolean}, "url">;
* // `{type?: "a" | "b" | undefined}` (Missing `flag` property and no longer a union)
* ```
*/
type DistributedOmit<T, K extends PropertyKey> = T extends T ? Omit<T, K> : never;
type KeysInBoth<A, B> = Extract<keyof A, keyof B>;
type MergeInner<A, B> = Omit<A, KeysInBoth<A, B>> &
Omit<B, KeysInBoth<A, B>> & {
[Key in KeysInBoth<A, B>]: A[Key] | B[Key];
};
type Merge<A, B> = MergeInner<A, B> & MergeInner<B, A>;
type DistributedMerge<T, Else = T> = T extends T ? Merge<T, Exclude<Else, T>> : never;
}
// Legacy type aliases kept only for backward compatibility; every one is
// tagged @deprecated and may be removed in a future release.
/** @deprecated This type is unused in Bun's types and might be removed in the near future */
type Platform =
  | "aix"
  | "android"
  | "darwin"
  | "freebsd"
  | "haiku"
  | "linux"
  | "openbsd"
  | "sunos"
  | "win32"
  | "cygwin"
  | "netbsd";
/** @deprecated This type is unused in Bun's types and might be removed in the near future */
type Architecture = "arm" | "arm64" | "ia32" | "mips" | "mipsel" | "ppc" | "ppc64" | "s390" | "s390x" | "x64";
/** @deprecated This type is unused in Bun's types and might be removed in the near future */
type UncaughtExceptionListener = (error: Error, origin: UncaughtExceptionOrigin) => void;
/**
 * Most of the time the unhandledRejection will be an Error, but this should not be relied upon
 * as *anything* can be thrown/rejected, it is therefore unsafe to assume that the value is an Error.
 *
 * @deprecated This type is unused in Bun's types and might be removed in the near future
 */
type UnhandledRejectionListener = (reason: unknown, promise: Promise<unknown>) => void;
/** @deprecated This type is unused in Bun's types and might be removed in the near future */
type MultipleResolveListener = (type: MultipleResolveType, promise: Promise<unknown>, value: unknown) => void;
interface ErrorEventInit extends EventInit {
colno?: number;
error?: any;
@@ -587,23 +596,6 @@ declare module "bun" {
options?: StringWidthOptions,
): number;
/**
* Remove ANSI escape codes from a string.
*
* @category Utilities
*
* @param input The string to remove ANSI escape codes from.
* @returns The string with ANSI escape codes removed.
*
* @example
* ```ts
* import { stripANSI } from "bun";
*
* console.log(stripANSI("\u001b[31mhello\u001b[39m")); // "hello"
* ```
*/
function stripANSI(input: string): string;
/**
* TOML related APIs
*/
@@ -1267,6 +1259,678 @@ declare module "bun" {
stat(): Promise<import("node:fs").Stats>;
}
namespace SQL {
/**
 * Maps a tuple/array of promise-likes to a tuple/array of their awaited
 * element types (each `T[K]` becomes `Awaited<T[K]>`).
 */
type AwaitPromisesArray<T extends Array<PromiseLike<any>>> = {
  [K in keyof T]: Awaited<T[K]>;
};
/**
 * The resolved result of a context callback: arrays of promise-likes are
 * awaited element-wise via {@link AwaitPromisesArray}; anything else is
 * simply `Awaited<T>`.
 */
type ContextCallbackResult<T> = T extends Array<PromiseLike<any>> ? AwaitPromisesArray<T> : Awaited<T>;
/**
 * An async callback that receives a scoped `sql` instance (e.g. inside a
 * transaction or savepoint) and resolves with its result.
 */
type ContextCallback<T, SQL> = (sql: SQL) => Promise<T>;
/**
* Configuration options for SQL client connection and behavior
*
* @example
* ```ts
* const config: Bun.SQL.Options = {
* host: 'localhost',
* port: 5432,
* user: 'dbuser',
* password: 'secretpass',
* database: 'myapp',
* idleTimeout: 30,
* max: 20,
* onconnect: (client) => {
* console.log('Connected to database');
* }
* };
* ```
*/
interface Options {
  /**
   * Connection URL (can be string or URL object)
   */
  url?: URL | string | undefined;
  /**
   * Database server hostname
   * @default "localhost"
   */
  host?: string | undefined;
  /**
   * Database server hostname (alias for host)
   * @deprecated Prefer {@link host}
   * @default "localhost"
   */
  hostname?: string | undefined;
  /**
   * Database server port number
   * @default 5432
   */
  port?: number | string | undefined;
  /**
   * Database user for authentication
   * @default "postgres"
   */
  username?: string | undefined;
  /**
   * Database user for authentication (alias for username)
   * @deprecated Prefer {@link username}
   * @default "postgres"
   */
  user?: string | undefined;
  /**
   * Database password for authentication. May be a (possibly async)
   * function so rotating credentials/tokens can be resolved at connect time.
   * @default ""
   */
  password?: string | (() => MaybePromise<string>) | undefined;
  /**
   * Database password for authentication (alias for password)
   * @deprecated Prefer {@link password}
   * @default ""
   */
  pass?: string | (() => MaybePromise<string>) | undefined;
  /**
   * Name of the database to connect to
   * @default The username value
   */
  database?: string | undefined;
  /**
   * Name of the database to connect to (alias for database)
   * @deprecated Prefer {@link database}
   * @default The username value
   */
  db?: string | undefined;
  /**
   * Database adapter/driver to use
   * @default "postgres"
   */
  adapter?: "postgres" /*| "sqlite" | "mysql"*/ | (string & {}) | undefined;
  /**
   * Maximum time in seconds a connection may remain idle before being
   * closed (presumably matches postgres.js `idle_timeout` semantics —
   * TODO confirm; the previous description duplicated connectionTimeout's).
   * @default 0 (no timeout)
   */
  idleTimeout?: number | undefined;
  /**
   * Maximum idle time in seconds before a connection is closed (alias for idleTimeout)
   * @deprecated Prefer {@link idleTimeout}
   * @default 0 (no timeout)
   */
  idle_timeout?: number | undefined;
  /**
   * Maximum time in seconds to wait when establishing a connection
   * @default 30
   */
  connectionTimeout?: number | undefined;
  /**
   * Maximum time in seconds to wait when establishing a connection (alias for connectionTimeout)
   * @deprecated Prefer {@link connectionTimeout}
   * @default 30
   */
  connection_timeout?: number | undefined;
  /**
   * Maximum time in seconds to wait when establishing a connection (alias for connectionTimeout)
   * @deprecated Prefer {@link connectionTimeout}
   * @default 30
   */
  connectTimeout?: number | undefined;
  /**
   * Maximum time in seconds to wait when establishing a connection (alias for connectionTimeout)
   * @deprecated Prefer {@link connectionTimeout}
   * @default 30
   */
  connect_timeout?: number | undefined;
  /**
   * Maximum lifetime in seconds of a connection
   * @default 0 (no maximum lifetime)
   */
  maxLifetime?: number | undefined;
  /**
   * Maximum lifetime in seconds of a connection (alias for maxLifetime)
   * @deprecated Prefer {@link maxLifetime}
   * @default 0 (no maximum lifetime)
   */
  max_lifetime?: number | undefined;
  /**
   * Whether to use TLS/SSL for the connection
   * @default false
   */
  tls?: TLSOptions | boolean | undefined;
  /**
   * Whether to use TLS/SSL for the connection (alias for tls)
   * @default false
   */
  ssl?: TLSOptions | boolean | undefined;
  // `.path` is currently unsupported in Bun; the implementation is incomplete.
  //
  // /**
  //  * Unix domain socket path for connection
  //  * @default ""
  //  */
  // path?: string | undefined;
  /**
   * Callback function executed when a connection is established
   */
  onconnect?: ((client: SQL) => void) | undefined;
  /**
   * Callback function executed when a connection is closed
   */
  onclose?: ((client: SQL) => void) | undefined;
  /**
   * Postgres client runtime configuration options
   *
   * @see https://www.postgresql.org/docs/current/runtime-config-client.html
   */
  connection?: Record<string, string | boolean | number> | undefined;
  /**
   * Maximum number of connections in the pool
   * @default 10
   */
  max?: number | undefined;
  /**
   * By default values outside i32 range are returned as strings. If this is true, values outside i32 range are returned as BigInts.
   * @default false
   */
  bigint?: boolean | undefined;
  /**
   * Automatic creation of prepared statements
   * @default true
   */
  prepare?: boolean | undefined;
}
/**
 * Represents a SQL query that can be executed, with additional control methods.
 * Extends Promise to allow for async/await usage.
 */
interface Query<T> extends Promise<T> {
  /**
   * Indicates if the query is currently executing
   */
  active: boolean;
  /**
   * Indicates if the query has been cancelled
   */
  cancelled: boolean;
  /**
   * Cancels the executing query
   * @returns the same query, allowing call chaining
   */
  cancel(): Query<T>;
  /**
   * Executes the query as a simple query; no parameters are allowed, but it
   * can execute multiple commands separated by semicolons
   */
  simple(): Query<T>;
  /**
   * Executes the query
   */
  execute(): Query<T>;
  /**
   * Returns the raw query result
   */
  raw(): Query<T>;
  /**
   * Returns only the values from the query result (rows as arrays rather than objects)
   */
  values(): Query<T>;
}
/**
 * Callback function type for transaction contexts.
 * Receives a scoped {@link TransactionSQL} to execute queries within the transaction.
 * @param sql Function to execute SQL queries within the transaction
 */
type TransactionContextCallback<T> = ContextCallback<T, TransactionSQL>;
/**
 * Callback function type for savepoint contexts.
 * Receives a scoped {@link SavepointSQL} to execute queries within the savepoint.
 * @param sql Function to execute SQL queries within the savepoint
 */
type SavepointContextCallback<T> = ContextCallback<T, SavepointSQL>;
/**
 * SQL.Helper represents a parameter or serializable
 * value inside of a query.
 *
 * @example
 * ```ts
 * const helper = sql(users, 'id');
 * await sql`insert into users ${helper}`;
 * ```
 */
interface Helper<T> {
  /** The object(s) being interpolated into the query */
  readonly value: T[];
  /** The column names that will be emitted for {@link value} */
  readonly columns: (keyof T)[];
}
}
/**
 * Main SQL client interface providing connection and transaction management
 */
interface SQL extends AsyncDisposable {
  /**
   * Executes a SQL query using template literals
   * @example
   * ```ts
   * const [user] = await sql<Users[]>`select * from users where id = ${1}`;
   * ```
   */
  <T = any>(strings: TemplateStringsArray, ...values: unknown[]): SQL.Query<T>;
  /**
   * Execute a SQL query using a string
   *
   * @example
   * ```ts
   * const users = await sql<User[]>`SELECT * FROM users WHERE id = ${1}`;
   * ```
   */
  <T = any>(string: string): SQL.Query<T>;
  /**
   * Helper function for inserting an object into a query
   *
   * @example
   * ```ts
   * // Insert an object
   * const result = await sql`insert into users ${sql(users)} returning *`;
   *
   * // Or pick specific columns
   * const result = await sql`insert into users ${sql(users, "id", "name")} returning *`;
   *
   * // Or a single object
   * const result = await sql`insert into users ${sql(user)} returning *`;
   * ```
   */
  <T extends { [Key in PropertyKey]: unknown }>(obj: T | T[] | readonly T[]): SQL.Helper<T>;
  /**
   * Helper function for inserting an object into a query, supporting specific columns
   *
   * @example
   * ```ts
   * // Insert an object
   * const result = await sql`insert into users ${sql(users)} returning *`;
   *
   * // Or pick specific columns
   * const result = await sql`insert into users ${sql(users, "id", "name")} returning *`;
   *
   * // Or a single object
   * const result = await sql`insert into users ${sql(user)} returning *`;
   * ```
   */
  <T extends { [Key in PropertyKey]: unknown }, Keys extends keyof T = keyof T>(
    obj: T | T[] | readonly T[],
    ...columns: readonly Keys[]
  ): SQL.Helper<Pick<T, Keys>>;
  /**
   * Helper function for inserting any serializable value into a query
   *
   * @example
   * ```ts
   * const result = await sql`SELECT * FROM users WHERE id IN ${sql([1, 2, 3])}`;
   * ```
   */
  <T>(value: T): SQL.Helper<T>;
  /**
   * Commits a distributed transaction, also known as a prepared transaction in PostgreSQL or an XA transaction in MySQL
   *
   * @param name - The name of the distributed transaction
   *
   * @example
   * ```ts
   * await sql.commitDistributed("my_distributed_transaction");
   * ```
   */
  commitDistributed(name: string): Promise<void>;
  /**
   * Rolls back a distributed transaction, also known as a prepared transaction in PostgreSQL or an XA transaction in MySQL
   *
   * @param name - The name of the distributed transaction
   *
   * @example
   * ```ts
   * await sql.rollbackDistributed("my_distributed_transaction");
   * ```
   */
  rollbackDistributed(name: string): Promise<void>;
  /** Waits for the database connection to be established
   *
   * @example
   * ```ts
   * await sql.connect();
   * ```
   */
  connect(): Promise<SQL>;
  /**
   * Closes the database connection with optional timeout in seconds. If timeout is 0, it will close immediately; if not provided, it will wait for all queries to finish before closing.
   *
   * @param options - The options for the close
   *
   * @example
   * ```ts
   * await sql.close({ timeout: 1 });
   * ```
   */
  close(options?: { timeout?: number }): Promise<void>;
  /**
   * Closes the database connection with optional timeout in seconds. If timeout is 0, it will close immediately; if not provided, it will wait for all queries to finish before closing.
   * This is an alias of {@link SQL.close}
   *
   * @param options - The options for the close
   *
   * @example
   * ```ts
   * await sql.end({ timeout: 1 });
   * ```
   */
  end(options?: { timeout?: number }): Promise<void>;
  /**
   * Flushes any pending operations
   *
   * @example
   * ```ts
   * sql.flush();
   * ```
   */
  flush(): void;
  /**
   * The reserve method pulls out a connection from the pool, and returns a client that wraps the single connection.
   *
   * This can be used for running queries on an isolated connection.
   * Calling reserve in a reserved Sql will return a new reserved connection, not the same connection (behavior matches postgres package).
   *
   * @example
   * ```ts
   * const reserved = await sql.reserve();
   * await reserved`select * from users`;
   * await reserved.release();
   * // in a production scenario it would be something more like
   * const reserved = await sql.reserve();
   * try {
   *   // ... queries
   * } finally {
   *   await reserved.release();
   * }
   *
   * // Bun supports Symbol.dispose and Symbol.asyncDispose
   * {
   *   // always release after context (safer)
   *   using reserved = await sql.reserve()
   *   await reserved`select * from users`
   * }
   * ```
   */
  reserve(): Promise<ReservedSQL>;
  /**
   * Begins a new transaction.
   *
   * Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.begin will resolve with the returned value from the callback function.
   * BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
   * @example
   * const [user, account] = await sql.begin(async sql => {
   *   const [user] = await sql`
   *     insert into users (
   *       name
   *     ) values (
   *       'Murray'
   *     )
   *     returning *
   *   `
   *   const [account] = await sql`
   *     insert into accounts (
   *       user_id
   *     ) values (
   *       ${ user.user_id }
   *     )
   *     returning *
   *   `
   *   return [user, account]
   * })
   */
  begin<const T>(fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
  /**
   * Begins a new transaction with options.
   *
   * Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.begin will resolve with the returned value from the callback function.
   * BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
   * @example
   * const [user, account] = await sql.begin("read write", async sql => {
   *   const [user] = await sql`
   *     insert into users (
   *       name
   *     ) values (
   *       'Murray'
   *     )
   *     returning *
   *   `
   *   const [account] = await sql`
   *     insert into accounts (
   *       user_id
   *     ) values (
   *       ${ user.user_id }
   *     )
   *     returning *
   *   `
   *   return [user, account]
   * })
   */
  begin<const T>(options: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
  /**
   * Alternative method to begin a transaction.
   *
   * Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.transaction will resolve with the returned value from the callback function.
   * BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
   * @alias begin
   * @example
   * const [user, account] = await sql.transaction(async sql => {
   *   const [user] = await sql`
   *     insert into users (
   *       name
   *     ) values (
   *       'Murray'
   *     )
   *     returning *
   *   `
   *   const [account] = await sql`
   *     insert into accounts (
   *       user_id
   *     ) values (
   *       ${ user.user_id }
   *     )
   *     returning *
   *   `
   *   return [user, account]
   * })
   */
  transaction<const T>(fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
  /**
   * Alternative method to begin a transaction with options
   * Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.transaction will resolve with the returned value from the callback function.
   * BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
   *
   * @alias {@link begin}
   *
   * @example
   * const [user, account] = await sql.transaction("read write", async sql => {
   *   const [user] = await sql`
   *     insert into users (
   *       name
   *     ) values (
   *       'Murray'
   *     )
   *     returning *
   *   `
   *   const [account] = await sql`
   *     insert into accounts (
   *       user_id
   *     ) values (
   *       ${ user.user_id }
   *     )
   *     returning *
   *   `
   *   return [user, account]
   * });
   */
  transaction<const T>(options: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
  /**
   * Begins a distributed transaction
   * Also known as Two-Phase Commit: in a distributed transaction, Phase 1 involves the coordinator preparing nodes by ensuring data is written and ready to commit, while Phase 2 finalizes with nodes committing or rolling back based on the coordinator's decision, ensuring durability and releasing locks.
   * In PostgreSQL and MySQL distributed transactions persist beyond the original session, allowing privileged users or coordinators to commit/rollback them, ensuring support for distributed transactions, recovery, and administrative tasks.
   * beginDistributed will automatically roll back if any exceptions are not caught, and you can commit and rollback later if everything goes well.
   * PostgreSQL natively supports distributed transactions using PREPARE TRANSACTION, while MySQL uses XA Transactions, and MSSQL also supports distributed/XA transactions. However, in MSSQL, distributed transactions are tied to the original session, the DTC coordinator, and the specific connection.
   * These transactions are automatically committed or rolled back following the same rules as regular transactions, with no option for manual intervention from other sessions; in MSSQL distributed transactions are used to coordinate transactions using Linked Servers.
   *
   * @example
   * await sql.beginDistributed("numbers", async sql => {
   *   await sql`create table if not exists numbers (a int)`;
   *   await sql`insert into numbers values(1)`;
   * });
   * // later you can call
   * await sql.commitDistributed("numbers");
   * // or await sql.rollbackDistributed("numbers");
   */
  beginDistributed<const T>(
    name: string,
    fn: SQL.TransactionContextCallback<T>,
  ): Promise<SQL.ContextCallbackResult<T>>;
  /** Alternative method to begin a distributed transaction
   * @alias {@link beginDistributed}
   */
  distributed<const T>(name: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
  /** If you know what you're doing, you can use unsafe to pass any string you'd like.
   * Please note that this can lead to SQL injection if you're not careful.
   * You can also nest sql.unsafe within a safe sql expression. This is useful if only part of your query has unsafe elements.
   * @example
   * const result = await sql.unsafe(`select ${danger} from users where id = ${dragons}`)
   */
  unsafe<T = any>(string: string, values?: any[]): SQL.Query<T>;
  /**
   * Reads a file and uses the contents as a query.
   * Optional parameters can be used if the file includes $1, $2, etc
   * @example
   * const result = await sql.file("query.sql", [1, 2, 3]);
   */
  file<T = any>(filename: string, values?: any[]): SQL.Query<T>;
  /**
   * Current client options
   */
  options: SQL.Options;
}
/**
 * Constructor for {@link SQL} client instances.
 * Accepts a connection string/URL, an options object, or both.
 */
const SQL: {
  /**
   * Creates a new SQL client instance
   *
   * @param connectionString - The connection string for the SQL client
   *
   * @example
   * ```ts
   * const sql = new SQL("postgres://localhost:5432/mydb");
   * const sql = new SQL(new URL("postgres://localhost:5432/mydb"));
   * ```
   */
  new (connectionString: string | URL): SQL;
  /**
   * Creates a new SQL client instance with options
   *
   * @param connectionString - The connection string for the SQL client
   * @param options - The options for the SQL client (the `url` option is taken from the first argument)
   *
   * @example
   * ```ts
   * const sql = new SQL("postgres://localhost:5432/mydb", { idleTimeout: 1000 });
   * ```
   */
  new (connectionString: string | URL, options: Omit<SQL.Options, "url">): SQL;
  /**
   * Creates a new SQL client instance with options
   *
   * @param options - The options for the SQL client
   *
   * @example
   * ```ts
   * const sql = new SQL({ url: "postgres://localhost:5432/mydb", idleTimeout: 1000 });
   * ```
   */
  new (options?: SQL.Options): SQL;
};
/**
 * Represents a reserved connection from the connection pool.
 * Extends SQL with additional release functionality.
 */
interface ReservedSQL extends SQL, Disposable {
  /**
   * Releases the client back to the connection pool
   */
  release(): void;
}
/**
 * Represents a client within a transaction context.
 * Extends SQL with savepoint functionality.
 */
interface TransactionSQL extends SQL {
  /** Creates a savepoint within the current transaction */
  // NOTE(review): these overloads reference `SQLSavepointContextCallback`, while the
  // SQL namespace above declares `SQL.SavepointContextCallback` — confirm the
  // `SQLSavepointContextCallback` alias still exists elsewhere in this file.
  savepoint<T>(name: string, fn: SQLSavepointContextCallback<T>): Promise<T>;
  savepoint<T>(fn: SQLSavepointContextCallback<T>): Promise<T>;
}
/**
 * Represents a savepoint within a transaction
 */
interface SavepointSQL extends SQL {}
/** Hash algorithms supported for CSRF token generation and verification */
type CSRFAlgorithm = "blake2b256" | "blake2b512" | "sha256" | "sha384" | "sha512" | "sha512-256";
interface CSRFGenerateOptions {
@@ -1314,6 +1978,16 @@ declare module "bun" {
maxAge?: number;
}
/**
* SQL client
*/
const sql: SQL;
/**
* SQL client for PostgreSQL
*/
const postgres: SQL;
/**
* Generate and verify CSRF tokens
*
@@ -1628,24 +2302,12 @@ declare module "bun" {
kind: ImportKind;
}
namespace _BunBuildInterface {
type Architecture = "x64" | "arm64";
type Libc = "glibc" | "musl";
type SIMD = "baseline" | "modern";
type Target =
| `bun-darwin-${Architecture}`
| `bun-darwin-x64-${SIMD}`
| `bun-linux-${Architecture}`
| `bun-linux-${Architecture}-${Libc}`
| "bun-windows-x64"
| `bun-windows-x64-${SIMD}`
| `bun-linux-x64-${SIMD}-${Libc}`;
}
/**
* @see [Bun.build API docs](https://bun.com/docs/bundler#api)
*/
interface BuildConfigBase {
interface BuildConfig {
entrypoints: string[]; // list of file path
outdir?: string; // output directory
/**
* @default "browser"
*/
@@ -1683,6 +2345,7 @@ declare module "bun" {
asset?: string;
}; // | string;
root?: string; // project root
splitting?: boolean; // default true, enable code splitting
plugins?: BunPlugin[];
// manifest?: boolean; // whether to return manifest
external?: string[];
@@ -1831,57 +2494,8 @@ declare module "bun" {
* ```
*/
tsconfig?: string;
outdir?: string;
}
interface CompileBuildOptions {
target?: _BunBuildInterface.Target;
execArgv?: string[];
executablePath?: string;
outfile?: string;
windows?: {
hideConsole?: boolean;
icon?: string;
title?: string;
};
}
// Compile build config - uses outfile for executable output
interface CompileBuildConfig extends BuildConfigBase {
/**
* Create a standalone executable
*
* When `true`, creates an executable for the current platform.
* When a target string, creates an executable for that platform.
*
* @example
* ```ts
* // Create executable for current platform
* await Bun.build({
* entrypoints: ['./app.js'],
* compile: {
* target: 'linux-x64',
* },
* outfile: './my-app'
* });
*
* // Cross-compile for Linux x64
* await Bun.build({
* entrypoints: ['./app.js'],
* compile: 'linux-x64',
* outfile: './my-app'
* });
* ```
*/
compile: boolean | _BunBuildInterface.Target | CompileBuildOptions;
}
/**
* @see [Bun.build API docs](https://bun.com/docs/bundler#api)
*/
type BuildConfig = BuildConfigBase | CompileBuildConfig;
/**
* Hash and verify passwords using argon2 or bcrypt
*
@@ -3752,11 +4366,11 @@ declare module "bun" {
* The type of options that can be passed to {@link serve}, with support for `routes` and a safer requirement for `fetch`
*/
type ServeFunctionOptions<T, R extends { [K in keyof R]: RouterTypes.RouteValue<Extract<K, string>> }> =
| (__internal.DistributedOmit<Exclude<Serve<T>, WebSocketServeOptions<T>>, "fetch"> & {
| (DistributedOmit<Exclude<Serve<T>, WebSocketServeOptions<T>>, "fetch"> & {
routes: R;
fetch?: (this: Server, request: Request, server: Server) => Response | Promise<Response>;
})
| (__internal.DistributedOmit<Exclude<Serve<T>, WebSocketServeOptions<T>>, "routes"> & {
| (DistributedOmit<Exclude<Serve<T>, WebSocketServeOptions<T>>, "routes"> & {
routes?: never;
fetch: (this: Server, request: Request, server: Server) => Response | Promise<Response>;
})

View File

@@ -1,35 +1,4 @@
declare module "bun" {
/** @deprecated This type is unused in Bun's types and might be removed in the near future */
type Platform =
| "aix"
| "android"
| "darwin"
| "freebsd"
| "haiku"
| "linux"
| "openbsd"
| "sunos"
| "win32"
| "cygwin"
| "netbsd";
/** @deprecated This type is unused in Bun's types and might be removed in the near future */
type Architecture = "arm" | "arm64" | "ia32" | "mips" | "mipsel" | "ppc" | "ppc64" | "s390" | "s390x" | "x64";
/** @deprecated This type is unused in Bun's types and might be removed in the near future */
type UncaughtExceptionListener = (error: Error, origin: UncaughtExceptionOrigin) => void;
/**
* Most of the time the unhandledRejection will be an Error, but this should not be relied upon
* as *anything* can be thrown/rejected, it is therefore unsafe to assume that the value is an Error.
*
* @deprecated This type is unused in Bun's types and might be removed in the near future
*/
type UnhandledRejectionListener = (reason: unknown, promise: Promise<unknown>) => void;
/** @deprecated This type is unused in Bun's types and might be removed in the near future */
type MultipleResolveListener = (type: MultipleResolveType, promise: Promise<unknown>, value: unknown) => void;
/**
* Consume all data from a {@link ReadableStream} until it closes or errors.
*

View File

@@ -1888,25 +1888,6 @@ interface BunFetchRequestInit extends RequestInit {
* ```
*/
unix?: string;
/**
* Control automatic decompression of the response body.
* When set to `false`, the response body will not be automatically decompressed,
* and the `Content-Encoding` header will be preserved. This can improve performance
* when you need to handle compressed data manually or forward it as-is.
* This is a custom property that is not part of the Fetch API specification.
*
* @default true
* @example
* ```js
* // Disable automatic decompression for a proxy server
* const response = await fetch("https://example.com/api", {
* decompress: false
* });
* // response.headers.get('content-encoding') might be 'gzip' or 'br'
* ```
*/
decompress?: boolean;
}
/**

View File

@@ -21,8 +21,6 @@
/// <reference path="./redis.d.ts" />
/// <reference path="./shell.d.ts" />
/// <reference path="./experimental.d.ts" />
/// <reference path="./sql.d.ts" />
/// <reference path="./security.d.ts" />
/// <reference path="./bun.ns.d.ts" />

View File

@@ -24,12 +24,6 @@ declare module "stream/web" {
}
}
declare module "url" {
interface URLSearchParams {
toJSON(): Record<string, string>;
}
}
declare global {
namespace NodeJS {
interface ProcessEnv extends Bun.Env {}

View File

@@ -574,50 +574,6 @@ declare module "bun" {
*/
getex(key: RedisClient.KeyLike): Promise<string | null>;
/**
* Get the value of a key and set its expiration in seconds
* @param key The key to get
* @param ex Set the specified expire time, in seconds
* @param seconds The number of seconds until expiration
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
*/
getex(key: RedisClient.KeyLike, ex: "EX", seconds: number): Promise<string | null>;
/**
* Get the value of a key and set its expiration in milliseconds
* @param key The key to get
* @param px Set the specified expire time, in milliseconds
* @param milliseconds The number of milliseconds until expiration
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
*/
getex(key: RedisClient.KeyLike, px: "PX", milliseconds: number): Promise<string | null>;
/**
* Get the value of a key and set its expiration at a specific Unix timestamp in seconds
* @param key The key to get
* @param exat Set the specified Unix time at which the key will expire, in seconds
* @param timestampSeconds The Unix timestamp in seconds
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
*/
getex(key: RedisClient.KeyLike, exat: "EXAT", timestampSeconds: number): Promise<string | null>;
/**
* Get the value of a key and set its expiration at a specific Unix timestamp in milliseconds
* @param key The key to get
* @param pxat Set the specified Unix time at which the key will expire, in milliseconds
* @param timestampMilliseconds The Unix timestamp in milliseconds
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
*/
getex(key: RedisClient.KeyLike, pxat: "PXAT", timestampMilliseconds: number): Promise<string | null>;
/**
* Get the value of a key and remove its expiration
* @param key The key to get
* @param persist Remove the expiration from the key
* @returns Promise that resolves with the value of the key, or null if the key doesn't exist
*/
getex(key: RedisClient.KeyLike, persist: "PERSIST"): Promise<string | null>;
/**
* Ping the server
* @returns Promise that resolves with "PONG" if the server is reachable, or throws an error if the server is not reachable

View File

@@ -1,101 +0,0 @@
declare module "bun" {
/**
* `bun install` security related declarations
*/
export namespace Security {
export interface Package {
/**
* The name of the package
*/
name: string;
/**
* The resolved version to be installed that matches the requested range.
*
* This is the exact version string, **not** a range.
*/
version: string;
/**
* The URL of the tgz of this package that Bun will download
*/
tarball: string;
/**
* The range that was requested by the command
*
* This could be a tag like `beta` or a semver range like `>=4.0.0`
*/
requestedRange: string;
}
/**
* Advisory represents the result of a security scan result of a package
*/
export interface Advisory {
/**
* Level represents the degree of danger for a security advisory
*
* Bun behaves differently depending on the values returned from the
* {@link Scanner.scan `scan()`} hook:
*
* > In any case, Bun *always* pretty prints *all* the advisories,
* > but...
* >
* > → if any **fatal**, Bun will immediately cancel the installation
* > and quit with a non-zero exit code
* >
* > → else if any **warn**, Bun will either ask the user if they'd like
* > to continue with the install if in a TTY environment, or
* > immediately exit if not.
*/
level: "fatal" | "warn";
/**
* The name of the package attempting to be installed.
*/
package: string;
/**
* If available, this is a url linking to a CVE or report online so
* users can learn more about the advisory.
*/
url: string | null;
/**
* If available, this is a brief description of the advisory that Bun
* will print to the user.
*/
description: string | null;
}
export interface Scanner {
/**
* This is the version of the scanner implementation. It may change in
* future versions, so we will use this version to discriminate between
* such versions. It's entirely possible this API changes in the future
* so much that version 1 would no longer be supported.
*
* The version is required because third-party scanner package versions
* are inherently unrelated to Bun versions
*/
version: "1";
/**
* Perform an advisory check when a user ran `bun add <package>
* [...packages]` or other related/similar commands.
*
* If this function throws an error, Bun will immediately stop the
* install process and print the error to the user.
*
* @param info An object containing an array of packages to be added.
* The package array will contain all proposed dependencies, including
* transitive ones. More simply, that means it will include dependencies
* of the packages the user wants to add.
*
* @returns A list of advisories.
*/
scan: (info: { packages: Package[] }) => Promise<Advisory[]>;
}
}
}

View File

@@ -1,809 +0,0 @@
import type * as BunSQLite from "bun:sqlite";
declare module "bun" {
/**
* Represents a reserved connection from the connection pool Extends SQL with
* additional release functionality
*/
interface ReservedSQL extends SQL, Disposable {
/**
* Releases the client back to the connection pool
*/
release(): void;
}
/**
* Represents a client within a transaction context Extends SQL with savepoint
* functionality
*/
interface TransactionSQL extends SQL {
/**
* Creates a savepoint within the current transaction
*/
savepoint<T>(name: string, fn: SQL.SavepointContextCallback<T>): Promise<T>;
savepoint<T>(fn: SQL.SavepointContextCallback<T>): Promise<T>;
/**
* The reserve method pulls out a connection from the pool, and returns a
* client that wraps the single connection.
*
* Using reserve() inside of a transaction will return a brand new
* connection, not one related to the transaction. This matches the
* behaviour of the `postgres` package.
*/
reserve(): Promise<ReservedSQL>;
}
namespace SQL {
class SQLError extends Error {
constructor(message: string);
}
class PostgresError extends SQLError {
public readonly code: string;
public readonly errno: string | undefined;
public readonly detail: string | undefined;
public readonly hint: string | undefined;
public readonly severity: string | undefined;
public readonly position: string | undefined;
public readonly internalPosition: string | undefined;
public readonly internalQuery: string | undefined;
public readonly where: string | undefined;
public readonly schema: string | undefined;
public readonly table: string | undefined;
public readonly column: string | undefined;
public readonly dataType: string | undefined;
public readonly constraint: string | undefined;
public readonly file: string | undefined;
public readonly line: string | undefined;
public readonly routine: string | undefined;
constructor(
message: string,
options: {
code: string;
errno?: string | undefined;
detail?: string;
hint?: string | undefined;
severity?: string | undefined;
position?: string | undefined;
internalPosition?: string;
internalQuery?: string;
where?: string | undefined;
schema?: string;
table?: string | undefined;
column?: string | undefined;
dataType?: string | undefined;
constraint?: string;
file?: string | undefined;
line?: string | undefined;
routine?: string | undefined;
},
);
}
class MySQLError extends SQLError {
public readonly code: string;
public readonly errno: number | undefined;
public readonly sqlState: string | undefined;
constructor(message: string, options: { code: string; errno: number | undefined; sqlState: string | undefined });
}
class SQLiteError extends SQLError {
public readonly code: string;
public readonly errno: number;
public readonly byteOffset?: number | undefined;
constructor(message: string, options: { code: string; errno: number; byteOffset?: number | undefined });
}
type AwaitPromisesArray<T extends Array<PromiseLike<any>>> = {
[K in keyof T]: Awaited<T[K]>;
};
type ContextCallbackResult<T> = T extends Array<PromiseLike<any>> ? AwaitPromisesArray<T> : Awaited<T>;
type ContextCallback<T, SQL> = (sql: SQL) => Bun.MaybePromise<T>;
interface SQLiteOptions extends BunSQLite.DatabaseOptions {
adapter?: "sqlite";
/**
* Specify the path to the database file
*
* Examples:
*
* - `sqlite://:memory:`
* - `sqlite://./path/to/database.db`
* - `sqlite:///Users/bun/projects/my-app/database.db`
* - `./dev.db`
* - `:memory:`
*
* @default ":memory:"
*/
filename?: URL | ":memory:" | (string & {}) | undefined;
/**
* Callback executed when a connection attempt completes (SQLite)
* Receives an Error on failure, or null on success.
*/
onconnect?: ((err: Error | null) => void) | undefined;
/**
* Callback executed when a connection is closed (SQLite)
* Receives the closing Error or null.
*/
onclose?: ((err: Error | null) => void) | undefined;
}
interface PostgresOrMySQLOptions {
/**
* Connection URL (can be string or URL object)
*/
url?: URL | string | undefined;
/**
* Database server hostname
* @default "localhost"
*/
host?: string | undefined;
/**
* Database server hostname (alias for host)
* @deprecated Prefer {@link host}
* @default "localhost"
*/
hostname?: string | undefined;
/**
* Database server port number
* @default 5432
*/
port?: number | string | undefined;
/**
* Database user for authentication
* @default "postgres"
*/
username?: string | undefined;
/**
* Database user for authentication (alias for username)
* @deprecated Prefer {@link username}
* @default "postgres"
*/
user?: string | undefined;
/**
* Database password for authentication
* @default ""
*/
password?: string | (() => MaybePromise<string>) | undefined;
/**
* Database password for authentication (alias for password)
* @deprecated Prefer {@link password}
* @default ""
*/
pass?: string | (() => MaybePromise<string>) | undefined;
/**
* Name of the database to connect to
* @default The username value
*/
database?: string | undefined;
/**
* Name of the database to connect to (alias for database)
* @deprecated Prefer {@link database}
* @default The username value
*/
db?: string | undefined;
/**
* Database adapter/driver to use
* @default "postgres"
*/
adapter?: "postgres" | "mysql" | "mariadb";
/**
* Maximum time in seconds to wait for connection to become available
* @default 0 (no timeout)
*/
idleTimeout?: number | undefined;
/**
* Maximum time in seconds to wait for connection to become available (alias for idleTimeout)
* @deprecated Prefer {@link idleTimeout}
* @default 0 (no timeout)
*/
idle_timeout?: number | undefined;
/**
* Maximum time in seconds to wait when establishing a connection
* @default 30
*/
connectionTimeout?: number | undefined;
/**
* Maximum time in seconds to wait when establishing a connection (alias for connectionTimeout)
* @deprecated Prefer {@link connectionTimeout}
* @default 30
*/
connection_timeout?: number | undefined;
/**
* Maximum time in seconds to wait when establishing a connection (alias
* for connectionTimeout)
* @deprecated Prefer {@link connectionTimeout}
* @default 30
*/
connectTimeout?: number | undefined;
/**
* Maximum time in seconds to wait when establishing a connection (alias
* for connectionTimeout)
* @deprecated Prefer {@link connectionTimeout}
* @default 30
*/
connect_timeout?: number | undefined;
/**
* Maximum lifetime in seconds of a connection
* @default 0 (no maximum lifetime)
*/
maxLifetime?: number | undefined;
/**
* Maximum lifetime in seconds of a connection (alias for maxLifetime)
* @deprecated Prefer {@link maxLifetime}
* @default 0 (no maximum lifetime)
*/
max_lifetime?: number | undefined;
/**
* Whether to use TLS/SSL for the connection
* @default false
*/
tls?: TLSOptions | boolean | undefined;
/**
* Whether to use TLS/SSL for the connection (alias for tls)
* @default false
*/
ssl?: TLSOptions | boolean | undefined;
/**
* Unix domain socket path for connection
* @default undefined
*/
path?: string | undefined;
/**
* Callback executed when a connection attempt completes
* Receives an Error on failure, or null on success.
*/
onconnect?: ((err: Error | null) => void) | undefined;
/**
* Callback executed when a connection is closed
* Receives the closing Error or null.
*/
onclose?: ((err: Error | null) => void) | undefined;
/**
* Postgres client runtime configuration options
*
* @see https://www.postgresql.org/docs/current/runtime-config-client.html
*/
connection?: Record<string, string | boolean | number> | undefined;
/**
* Maximum number of connections in the pool
* @default 10
*/
max?: number | undefined;
/**
* By default values outside i32 range are returned as strings. If this is
* true, values outside i32 range are returned as BigInts.
* @default false
*/
bigint?: boolean | undefined;
/**
* Automatic creation of prepared statements
* @default true
*/
prepare?: boolean | undefined;
}
/**
* Configuration options for SQL client connection and behavior
*
* @example
* ```ts
* const config: Bun.SQL.Options = {
* host: 'localhost',
* port: 5432,
* user: 'dbuser',
* password: 'secretpass',
* database: 'myapp',
* idleTimeout: 30,
* max: 20,
* onconnect: (client) => {
* console.log('Connected to database');
* }
* };
* ```
*/
type Options = SQLiteOptions | PostgresOrMySQLOptions;
/**
* Represents a SQL query that can be executed, with additional control
* methods Extends Promise to allow for async/await usage
*/
interface Query<T> extends Promise<T> {
/**
* Indicates if the query is currently executing
*/
active: boolean;
/**
* Indicates if the query has been cancelled
*/
cancelled: boolean;
/**
* Cancels the executing query
*/
cancel(): Query<T>;
/**
* Executes the query as a simple query, no parameters are allowed but can
* execute multiple commands separated by semicolons
*/
simple(): Query<T>;
/**
* Executes the query
*/
execute(): Query<T>;
/**
* Returns the raw query result
*/
raw(): Query<T>;
/**
* Returns only the values from the query result
*/
values(): Query<T>;
}
/**
* Callback function type for transaction contexts
* @param sql Function to execute SQL queries within the transaction
*/
type TransactionContextCallback<T> = ContextCallback<T, TransactionSQL>;
/**
* Callback function type for savepoint contexts
* @param sql Function to execute SQL queries within the savepoint
*/
type SavepointContextCallback<T> = ContextCallback<T, SavepointSQL>;
/**
* SQL.Helper represents a parameter or serializable
* value inside of a query.
*
* @example
* ```ts
* const helper = sql(users, 'id');
* await sql`insert into users ${helper}`;
* ```
*/
interface Helper<T> {
readonly value: T[];
readonly columns: (keyof T)[];
}
}
interface SQL extends AsyncDisposable {
/**
* Executes a SQL query using template literals
* @example
* ```ts
* const [user] = await sql<Users[]>`select * from users where id = ${1}`;
* ```
*/
<T = any>(strings: TemplateStringsArray, ...values: unknown[]): SQL.Query<T>;
/**
* Execute a SQL query using a string
*
* @example
* ```ts
* const users = await sql<User[]>`SELECT * FROM users WHERE id = ${1}`;
* ```
*/
<T = any>(string: string): SQL.Query<T>;
/**
* Helper function for inserting an object into a query
*
* @example
* ```ts
* // Insert an object
* const result = await sql`insert into users ${sql(users)} returning *`;
*
* // Or pick specific columns
* const result = await sql`insert into users ${sql(users, "id", "name")} returning *`;
*
* // Or a single object
* const result = await sql`insert into users ${sql(user)} returning *`;
* ```
*/
<T extends { [Key in PropertyKey]: unknown }>(obj: T | T[] | readonly T[]): SQL.Helper<T>; // Contributor note: This is the same as the signature below with the exception of the columns and the Pick<T, Keys>
/**
* Helper function for inserting an object into a query, supporting specific columns
*
* @example
* ```ts
* // Insert an object
* const result = await sql`insert into users ${sql(users)} returning *`;
*
* // Or pick specific columns
* const result = await sql`insert into users ${sql(users, "id", "name")} returning *`;
*
* // Or a single object
* const result = await sql`insert into users ${sql(user)} returning *`;
* ```
*/
<T extends { [Key in PropertyKey]: unknown }, Keys extends keyof T = keyof T>(
obj: T | T[] | readonly T[],
...columns: readonly Keys[]
): SQL.Helper<Pick<T, Keys>>; // Contributor note: This is the same as the signature above with the exception of this signature tracking keys
/**
* Helper function for inserting any serializable value into a query
*
* @example
* ```ts
* const result = await sql`SELECT * FROM users WHERE id IN ${sql([1, 2, 3])}`;
* ```
*/
<T>(value: T): SQL.Helper<T>;
}
/**
* Main SQL client interface providing connection and transaction management
*/
class SQL {
/**
* Creates a new SQL client instance
*
* @param connectionString - The connection string for the SQL client
*
* @example
* ```ts
* const sql = new SQL("postgres://localhost:5432/mydb");
* const sql = new SQL(new URL("postgres://localhost:5432/mydb"));
* ```
*/
constructor(connectionString: string | URL);
/**
* Creates a new SQL client instance with options
*
* @param connectionString - The connection string for the SQL client
* @param options - The options for the SQL client
*
* @example
* ```ts
* const sql = new SQL("postgres://localhost:5432/mydb", { idleTimeout: 1000 });
* ```
*/
constructor(
connectionString: string | URL,
options: Bun.__internal.DistributedOmit<SQL.Options, "url" | "filename">,
);
/**
* Creates a new SQL client instance with options
*
* @param options - The options for the SQL client
*
* @example
* ```ts
* const sql = new SQL({ url: "postgres://localhost:5432/mydb", idleTimeout: 1000 });
* ```
*/
constructor(options?: SQL.Options);
/**
* Current client options
*/
options: Bun.__internal.DistributedMerge<SQL.Options>;
/**
* Commits a distributed transaction also know as prepared transaction in postgres or XA transaction in MySQL
*
* @param name - The name of the distributed transaction
*
* @throws {Error} If the adapter does not support distributed transactions (e.g., SQLite)
*
* @example
* ```ts
* await sql.commitDistributed("my_distributed_transaction");
* ```
*/
commitDistributed(name: string): Promise<void>;
/**
* Rolls back a distributed transaction also know as prepared transaction in postgres or XA transaction in MySQL
*
* @param name - The name of the distributed transaction
*
* @throws {Error} If the adapter does not support distributed transactions (e.g., SQLite)
*
* @example
* ```ts
* await sql.rollbackDistributed("my_distributed_transaction");
* ```
*/
rollbackDistributed(name: string): Promise<void>;
/** Waits for the database connection to be established
*
* @example
* ```ts
* await sql.connect();
* ```
*/
connect(): Promise<SQL>;
/**
* Closes the database connection with optional timeout in seconds. If timeout is 0, it will close immediately, if is not provided it will wait for all queries to finish before closing.
*
* @param options - The options for the close
*
* @example
* ```ts
* await sql.close({ timeout: 1 });
* ```
*/
close(options?: { timeout?: number }): Promise<void>;
/**
* Closes the database connection with optional timeout in seconds. If timeout is 0, it will close immediately, if is not provided it will wait for all queries to finish before closing.
* This is an alias of {@link SQL.close}
*
* @param options - The options for the close
*
* @example
* ```ts
* await sql.end({ timeout: 1 });
* ```
*/
end(options?: { timeout?: number }): Promise<void>;
/**
* Flushes any pending operations
*
* @throws {Error} If the adapter does not support flushing (e.g., SQLite)
*
* @example
* ```ts
* sql.flush();
* ```
*/
flush(): void;
/**
* The reserve method pulls out a connection from the pool, and returns a client that wraps the single connection.
*
* This can be used for running queries on an isolated connection.
* Calling reserve in a reserved Sql will return a new reserved connection, not the same connection (behavior matches postgres package).
*
* @throws {Error} If the adapter does not support connection pooling (e.g., SQLite)s
*
* @example
* ```ts
* const reserved = await sql.reserve();
* await reserved`select * from users`;
* await reserved.release();
* // with in a production scenario would be something more like
* const reserved = await sql.reserve();
* try {
* // ... queries
* } finally {
* await reserved.release();
* }
*
* // Bun supports Symbol.dispose and Symbol.asyncDispose
* // always release after context (safer)
* using reserved = await sql.reserve()
* await reserved`select * from users`
* ```
*/
reserve(): Promise<ReservedSQL>;
/**
* Begins a new transaction.
*
* Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.begin will resolve with the returned value from the callback function.
* BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
* @example
* const [user, account] = await sql.begin(async sql => {
* const [user] = await sql`
* insert into users (
* name
* ) values (
* 'Murray'
* )
* returning *
* `
* const [account] = await sql`
* insert into accounts (
* user_id
* ) values (
* ${ user.user_id }
* )
* returning *
* `
* return [user, account]
* })
*/
begin<const T>(fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
/**
* Begins a new transaction with options.
*
* Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.begin will resolve with the returned value from the callback function.
* BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
* @example
* const [user, account] = await sql.begin("read write", async sql => {
* const [user] = await sql`
* insert into users (
* name
* ) values (
* 'Murray'
* )
* returning *
* `
* const [account] = await sql`
* insert into accounts (
* user_id
* ) values (
* ${ user.user_id }
* )
* returning *
* `
* return [user, account]
* })
*/
begin<const T>(options: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
/**
* Alternative method to begin a transaction.
*
* Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.transaction will resolve with the returned value from the callback function.
* BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
* @alias begin
* @example
* const [user, account] = await sql.transaction(async sql => {
* const [user] = await sql`
* insert into users (
* name
* ) values (
* 'Murray'
* )
* returning *
* `
* const [account] = await sql`
* insert into accounts (
* user_id
* ) values (
* ${ user.user_id }
* )
* returning *
* `
* return [user, account]
* })
*/
transaction<const T>(fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
/**
* Alternative method to begin a transaction with options
* Will reserve a connection for the transaction and supply a scoped sql instance for all transaction uses in the callback function. sql.transaction will resolve with the returned value from the callback function.
* BEGIN is automatically sent with the optional options, and if anything fails ROLLBACK will be called so the connection can be released and execution can continue.
*
* @alias {@link begin}
*
* @example
* const [user, account] = await sql.transaction("read write", async sql => {
* const [user] = await sql`
* insert into users (
* name
* ) values (
* 'Murray'
* )
* returning *
* `
* const [account] = await sql`
* insert into accounts (
* user_id
* ) values (
* ${ user.user_id }
* )
* returning *
* `
* return [user, account]
* });
*/
transaction<const T>(options: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
/**
* Begins a distributed transaction
* Also know as Two-Phase Commit, in a distributed transaction, Phase 1 involves the coordinator preparing nodes by ensuring data is written and ready to commit, while Phase 2 finalizes with nodes committing or rolling back based on the coordinator's decision, ensuring durability and releasing locks.
* In PostgreSQL and MySQL distributed transactions persist beyond the original session, allowing privileged users or coordinators to commit/rollback them, ensuring support for distributed transactions, recovery, and administrative tasks.
* beginDistributed will automatic rollback if any exception are not caught, and you can commit and rollback later if everything goes well.
* PostgreSQL natively supports distributed transactions using PREPARE TRANSACTION, while MySQL uses XA Transactions, and MSSQL also supports distributed/XA transactions. However, in MSSQL, distributed transactions are tied to the original session, the DTC coordinator, and the specific connection.
* These transactions are automatically committed or rolled back following the same rules as regular transactions, with no option for manual intervention from other sessions, in MSSQL distributed transactions are used to coordinate transactions using Linked Servers.
*
* @throws {Error} If the adapter does not support distributed transactions (e.g., SQLite)
*
* @example
* await sql.beginDistributed("numbers", async sql => {
* await sql`create table if not exists numbers (a int)`;
* await sql`insert into numbers values(1)`;
* });
* // later you can call
* await sql.commitDistributed("numbers");
* // or await sql.rollbackDistributed("numbers");
*/
beginDistributed<const T>(
name: string,
fn: SQL.TransactionContextCallback<T>,
): Promise<SQL.ContextCallbackResult<T>>;
/** Alternative method to begin a distributed transaction
* @alias {@link beginDistributed}
*/
distributed<const T>(name: string, fn: SQL.TransactionContextCallback<T>): Promise<SQL.ContextCallbackResult<T>>;
/**If you know what you're doing, you can use unsafe to pass any string you'd like.
* Please note that this can lead to SQL injection if you're not careful.
* You can also nest sql.unsafe within a safe sql expression. This is useful if only part of your fraction has unsafe elements.
* @example
* const result = await sql.unsafe(`select ${danger} from users where id = ${dragons}`)
*/
unsafe<T = any>(string: string, values?: any[]): SQL.Query<T>;
/**
* Reads a file and uses the contents as a query.
* Optional parameters can be used if the file includes $1, $2, etc
* @example
* const result = await sql.file("query.sql", [1, 2, 3]);
*/
file<T = any>(filename: string, values?: any[]): SQL.Query<T>;
}
/**
* SQL client
*/
const sql: SQL;
/**
* SQL client for PostgreSQL
*
* @deprecated Prefer {@link Bun.sql}
*/
const postgres: SQL;
/**
* Represents a savepoint within a transaction
*/
interface SavepointSQL extends SQL {}
}

View File

@@ -24,66 +24,6 @@
* | `null` | `NULL` |
*/
declare module "bun:sqlite" {
/**
* Options for {@link Database}
*/
export interface DatabaseOptions {
/**
* Open the database as read-only (no write operations, no create).
*
* Equivalent to {@link constants.SQLITE_OPEN_READONLY}
*/
readonly?: boolean;
/**
* Allow creating a new database
*
* Equivalent to {@link constants.SQLITE_OPEN_CREATE}
*/
create?: boolean;
/**
* Open the database as read-write
*
* Equivalent to {@link constants.SQLITE_OPEN_READWRITE}
*/
readwrite?: boolean;
/**
* When set to `true`, integers are returned as `bigint` types.
*
* When set to `false`, integers are returned as `number` types and truncated to 52 bits.
*
* @default false
* @since v1.1.14
*/
safeIntegers?: boolean;
/**
* When set to `false` or `undefined`:
* - Queries missing bound parameters will NOT throw an error
* - Bound named parameters in JavaScript need to exactly match the SQL query.
*
* @example
* ```ts
* const db = new Database(":memory:", { strict: false });
* db.run("INSERT INTO foo (name) VALUES ($name)", { $name: "foo" });
* ```
*
* When set to `true`:
* - Queries missing bound parameters will throw an error
* - Bound named parameters in JavaScript no longer need to be `$`, `:`, or `@`. The SQL query will remain prefixed.
*
* @example
* ```ts
* const db = new Database(":memory:", { strict: true });
* db.run("INSERT INTO foo (name) VALUES ($name)", { name: "foo" });
* ```
* @since v1.1.14
*/
strict?: boolean;
}
/**
* A SQLite3 database
*
@@ -113,6 +53,8 @@ declare module "bun:sqlite" {
* ```ts
* const db = new Database("mydb.sqlite", {readonly: true});
* ```
*
* @category Database
*/
export class Database implements Disposable {
/**
@@ -121,19 +63,96 @@ declare module "bun:sqlite" {
* @param filename The filename of the database to open. Pass an empty string (`""`) or `":memory:"` or undefined for an in-memory database.
* @param options defaults to `{readwrite: true, create: true}`. If a number, then it's treated as `SQLITE_OPEN_*` constant flags.
*/
constructor(filename?: string, options?: number | DatabaseOptions);
constructor(
filename?: string,
options?:
| number
| {
/**
* Open the database as read-only (no write operations, no create).
*
* Equivalent to {@link constants.SQLITE_OPEN_READONLY}
*/
readonly?: boolean;
/**
* Allow creating a new database
*
* Equivalent to {@link constants.SQLITE_OPEN_CREATE}
*/
create?: boolean;
/**
* Open the database as read-write
*
* Equivalent to {@link constants.SQLITE_OPEN_READWRITE}
*/
readwrite?: boolean;
/**
* When set to `true`, integers are returned as `bigint` types.
*
* When set to `false`, integers are returned as `number` types and truncated to 52 bits.
*
* @default false
* @since v1.1.14
*/
safeIntegers?: boolean;
/**
* When set to `false` or `undefined`:
* - Queries missing bound parameters will NOT throw an error
* - Bound named parameters in JavaScript need to exactly match the SQL query.
*
* @example
* ```ts
* const db = new Database(":memory:", { strict: false });
* db.run("INSERT INTO foo (name) VALUES ($name)", { $name: "foo" });
* ```
*
* When set to `true`:
* - Queries missing bound parameters will throw an error
* - Bound named parameters in JavaScript no longer need to be `$`, `:`, or `@`. The SQL query will remain prefixed.
*
* @example
* ```ts
* const db = new Database(":memory:", { strict: true });
* db.run("INSERT INTO foo (name) VALUES ($name)", { name: "foo" });
* ```
* @since v1.1.14
*/
strict?: boolean;
},
);
/**
* Open or create a SQLite3 databases
*
* @param filename The filename of the database to open. Pass an empty string (`""`) or `":memory:"` or undefined for an in-memory database.
* @param options defaults to `{readwrite: true, create: true}`. If a number, then it's treated as `SQLITE_OPEN_*` constant flags.
*
* This is an alias of `new Database()`
*
* See {@link Database}
*/
static open(filename: string, options?: number | DatabaseOptions): Database;
static open(
filename: string,
options?:
| number
| {
/**
* Open the database as read-only (no write operations, no create).
*
* Equivalent to {@link constants.SQLITE_OPEN_READONLY}
*/
readonly?: boolean;
/**
* Allow creating a new database
*
* Equivalent to {@link constants.SQLITE_OPEN_CREATE}
*/
create?: boolean;
/**
* Open the database as read-write
*
* Equivalent to {@link constants.SQLITE_OPEN_READWRITE}
*/
readwrite?: boolean;
},
): Database;
/**
* Execute a SQL query **without returning any results**.
@@ -184,11 +203,8 @@ declare module "bun:sqlite" {
* @returns `Database` instance
*/
run<ParamsType extends SQLQueryBindings[]>(sql: string, ...bindings: ParamsType[]): Changes;
/**
* This is an alias of {@link Database.run}
*
* @deprecated Prefer {@link Database.run}
*/
exec<ParamsType extends SQLQueryBindings[]>(sql: string, ...bindings: ParamsType[]): Changes;
@@ -335,16 +351,6 @@ declare module "bun:sqlite" {
*/
static setCustomSQLite(path: string): boolean;
/**
* Closes the database when using the async resource proposal
*
* @example
* ```
* using db = new Database("myapp.db");
* doSomethingWithDatabase(db);
* // Automatically closed when `db` goes out of scope
* ```
*/
[Symbol.dispose](): void;
/**
@@ -738,30 +744,6 @@ declare module "bun:sqlite" {
*/
values(...params: ParamsType): Array<Array<string | bigint | number | boolean | Uint8Array>>;
/**
* Execute the prepared statement and return all results as arrays of
* `Uint8Array`s.
*
* This is similar to `values()` but returns all values as Uint8Array
* objects, regardless of their original SQLite type.
*
* @param params optional values to bind to the statement. If omitted, the
* statement is run with the last bound values or no parameters if there are
* none.
*
* @example
* ```ts
* const stmt = db.prepare("SELECT * FROM foo WHERE bar = ?");
*
* stmt.raw("baz");
* // => [[Uint8Array(24)]]
*
* stmt.raw();
* // => [[Uint8Array(24)]]
* ```
*/
raw(...params: ParamsType): Array<Array<Uint8Array | null>>;
/**
* The names of the columns returned by the prepared statement.
* @example

View File

@@ -30,17 +30,13 @@ extern void __attribute((__noreturn__)) Bun__panic(const char* message, size_t l
#define BUN_PANIC(message) Bun__panic(message, sizeof(message) - 1)
#endif
extern void Bun__internal_ensureDateHeaderTimerIsEnabled(struct us_loop_t *loop);
void sweep_timer_cb(struct us_internal_callback_t *cb);
void us_internal_enable_sweep_timer(struct us_loop_t *loop) {
loop->data.sweep_timer_count++;
if (loop->data.sweep_timer_count == 1) {
if (loop->data.sweep_timer_count == 0) {
us_timer_set(loop->data.sweep_timer, (void (*)(struct us_timer_t *)) sweep_timer_cb, LIBUS_TIMEOUT_GRANULARITY * 1000, LIBUS_TIMEOUT_GRANULARITY * 1000);
Bun__internal_ensureDateHeaderTimerIsEnabled(loop);
}
loop->data.sweep_timer_count++;
}
void us_internal_disable_sweep_timer(struct us_loop_t *loop) {

View File

@@ -82,6 +82,19 @@ private:
static Loop *create(void *hint) {
Loop *loop = ((Loop *) us_create_loop(hint, wakeupCb, preCb, postCb, sizeof(LoopData)))->init();
/* We also need some timers (should live off the one 4 second timer rather) */
LoopData *loopData = (LoopData *) us_loop_ext((struct us_loop_t *) loop);
loopData->dateTimer = us_create_timer((struct us_loop_t *) loop, 1, sizeof(LoopData *));
loopData->updateDate();
memcpy(us_timer_ext(loopData->dateTimer), &loopData, sizeof(LoopData *));
us_timer_set(loopData->dateTimer, [](struct us_timer_t *t) {
LoopData *loopData;
memcpy(&loopData, us_timer_ext(t), sizeof(LoopData *));
loopData->updateDate();
}, 1000, 1000);
return loop;
}
@@ -133,7 +146,10 @@ public:
/* Freeing the default loop should be done once */
void free() {
LoopData *loopData = (LoopData *) us_loop_ext((us_loop_t *) this);
/* Stop and free dateTimer first */
us_timer_close(loopData->dateTimer, 1);
loopData->~LoopData();
/* uSockets will track whether this loop is owned by us or a borrowed alien loop */
us_loop_free((us_loop_t *) this);

View File

@@ -151,6 +151,8 @@ public:
ZlibContext *zlibContext = nullptr;
InflationStream *inflationStream = nullptr;
DeflationStream *deflationStream = nullptr;
us_timer_t *dateTimer;
};
}

View File

@@ -1,215 +0,0 @@
#!/usr/bin/env bun
import { spawnSync } from "child_process";
import { existsSync, mkdirSync } from "fs";
import { arch, platform } from "os";
import { join, resolve } from "path";
// Build configurations
type BuildConfig = "debug" | "release" | "lto";
// Parse command line arguments
const args = process.argv.slice(2);
const buildConfig: BuildConfig = (args[0] as BuildConfig) || "debug";
const validConfigs = ["debug", "release", "lto"];
if (!validConfigs.includes(buildConfig)) {
console.error(`Invalid build configuration: ${buildConfig}`);
console.error(`Valid configurations: ${validConfigs.join(", ")}`);
process.exit(1);
}
// Detect platform
const OS_NAME = platform().toLowerCase();
const ARCH_NAME_RAW = arch();
const IS_MAC = OS_NAME === "darwin";
const IS_LINUX = OS_NAME === "linux";
const IS_ARM64 = ARCH_NAME_RAW === "arm64" || ARCH_NAME_RAW === "aarch64";
// Paths
const ROOT_DIR = resolve(import.meta.dir, "..");
const WEBKIT_DIR = resolve(ROOT_DIR, "vendor/WebKit");
const WEBKIT_BUILD_DIR = join(WEBKIT_DIR, "WebKitBuild");
const WEBKIT_RELEASE_DIR = join(WEBKIT_BUILD_DIR, "Release");
const WEBKIT_DEBUG_DIR = join(WEBKIT_BUILD_DIR, "Debug");
const WEBKIT_RELEASE_DIR_LTO = join(WEBKIT_BUILD_DIR, "ReleaseLTO");
// Homebrew prefix detection
const HOMEBREW_PREFIX = IS_ARM64 ? "/opt/homebrew/" : "/usr/local/";
// Compiler detection
function findExecutable(names: string[]): string | null {
for (const name of names) {
const result = spawnSync("which", [name], { encoding: "utf8" });
if (result.status === 0) {
return result.stdout.trim();
}
}
return null;
}
const CC = findExecutable(["clang-19", "clang"]) || "clang";
const CXX = findExecutable(["clang++-19", "clang++"]) || "clang++";
// Build directory based on config
const getBuildDir = (config: BuildConfig) => {
switch (config) {
case "debug":
return WEBKIT_DEBUG_DIR;
case "lto":
return WEBKIT_RELEASE_DIR_LTO;
default:
return WEBKIT_RELEASE_DIR;
}
};
// Common CMake flags
const getCommonFlags = () => {
const flags = [
"-DPORT=JSCOnly",
"-DENABLE_STATIC_JSC=ON",
"-DALLOW_LINE_AND_COLUMN_NUMBER_IN_BUILTINS=ON",
"-DUSE_THIN_ARCHIVES=OFF",
"-DUSE_BUN_JSC_ADDITIONS=ON",
"-DUSE_BUN_EVENT_LOOP=ON",
"-DENABLE_FTL_JIT=ON",
"-G",
"Ninja",
`-DCMAKE_C_COMPILER=${CC}`,
`-DCMAKE_CXX_COMPILER=${CXX}`,
];
if (IS_MAC) {
flags.push(
"-DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON",
"-DBUN_FAST_TLS=ON",
"-DPTHREAD_JIT_PERMISSIONS_API=1",
"-DUSE_PTHREAD_JIT_PERMISSIONS_API=ON",
);
} else if (IS_LINUX) {
flags.push(
"-DJSEXPORT_PRIVATE=WTF_EXPORT_DECLARATION",
"-DUSE_VISIBILITY_ATTRIBUTE=1",
"-DENABLE_REMOTE_INSPECTOR=ON",
);
}
return flags;
};
// Build-specific CMake flags
const getBuildFlags = (config: BuildConfig) => {
const flags = [...getCommonFlags()];
switch (config) {
case "debug":
flags.push(
"-DCMAKE_BUILD_TYPE=Debug",
"-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON",
"-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
"-DENABLE_REMOTE_INSPECTOR=ON",
"-DUSE_VISIBILITY_ATTRIBUTE=1",
);
if (IS_MAC) {
// Enable address sanitizer by default on Mac debug builds
flags.push("-DENABLE_SANITIZERS=address");
// To disable asan, comment the line above and uncomment:
// flags.push("-DENABLE_MALLOC_HEAP_BREAKDOWN=ON");
}
break;
case "lto":
flags.push("-DCMAKE_BUILD_TYPE=Release", "-DCMAKE_C_FLAGS=-flto=full", "-DCMAKE_CXX_FLAGS=-flto=full");
break;
default: // release
flags.push("-DCMAKE_BUILD_TYPE=RelWithDebInfo");
break;
}
return flags;
};
// Environment variables for the build
const getBuildEnv = () => {
const env = { ...process.env };
const cflags = ["-ffat-lto-objects"];
const cxxflags = ["-ffat-lto-objects"];
if (IS_LINUX && buildConfig !== "lto") {
cflags.push("-Wl,--whole-archive");
cxxflags.push("-Wl,--whole-archive", "-DUSE_BUN_JSC_ADDITIONS=ON", "-DUSE_BUN_EVENT_LOOP=ON");
}
env.CFLAGS = (env.CFLAGS || "") + " " + cflags.join(" ");
env.CXXFLAGS = (env.CXXFLAGS || "") + " " + cxxflags.join(" ");
if (IS_MAC) {
env.ICU_INCLUDE_DIRS = `${HOMEBREW_PREFIX}opt/icu4c/include`;
}
return env;
};
// Run a command with proper error handling
function runCommand(command: string, args: string[], options: any = {}) {
console.log(`Running: ${command} ${args.join(" ")}`);
const result = spawnSync(command, args, {
stdio: "inherit",
...options,
});
if (result.error) {
console.error(`Failed to execute command: ${result.error.message}`);
process.exit(1);
}
if (result.status !== 0) {
console.error(`Command failed with exit code ${result.status}`);
process.exit(result.status || 1);
}
}
// Main build function
function buildJSC() {
const buildDir = getBuildDir(buildConfig);
const cmakeFlags = getBuildFlags(buildConfig);
const env = getBuildEnv();
console.log(`Building JSC with configuration: ${buildConfig}`);
console.log(`Build directory: ${buildDir}`);
// Create build directories
if (!existsSync(buildDir)) {
mkdirSync(buildDir, { recursive: true });
}
if (!existsSync(WEBKIT_DIR)) {
mkdirSync(WEBKIT_DIR, { recursive: true });
}
// Configure with CMake
console.log("\n📦 Configuring with CMake...");
runCommand("cmake", [...cmakeFlags, WEBKIT_DIR, buildDir], {
cwd: buildDir,
env,
});
// Build with CMake
console.log("\n🔨 Building JSC...");
const buildType = buildConfig === "debug" ? "Debug" : buildConfig === "lto" ? "Release" : "RelWithDebInfo";
runCommand("cmake", ["--build", buildDir, "--config", buildType, "--target", "jsc"], {
cwd: buildDir,
env,
});
console.log(`\n✅ JSC build completed successfully!`);
console.log(`Build output: ${buildDir}`);
}
// Entry point
if (import.meta.main) {
buildJSC();
}

View File

@@ -5,9 +5,7 @@ import { chmodSync, cpSync, existsSync, mkdirSync, readFileSync } from "node:fs"
import { basename, join, relative, resolve } from "node:path";
import {
formatAnnotationToHtml,
getSecret,
isCI,
isWindows,
parseAnnotations,
printEnvironment,
reportAnnotationToBuildKite,
@@ -216,47 +214,14 @@ function parseOptions(args, flags = []) {
async function spawn(command, args, options, label) {
const effectiveArgs = args.filter(Boolean);
const description = [command, ...effectiveArgs].map(arg => (arg.includes(" ") ? JSON.stringify(arg) : arg)).join(" ");
let env = options?.env;
console.log("$", description);
label ??= basename(command);
const pipe = process.env.CI === "true";
if (isBuildkite()) {
if (process.env.BUN_LINK_ONLY && isWindows) {
env ||= options?.env || { ...process.env };
// Pass signing secrets directly to the build process
// The PowerShell signing script will handle certificate decoding
env.SM_CLIENT_CERT_PASSWORD = getSecret("SM_CLIENT_CERT_PASSWORD", {
redact: true,
required: true,
});
env.SM_CLIENT_CERT_FILE = getSecret("SM_CLIENT_CERT_FILE", {
redact: true,
required: true,
});
env.SM_API_KEY = getSecret("SM_API_KEY", {
redact: true,
required: true,
});
env.SM_KEYPAIR_ALIAS = getSecret("SM_KEYPAIR_ALIAS", {
redact: true,
required: true,
});
env.SM_HOST = getSecret("SM_HOST", {
redact: true,
required: true,
});
}
}
const subprocess = nodeSpawn(command, effectiveArgs, {
stdio: pipe ? "pipe" : "inherit",
...options,
env,
});
let killedManually = false;

View File

@@ -182,37 +182,6 @@ if (options["quiet"]) {
isQuiet = true;
}
let newFiles = [];
let prFileCount = 0;
if (isBuildkite) {
try {
console.log("on buildkite: collecting new files from PR");
const per_page = 50;
for (let i = 1; i <= 5; i++) {
const res = await fetch(
`https://api.github.com/repos/oven-sh/bun/pulls/${process.env.BUILDKITE_PULL_REQUEST}/files?per_page=${per_page}&page=${i}`,
{
headers: {
Authorization: `Bearer ${getSecret("GITHUB_TOKEN")}`,
},
},
);
const doc = await res.json();
console.log(`-> page ${i}, found ${doc.length} items`);
if (doc.length === 0) break;
if (doc.length < per_page) break;
for (const { filename, status } of doc) {
prFileCount += 1;
if (status !== "added") continue;
newFiles.push(filename);
}
}
console.log(`- PR ${process.env.BUILDKITE_PULL_REQUEST}, ${prFileCount} files, ${newFiles.length} new files`);
} catch (e) {
console.error(e);
}
}
let coresDir;
if (options["coredump-upload"]) {
@@ -565,7 +534,6 @@ async function runTests() {
};
if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(testPath)) {
env.BUN_JSC_validateExceptionChecks = "1";
env.BUN_JSC_dumpSimulatedThrows = "1";
}
return runTest(title, async () => {
const { ok, error, stdout, crashes } = await spawnBun(execPath, {
@@ -1289,7 +1257,6 @@ async function spawnBunTest(execPath, testPath, options = { cwd }) {
};
if ((basename(execPath).includes("asan") || !isCI) && shouldValidateExceptions(relative(cwd, absPath))) {
env.BUN_JSC_validateExceptionChecks = "1";
env.BUN_JSC_dumpSimulatedThrows = "1";
}
const { ok, error, stdout, crashes } = await spawnBun(execPath, {
@@ -2019,9 +1986,6 @@ function formatTestToMarkdown(result, concise, retries) {
if (retries > 0) {
markdown += ` (${retries} ${retries === 1 ? "retry" : "retries"})`;
}
if (newFiles.includes(testTitle)) {
markdown += ` (new)`;
}
if (concise) {
markdown += "</li>\n";
@@ -2227,7 +2191,6 @@ function isAlwaysFailure(error) {
error.includes("illegal instruction") ||
error.includes("sigtrap") ||
error.includes("error: addresssanitizer") ||
error.includes("internal assertion failure") ||
error.includes("core dumped") ||
error.includes("crash reported")
);

View File

@@ -40,25 +40,7 @@ if ($args.Count -gt 0) {
$commandArgs = @($args[1..($args.Count - 1)] | % {$_})
}
# Don't print the full command as it may contain sensitive information like certificates
# Just show the command name and basic info
$displayArgs = @()
foreach ($arg in $commandArgs) {
if ($arg -match "^-") {
# Include flags
$displayArgs += $arg
} elseif ($arg -match "\.(mjs|js|ts|cmake|zig|cpp|c|h|exe)$") {
# Include file names
$displayArgs += $arg
} elseif ($arg.Length -gt 100) {
# Truncate long arguments (likely certificates or encoded data)
$displayArgs += "[REDACTED]"
} else {
$displayArgs += $arg
}
}
Write-Host "$ $command $displayArgs"
Write-Host "$ $command $commandArgs"
& $command $commandArgs
exit $LASTEXITCODE
}

View File

@@ -6,7 +6,6 @@ pub const StandaloneModuleGraph = struct {
bytes: []const u8 = "",
files: bun.StringArrayHashMap(File),
entry_point_id: u32 = 0,
compile_exec_argv: []const u8 = "",
// We never want to hit the filesystem for these files
// We use the `/$bunfs/` prefix to indicate that it's a virtual path
@@ -55,7 +54,7 @@ pub const StandaloneModuleGraph = struct {
// by normalized file path
pub fn find(this: *const StandaloneModuleGraph, name: []const u8) ?*File {
if (!isBunStandaloneFilePath(name)) {
if (!isBunStandaloneFilePath(base_path)) {
return null;
}
@@ -280,7 +279,6 @@ pub const StandaloneModuleGraph = struct {
byte_count: usize = 0,
modules_ptr: bun.StringPointer = .{},
entry_point_id: u32 = 0,
compile_exec_argv_ptr: bun.StringPointer = .{},
};
const trailer = "\n---- Bun! ----\n";
@@ -325,7 +323,6 @@ pub const StandaloneModuleGraph = struct {
.bytes = raw_bytes[0..offsets.byte_count],
.files = modules,
.entry_point_id = offsets.entry_point_id,
.compile_exec_argv = sliceToZ(raw_bytes, offsets.compile_exec_argv_ptr),
};
}
@@ -341,14 +338,14 @@ pub const StandaloneModuleGraph = struct {
return bytes[ptr.offset..][0..ptr.length :0];
}
pub fn toBytes(allocator: std.mem.Allocator, prefix: []const u8, output_files: []const bun.options.OutputFile, output_format: bun.options.Format, compile_exec_argv: []const u8) ![]u8 {
pub fn toBytes(allocator: std.mem.Allocator, prefix: []const u8, output_files: []const bun.options.OutputFile, output_format: bun.options.Format) ![]u8 {
var serialize_trace = bun.perf.trace("StandaloneModuleGraph.serialize");
defer serialize_trace.end();
var entry_point_id: ?usize = null;
var string_builder = bun.StringBuilder{};
var module_count: usize = 0;
for (output_files) |*output_file| {
for (output_files) |output_file| {
string_builder.countZ(output_file.dest_path);
string_builder.countZ(prefix);
if (output_file.value == .buffer) {
@@ -382,7 +379,6 @@ pub const StandaloneModuleGraph = struct {
string_builder.cap += trailer.len;
string_builder.cap += 16;
string_builder.cap += @sizeOf(Offsets);
string_builder.countZ(compile_exec_argv);
try string_builder.allocate(allocator);
@@ -395,7 +391,7 @@ pub const StandaloneModuleGraph = struct {
var source_map_arena = bun.ArenaAllocator.init(allocator);
defer source_map_arena.deinit();
for (output_files) |*output_file| {
for (output_files) |output_file| {
if (!output_file.output_kind.isFileInStandaloneMode()) {
continue;
}
@@ -467,7 +463,6 @@ pub const StandaloneModuleGraph = struct {
const offsets = Offsets{
.entry_point_id = @as(u32, @truncate(entry_point_id.?)),
.modules_ptr = string_builder.appendCount(std.mem.sliceAsBytes(modules.items)),
.compile_exec_argv_ptr = string_builder.appendCountZ(compile_exec_argv),
.byte_count = string_builder.len,
};
@@ -496,21 +491,6 @@ pub const StandaloneModuleGraph = struct {
windows_hide_console: bool = false,
};
pub const CompileResult = union(enum) {
success: void,
error_message: []const u8,
pub fn fail(msg: []const u8) CompileResult {
return .{ .error_message = msg };
}
pub fn deinit(this: *const @This()) void {
if (this.* == .error_message) {
bun.default_allocator.free(this.error_message);
}
}
};
pub fn inject(bytes: []const u8, self_exe: [:0]const u8, inject_options: InjectOptions, target: *const CompileTarget) bun.FileDescriptor {
var buf: bun.PathBuffer = undefined;
var zname: [:0]const u8 = bun.span(bun.fs.FileSystem.instance.tmpname("bun-build", &buf, @as(u64, @bitCast(std.time.milliTimestamp()))) catch |err| {
@@ -647,7 +627,6 @@ pub const StandaloneModuleGraph = struct {
cleanup(zname, fd);
Global.exit(1);
};
break :brk fd;
};
@@ -837,43 +816,7 @@ pub const StandaloneModuleGraph = struct {
var needs_download: bool = true;
const dest_z = target.exePath(&exe_path_buf, version_str, env, &needs_download);
if (needs_download) {
target.downloadToPath(env, allocator, dest_z) catch |err| {
// For CLI, provide detailed error messages and exit
switch (err) {
error.TargetNotFound => {
Output.errGeneric(
\\Does this target and version of Bun exist?
\\
\\404 downloading {} from npm registry
, .{target.*});
},
error.NetworkError => {
Output.errGeneric(
\\Failed to download cross-compilation target.
\\
\\Network error downloading {} from npm registry
, .{target.*});
},
error.InvalidResponse => {
Output.errGeneric(
\\Failed to verify the integrity of the downloaded tarball.
\\
\\The downloaded content for {} appears to be corrupted
, .{target.*});
},
error.ExtractionFailed => {
Output.errGeneric(
\\Failed to extract the downloaded tarball.
\\
\\Could not extract executable for {}
, .{target.*});
},
else => {
Output.errGeneric("Failed to download {}: {s}", .{ target.*, @errorName(err) });
},
}
Global.exit(1);
};
try target.downloadToPath(env, allocator, dest_z);
}
return try allocator.dupeZ(u8, dest_z);
@@ -890,68 +833,27 @@ pub const StandaloneModuleGraph = struct {
output_format: bun.options.Format,
windows_hide_console: bool,
windows_icon: ?[]const u8,
compile_exec_argv: []const u8,
self_exe_path: ?[]const u8,
) !CompileResult {
const bytes = toBytes(allocator, module_prefix, output_files, output_format, compile_exec_argv) catch |err| {
return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to generate module graph bytes: {s}", .{@errorName(err)}) catch "failed to generate module graph bytes");
};
if (bytes.len == 0) return CompileResult.fail("no output files to bundle");
defer allocator.free(bytes);
) !void {
const bytes = try toBytes(allocator, module_prefix, output_files, output_format);
if (bytes.len == 0) return;
var free_self_exe = false;
const self_exe = if (self_exe_path) |path| brk: {
free_self_exe = true;
break :brk allocator.dupeZ(u8, path) catch bun.outOfMemory();
} else if (target.isDefault())
bun.selfExePath() catch |err| {
return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to get self executable path: {s}", .{@errorName(err)}) catch "failed to get self executable path");
}
else blk: {
var exe_path_buf: bun.PathBuffer = undefined;
var version_str_buf: [1024]u8 = undefined;
const version_str = std.fmt.bufPrintZ(&version_str_buf, "{}", .{target}) catch {
return CompileResult.fail("failed to format target version string");
};
var needs_download: bool = true;
const dest_z = target.exePath(&exe_path_buf, version_str, env, &needs_download);
if (needs_download) {
target.downloadToPath(env, allocator, dest_z) catch |err| {
const msg = switch (err) {
error.TargetNotFound => std.fmt.allocPrint(allocator, "Target platform '{}' is not available for download. Check if this version of Bun supports this target.", .{target}) catch "Target platform not available for download",
error.NetworkError => std.fmt.allocPrint(allocator, "Network error downloading executable for '{}'. Check your internet connection and proxy settings.", .{target}) catch "Network error downloading executable",
error.InvalidResponse => std.fmt.allocPrint(allocator, "Downloaded file for '{}' appears to be corrupted. Please try again.", .{target}) catch "Downloaded file is corrupted",
error.ExtractionFailed => std.fmt.allocPrint(allocator, "Failed to extract executable for '{}'. The download may be incomplete.", .{target}) catch "Failed to extract downloaded executable",
error.UnsupportedTarget => std.fmt.allocPrint(allocator, "Target '{}' is not supported", .{target}) catch "Unsupported target",
else => std.fmt.allocPrint(allocator, "Failed to download '{}': {s}", .{ target, @errorName(err) }) catch "Download failed",
};
return CompileResult.fail(msg);
};
}
free_self_exe = true;
break :blk allocator.dupeZ(u8, dest_z) catch bun.outOfMemory();
};
defer if (free_self_exe) {
allocator.free(self_exe);
};
var fd = inject(
const fd = inject(
bytes,
self_exe,
if (target.isDefault())
bun.selfExePath() catch |err| {
Output.err(err, "failed to get self executable path", .{});
Global.exit(1);
}
else
download(allocator, target, env) catch |err| {
Output.err(err, "failed to download cross-compiled bun executable", .{});
Global.exit(1);
},
.{ .windows_hide_console = windows_hide_console },
target,
);
defer if (fd != bun.invalid_fd) fd.close();
bun.debugAssert(fd.kind == .system);
if (Environment.isPosix) {
// Set executable permissions (0o755 = rwxr-xr-x) - makes it executable for owner, readable/executable for group and others
_ = Syscall.fchmod(fd, 0o755);
}
if (Environment.isWindows) {
var outfile_buf: bun.OSPathBuffer = undefined;
const outfile_slice = brk: {
@@ -963,59 +865,52 @@ pub const StandaloneModuleGraph = struct {
};
bun.windows.moveOpenedFileAtLoose(fd, .fromStdDir(root_dir), outfile_slice, true).unwrap() catch |err| {
_ = bun.windows.deleteOpenedFile(fd);
if (err == error.EISDIR) {
return CompileResult.fail(std.fmt.allocPrint(allocator, "{s} is a directory. Please choose a different --outfile or delete the directory", .{outfile}) catch "outfile is a directory");
Output.errGeneric("{} is a directory. Please choose a different --outfile or delete the directory", .{bun.fmt.utf16(outfile_slice)});
} else {
return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to move executable to result path: {s}", .{@errorName(err)}) catch "failed to move executable");
Output.err(err, "failed to move executable to result path", .{});
}
};
_ = bun.windows.deleteOpenedFile(fd);
Global.exit(1);
};
fd.close();
fd = bun.invalid_fd;
if (windows_icon) |icon_utf8| {
var icon_buf: bun.OSPathBuffer = undefined;
const icon = bun.strings.toWPathNormalized(&icon_buf, icon_utf8);
bun.windows.rescle.setIcon(outfile_slice, icon) catch |err| {
Output.debug("Warning: Failed to set Windows icon for executable: {s}", .{@errorName(err)});
bun.windows.rescle.setIcon(outfile_slice, icon) catch {
Output.warn("Failed to set executable icon", .{});
};
}
return .success;
return;
}
var buf: bun.PathBuffer = undefined;
const temp_location = bun.getFdPath(fd, &buf) catch |err| {
return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to get path for fd: {s}", .{@errorName(err)}) catch "failed to get path for file descriptor");
};
const temp_posix = std.posix.toPosixPath(temp_location) catch |err| {
return CompileResult.fail(std.fmt.allocPrint(allocator, "path too long: {s}", .{@errorName(err)}) catch "path too long");
};
const outfile_basename = std.fs.path.basename(outfile);
const outfile_posix = std.posix.toPosixPath(outfile_basename) catch |err| {
return CompileResult.fail(std.fmt.allocPrint(allocator, "outfile name too long: {s}", .{@errorName(err)}) catch "outfile name too long");
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to get path for fd: {s}", .{@errorName(err)});
Global.exit(1);
};
bun.sys.moveFileZWithHandle(
fd,
bun.FD.cwd(),
bun.sliceTo(&temp_posix, 0),
bun.sliceTo(&(try std.posix.toPosixPath(temp_location)), 0),
.fromStdDir(root_dir),
bun.sliceTo(&outfile_posix, 0),
bun.sliceTo(&(try std.posix.toPosixPath(std.fs.path.basename(outfile))), 0),
) catch |err| {
fd.close();
fd = bun.invalid_fd;
_ = Syscall.unlink(&temp_posix);
if (err == error.IsDir or err == error.EISDIR) {
return CompileResult.fail(std.fmt.allocPrint(allocator, "{s} is a directory. Please choose a different --outfile or delete the directory", .{outfile}) catch "outfile is a directory");
Output.prettyErrorln("<r><red>error<r><d>:<r> {} is a directory. Please choose a different --outfile or delete the directory", .{bun.fmt.quote(outfile)});
} else {
return CompileResult.fail(std.fmt.allocPrint(allocator, "failed to rename {s} to {s}: {s}", .{ temp_location, outfile, @errorName(err) }) catch "failed to rename file");
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to rename {s} to {s}: {s}", .{ temp_location, outfile, @errorName(err) });
}
};
_ = Syscall.unlink(
&(try std.posix.toPosixPath(temp_location)),
);
return .success;
Global.exit(1);
};
}
pub fn fromExecutable(allocator: std.mem.Allocator) !?StandaloneModuleGraph {

View File

@@ -226,6 +226,7 @@ pub fn BSSList(comptime ValueType: type, comptime _count: anytype) type {
}
};
const Allocator = std.mem.Allocator;
const Self = @This();
allocator: Allocator,
@@ -311,6 +312,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
return struct {
pub const Overflow = OverflowList([]const u8, count / 4);
const Allocator = std.mem.Allocator;
const Self = @This();
backing_buf: [count * item_length]u8,
@@ -494,6 +496,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_keys: bool, comptime estimated_key_length: usize, comptime remove_trailing_slashes: bool) type {
const max_index = count - 1;
const BSSMapType = struct {
const Allocator = std.mem.Allocator;
const Self = @This();
const Overflow = OverflowList(ValueType, count / 4);
@@ -770,36 +773,6 @@ pub fn BSSMap(comptime ValueType: type, comptime count: anytype, comptime store_
};
}
pub fn isDefault(allocator: Allocator) bool {
return allocator.vtable == c_allocator.vtable;
}
/// Allocate memory for a value of type `T` using the provided allocator, and initialize the memory
/// with `value`.
///
/// If `allocator` is `bun.default_allocator`, this will internally use `bun.tryNew` to benefit from
/// the added assertions.
pub fn create(comptime T: type, allocator: Allocator, value: T) OOM!*T {
if ((comptime Environment.allow_assert) and isDefault(allocator)) {
return bun.tryNew(T, value);
}
const ptr = try allocator.create(T);
ptr.* = value;
return ptr;
}
/// Free memory previously allocated by `create`.
///
/// The memory must have been allocated by the `create` function in this namespace, not
/// directly by `allocator.create`.
pub fn destroy(allocator: Allocator, ptr: anytype) void {
if ((comptime Environment.allow_assert) and isDefault(allocator)) {
bun.destroy(ptr);
} else {
allocator.destroy(ptr);
}
}
const basic = if (bun.use_mimalloc)
@import("./allocators/basic.zig")
else
@@ -807,7 +780,6 @@ else
const Environment = @import("./env.zig");
const std = @import("std");
const Allocator = std.mem.Allocator;
const bun = @import("bun");
const OOM = bun.OOM;

View File

@@ -3,7 +3,7 @@ const log = bun.Output.scoped(.mimalloc, .hidden);
fn mimalloc_free(
_: *anyopaque,
buf: []u8,
alignment: Alignment,
alignment: mem.Alignment,
_: usize,
) void {
if (comptime Environment.enable_logs)
@@ -23,7 +23,7 @@ fn mimalloc_free(
}
const MimallocAllocator = struct {
fn alignedAlloc(len: usize, alignment: Alignment) ?[*]u8 {
fn alignedAlloc(len: usize, alignment: mem.Alignment) ?[*]u8 {
if (comptime Environment.enable_logs)
log("mi_alloc({d}, {d})", .{ len, alignment.toByteUnits() });
@@ -48,15 +48,15 @@ const MimallocAllocator = struct {
return mimalloc.mi_malloc_size(ptr);
}
fn alloc_with_default_allocator(_: *anyopaque, len: usize, alignment: Alignment, _: usize) ?[*]u8 {
fn alloc_with_default_allocator(_: *anyopaque, len: usize, alignment: mem.Alignment, _: usize) ?[*]u8 {
return alignedAlloc(len, alignment);
}
fn resize_with_default_allocator(_: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
fn resize_with_default_allocator(_: *anyopaque, buf: []u8, _: mem.Alignment, new_len: usize, _: usize) bool {
return mimalloc.mi_expand(buf.ptr, new_len) != null;
}
fn remap_with_default_allocator(_: *anyopaque, buf: []u8, alignment: Alignment, new_len: usize, _: usize) ?[*]u8 {
fn remap_with_default_allocator(_: *anyopaque, buf: []u8, alignment: mem.Alignment, new_len: usize, _: usize) ?[*]u8 {
return @ptrCast(mimalloc.mi_realloc_aligned(buf.ptr, new_len, alignment.toByteUnits()));
}
@@ -76,7 +76,7 @@ const c_allocator_vtable = &Allocator.VTable{
};
const ZAllocator = struct {
fn alignedAlloc(len: usize, alignment: Alignment) ?[*]u8 {
fn alignedAlloc(len: usize, alignment: mem.Alignment) ?[*]u8 {
log("ZAllocator.alignedAlloc: {d}\n", .{len});
const ptr = if (mimalloc.mustUseAlignedAlloc(alignment))
@@ -100,11 +100,11 @@ const ZAllocator = struct {
return mimalloc.mi_malloc_size(ptr);
}
fn alloc_with_z_allocator(_: *anyopaque, len: usize, alignment: Alignment, _: usize) ?[*]u8 {
fn alloc_with_z_allocator(_: *anyopaque, len: usize, alignment: mem.Alignment, _: usize) ?[*]u8 {
return alignedAlloc(len, alignment);
}
fn resize_with_z_allocator(_: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
fn resize_with_z_allocator(_: *anyopaque, buf: []u8, _: mem.Alignment, new_len: usize, _: usize) bool {
if (new_len <= buf.len) {
return true;
}
@@ -135,7 +135,7 @@ pub const z_allocator = Allocator{
const z_allocator_vtable = Allocator.VTable{
.alloc = &ZAllocator.alloc_with_z_allocator,
.resize = &ZAllocator.resize_with_z_allocator,
.remap = &Allocator.noRemap,
.remap = &std.mem.Allocator.noRemap,
.free = &ZAllocator.free_with_z_allocator,
};
@@ -150,5 +150,5 @@ const std = @import("std");
const bun = @import("bun");
const mimalloc = bun.mimalloc;
const Alignment = std.mem.Alignment;
const Allocator = std.mem.Allocator;
const mem = @import("std").mem;
const Allocator = mem.Allocator;

View File

@@ -111,7 +111,6 @@ pub const Features = struct {
pub var csrf_generate: usize = 0;
pub var unsupported_uv_function: usize = 0;
pub var exited: usize = 0;
pub var yarn_migration: usize = 0;
comptime {
@export(&napi_module_register, .{ .name = "Bun__napi_module_register_count" });

View File

@@ -3041,8 +3041,6 @@ pub const api = struct {
node_linker: ?bun.install.PackageManager.Options.NodeLinker = null,
security_scanner: ?[]const u8 = null,
pub fn decode(reader: anytype) anyerror!BunInstall {
var this = std.mem.zeroes(BunInstall);

View File

@@ -3072,9 +3072,9 @@ pub const Data = union(Tag) {
.e_null => jsc.JSValue.null,
.e_undefined => .js_undefined,
.e_boolean => |boolean| if (boolean.value)
.true
jsc.JSValue.true
else
.false,
jsc.JSValue.false,
.e_number => |e| e.toJS(),
// .e_big_int => |e| e.toJS(ctx, exception),

View File

@@ -2903,18 +2903,22 @@ fn encodeSerializedFailures(
buf: *std.ArrayList(u8),
inspector_agent: ?*BunFrontendDevServerAgent,
) bun.OOM!void {
var all_failures_len: usize = 0;
for (failures) |fail| all_failures_len += fail.data.len;
var all_failures = try std.ArrayListUnmanaged(u8).initCapacity(dev.allocator, all_failures_len);
defer all_failures.deinit(dev.allocator);
for (failures) |fail| all_failures.appendSliceAssumeCapacity(fail.data);
const failures_start_buf_pos = buf.items.len;
for (failures) |fail| {
const len = bun.base64.encodeLen(fail.data);
const len = bun.base64.encodeLen(all_failures.items);
try buf.ensureUnusedCapacity(len);
const to_write_into = buf.unusedCapacitySlice();
buf.items.len += bun.base64.encode(to_write_into, all_failures.items);
try buf.ensureUnusedCapacity(len);
const start = buf.items.len;
buf.items.len += len;
const to_write_into = buf.items[start..];
var encoded = to_write_into[0..bun.base64.encode(to_write_into, fail.data)];
while (encoded.len > 0 and encoded[encoded.len - 1] == '=') {
encoded.len -= 1;
}
buf.items.len = start + encoded.len;
}
// Re-use the encoded buffer to avoid encoding failures more times than neccecary.
if (inspector_agent) |agent| {

View File

@@ -183,13 +183,7 @@ pub fn IncrementalGraph(side: bake.Side) type {
comptime {
if (!Environment.ci_assert) {
// On Windows, struct padding can cause size to be larger than expected
// Allow for platform-specific padding while ensuring reasonable bounds
const expected_size = @sizeOf(u64) * 5; // 40 bytes
const actual_size = @sizeOf(@This());
if (actual_size < expected_size or actual_size > expected_size + 16) {
@compileError(std.fmt.comptimePrint("Struct size {} is outside expected range [{}, {}]", .{ actual_size, expected_size, expected_size + 16 }));
}
bun.assert_eql(@sizeOf(@This()), @sizeOf(u64) * 5);
bun.assert_eql(@alignOf(@This()), @alignOf([*]u8));
}
}

View File

@@ -310,11 +310,7 @@ pub const VMHolder = struct {
};
pub inline fn get() *VirtualMachine {
return getOrNull().?;
}
pub inline fn getOrNull() ?*VirtualMachine {
return VMHolder.vm;
return VMHolder.vm.?;
}
pub fn getMainThreadVM() ?*VirtualMachine {
@@ -1618,7 +1614,7 @@ fn _resolve(
source_to_use,
normalized_specifier,
if (is_esm) .stmt else .require,
jsc_vm.transpiler.resolver.opts.global_cache,
if (jsc_vm.standalone_module_graph == null) jsc_vm.transpiler.resolver.opts.global_cache else .disable,
)) {
.success => |r| r,
.failure => |e| e,

View File

@@ -43,7 +43,6 @@ pub const MatchedRoute = @import("./api/filesystem_router.zig").MatchedRoute;
pub const NativeBrotli = @import("./node/zlib/NativeBrotli.zig");
pub const NativeZlib = @import("./node/zlib/NativeZlib.zig");
pub const Postgres = @import("../sql/postgres.zig");
pub const MySQL = @import("../sql/mysql.zig");
pub const ResolveMessage = @import("./ResolveMessage.zig").ResolveMessage;
pub const Shell = @import("../shell/shell.zig");
pub const UDPSocket = @import("./api/bun/udp_socket.zig").UDPSocket;

View File

@@ -1246,13 +1246,13 @@ pub fn mmapFile(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.
var map_size: ?usize = null;
if (args.nextEat()) |opts| {
flags.TYPE = if ((try opts.getBooleanLoose(globalThis, "shared")) orelse true)
flags.TYPE = if ((try opts.get(globalThis, "shared") orelse JSValue.true).toBoolean())
.SHARED
else
.PRIVATE;
if (@hasField(std.c.MAP, "SYNC")) {
if ((try opts.getBooleanLoose(globalThis, "sync")) orelse false) {
if ((try opts.get(globalThis, "sync") orelse JSValue.false).toBoolean()) {
flags.TYPE = .SHARED_VALIDATE;
flags.SYNC = true;
}

View File

@@ -13,7 +13,7 @@ pub const xxHash32 = hashWrap(struct {
}
});
pub const xxHash64 = hashWrap(struct {
pub fn hash(seed: u64, bytes: []const u8) u64 {
pub fn hash(seed: u32, bytes: []const u8) u64 {
// sidestep .hash taking in anytype breaking ArgTuple
// downstream by forcing a type signature on the input
return std.hash.XxHash64.hash(seed, bytes);

View File

@@ -37,119 +37,6 @@ pub const JSBundler = struct {
env_behavior: api.DotEnvBehavior = .disable,
env_prefix: OwnedString = OwnedString.initEmpty(bun.default_allocator),
tsconfig_override: OwnedString = OwnedString.initEmpty(bun.default_allocator),
compile: ?CompileOptions = null,
pub const CompileOptions = struct {
compile_target: CompileTarget = .{},
exec_argv: OwnedString = OwnedString.initEmpty(bun.default_allocator),
executable_path: OwnedString = OwnedString.initEmpty(bun.default_allocator),
windows_hide_console: bool = false,
windows_icon_path: OwnedString = OwnedString.initEmpty(bun.default_allocator),
windows_title: OwnedString = OwnedString.initEmpty(bun.default_allocator),
outfile: OwnedString = OwnedString.initEmpty(bun.default_allocator),
pub fn fromJS(globalThis: *jsc.JSGlobalObject, config: jsc.JSValue, allocator: std.mem.Allocator, compile_target: ?CompileTarget) JSError!?CompileOptions {
var this = CompileOptions{
.exec_argv = OwnedString.initEmpty(allocator),
.executable_path = OwnedString.initEmpty(allocator),
.windows_icon_path = OwnedString.initEmpty(allocator),
.windows_title = OwnedString.initEmpty(allocator),
.outfile = OwnedString.initEmpty(allocator),
.compile_target = compile_target orelse .{},
};
errdefer this.deinit();
const object = brk: {
const compile_value = try config.getTruthy(globalThis, "compile") orelse return null;
if (compile_value.isBoolean()) {
if (compile_value == .false) {
return null;
}
return this;
} else if (compile_value.isString()) {
this.compile_target = try CompileTarget.fromJS(globalThis, compile_value);
return this;
} else if (compile_value.isObject()) {
break :brk compile_value;
} else {
return globalThis.throwInvalidArguments("Expected compile to be a boolean or string or options object", .{});
}
};
if (try object.getOwn(globalThis, "target")) |target| {
this.compile_target = try CompileTarget.fromJS(globalThis, target);
}
if (try object.getOwnArray(globalThis, "execArgv")) |exec_argv| {
var iter = try exec_argv.arrayIterator(globalThis);
var is_first = true;
while (try iter.next()) |arg| {
var slice = try arg.toSlice(globalThis, bun.default_allocator);
defer slice.deinit();
if (is_first) {
is_first = false;
try this.exec_argv.appendSlice(slice.slice());
} else {
try this.exec_argv.appendChar(' ');
try this.exec_argv.appendSlice(slice.slice());
}
}
}
if (try object.getOwn(globalThis, "executablePath")) |executable_path| {
var slice = try executable_path.toSlice(globalThis, bun.default_allocator);
defer slice.deinit();
if (bun.sys.existsAtType(bun.FD.cwd(), slice.slice()).unwrapOr(.directory) != .file) {
return globalThis.throwInvalidArguments("executablePath must be a valid path to a Bun executable", .{});
}
try this.executable_path.appendSliceExact(slice.slice());
}
if (try object.getOwnTruthy(globalThis, "windows")) |windows| {
if (!windows.isObject()) {
return globalThis.throwInvalidArguments("windows must be an object", .{});
}
if (try windows.getOwn(globalThis, "hideConsole")) |hide_console| {
this.windows_hide_console = hide_console.toBoolean();
}
if (try windows.getOwn(globalThis, "icon")) |windows_icon_path| {
var slice = try windows_icon_path.toSlice(globalThis, bun.default_allocator);
defer slice.deinit();
if (bun.sys.existsAtType(bun.FD.cwd(), slice.slice()).unwrapOr(.directory) != .file) {
return globalThis.throwInvalidArguments("windows.icon must be a valid path to an ico file", .{});
}
try this.windows_icon_path.appendSliceExact(slice.slice());
}
if (try windows.getOwn(globalThis, "title")) |windows_title| {
var slice = try windows_title.toSlice(globalThis, bun.default_allocator);
defer slice.deinit();
try this.windows_title.appendSliceExact(slice.slice());
}
}
if (try object.getOwn(globalThis, "outfile")) |outfile| {
var slice = try outfile.toSlice(globalThis, bun.default_allocator);
defer slice.deinit();
try this.outfile.appendSliceExact(slice.slice());
}
return this;
}
pub fn deinit(this: *CompileOptions) void {
this.exec_argv.deinit();
this.executable_path.deinit();
this.windows_icon_path.deinit();
this.windows_title.deinit();
this.outfile.deinit();
}
};
pub const List = bun.StringArrayHashMapUnmanaged(Config);
@@ -171,20 +58,9 @@ pub const JSBundler = struct {
errdefer if (plugins.*) |plugin| plugin.deinit();
var did_set_target = false;
if (try config.getOptional(globalThis, "target", ZigString.Slice)) |slice| {
defer slice.deinit();
if (strings.hasPrefixComptime(slice.slice(), "bun-")) {
this.compile = .{
.compile_target = try CompileTarget.fromSlice(globalThis, slice.slice()),
};
this.target = .bun;
did_set_target = true;
} else {
this.target = options.Target.Map.get(slice.slice()) orelse {
return globalThis.throwInvalidArguments("Expected target to be one of 'browser', 'node', 'bun', 'macro', or 'bun-<target>', got {s}", .{slice.slice()});
};
did_set_target = true;
}
if (try config.getOptionalEnum(globalThis, "target", options.Target)) |target| {
this.target = target;
did_set_target = true;
}
// Plugins must be resolved first as they are allowed to mutate the config JSValue
@@ -574,52 +450,6 @@ pub const JSBundler = struct {
this.throw_on_error = flag;
}
if (try CompileOptions.fromJS(
globalThis,
config,
bun.default_allocator,
if (this.compile) |*compile| compile.compile_target else null,
)) |compile| {
this.compile = compile;
}
if (this.compile) |*compile| {
this.target = .bun;
const define_keys = compile.compile_target.defineKeys();
const define_values = compile.compile_target.defineValues();
for (define_keys, define_values) |key, value| {
try this.define.insert(key, value);
}
const base_public_path = bun.StandaloneModuleGraph.targetBasePublicPath(this.compile.?.compile_target.os, "root/");
try this.public_path.append(base_public_path);
if (compile.outfile.isEmpty()) {
const entry_point = this.entry_points.keys()[0];
var outfile = std.fs.path.basename(entry_point);
const ext = std.fs.path.extension(outfile);
if (ext.len > 0) {
outfile = outfile[0 .. outfile.len - ext.len];
}
if (strings.eqlComptime(outfile, "index")) {
outfile = std.fs.path.basename(std.fs.path.dirname(entry_point) orelse "index");
}
if (strings.eqlComptime(outfile, "bun")) {
outfile = std.fs.path.basename(std.fs.path.dirname(entry_point) orelse "bun");
}
// If argv[0] is "bun" or "bunx", we don't check if the binary is standalone
if (strings.eqlComptime(outfile, "bun") or strings.eqlComptime(outfile, "bunx")) {
return globalThis.throwInvalidArguments("cannot use compile with an output file named 'bun' because bun won't realize it's a standalone executable. Please choose a different name for compile.outfile", .{});
}
try compile.outfile.appendSliceExact(outfile);
}
}
return this;
}
@@ -676,9 +506,6 @@ pub const JSBundler = struct {
self.conditions.deinit();
self.drop.deinit();
self.banner.deinit();
if (self.compile) |*compile| {
compile.deinit();
}
self.env_prefix.deinit();
self.footer.deinit();
self.tsconfig_override.deinit();
@@ -694,7 +521,7 @@ pub const JSBundler = struct {
}
var plugins: ?*Plugin = null;
const config = try Config.fromJS(globalThis, arguments[0], &plugins, bun.default_allocator);
const config = try Config.fromJS(globalThis, arguments[0], &plugins, globalThis.allocator());
return bun.BundleV2.generateFromJavaScript(
config,
@@ -1454,7 +1281,6 @@ pub const BuildArtifact = struct {
const string = []const u8;
const CompileTarget = @import("../../compile_target.zig");
const Fs = @import("../../fs.zig");
const resolve_path = @import("../../resolver/resolve_path.zig");
const std = @import("std");

View File

@@ -46,9 +46,6 @@ pub const All = struct {
}
} = .{},
/// Updates the "Date" header.
date_header_timer: DateHeaderTimer = .{},
pub fn init() @This() {
return .{
.thread_id = std.Thread.getCurrentId(),
@@ -202,27 +199,6 @@ pub const All = struct {
return VirtualMachine.get().timer.last_id;
}
fn isDateTimerActive(this: *const All) bool {
return this.date_header_timer.event_loop_timer.state == .ACTIVE;
}
pub fn updateDateHeaderTimerIfNecessary(this: *All, loop: *const uws.Loop, vm: *VirtualMachine) void {
if (loop.shouldEnableDateHeaderTimer()) {
if (!this.isDateTimerActive()) {
this.date_header_timer.enable(
vm,
// Be careful to avoid adding extra calls to bun.timespec.now()
// when it's not needed.
&bun.timespec.now(),
);
}
} else {
// don't un-schedule it here.
// it's better to wake up an extra 1 time after a second idle
// than to have to check a date potentially on every single HTTP request.
}
}
pub fn getTimeout(this: *All, spec: *timespec, vm: *VirtualMachine) bool {
var maybe_now: ?timespec = null;
while (this.timers.peek()) |min| {
@@ -595,8 +571,6 @@ pub const ID = extern struct {
/// A timer created by WTF code and invoked by Bun's event loop
pub const WTFTimer = @import("./Timer/WTFTimer.zig");
pub const DateHeaderTimer = @import("./Timer/DateHeaderTimer.zig");
pub const internal_bindings = struct {
/// Node.js has some tests that check whether timers fire at the right time. They check this
/// with the internal binding `getLibuvNow()`, which returns an integer in milliseconds. This
@@ -624,7 +598,6 @@ const Environment = bun.Environment;
const JSError = bun.JSError;
const assert = bun.assert;
const timespec = bun.timespec;
const uws = bun.uws;
const heap = bun.io.heap;
const uv = bun.windows.libuv;

View File

@@ -1,82 +0,0 @@
/// DateHeaderTimer manages the periodic updating of the "Date" header in Bun.serve().
///
/// This timer ensures that HTTP responses include an up-to-date Date header by
/// updating the date every second when there are active connections.
///
/// Behavior:
/// - When sweep_timer_count > 0 (active connections), the timer should be running
/// - When sweep_timer_count = 0 (no connections), the timer doesn't get rescheduled.
/// - If the timer was already running, no changes are made.
/// - If the timer was not running and needs to start:
///   - If the last update was > 1 second ago, update the date immediately and schedule next update
///   - If the last update was < 1 second ago, just schedule the next update
///
/// Note that we only check for potential updates to this timer once per event loop tick.
const DateHeaderTimer = @This();

/// Event-loop timer backing this struct. `.next` doubles as the timestamp of
/// the most recent date update (set in `run`); it starts at `.epoch` so the
/// first call to `enable` always treats the cached date as stale.
event_loop_timer: jsc.API.Timer.EventLoopTimer = .{
    .tag = .DateHeaderTimer,
    .next = .epoch,
},

/// Schedule the "Date" header timer.
///
/// The logic handles two scenarios:
/// 1. If the timer was recently updated (< 1 second ago), just reschedule it
/// 2. If the timer is stale (> 1 second since last update), update the date immediately and reschedule
pub fn enable(this: *DateHeaderTimer, vm: *VirtualMachine, now: *const bun.timespec) void {
    bun.debugAssert(this.event_loop_timer.state != .ACTIVE);
    // `.next` holds the time of the last update (recorded in `run`).
    const last_update = this.event_loop_timer.next;
    const elapsed = now.duration(&last_update).ms();

    // If the last update was more than 1 second ago, the date is stale
    if (elapsed >= std.time.ms_per_s) {
        // Update the date immediately since it's stale
        log("updating stale timer & rescheduling for 1 second later", .{});
        // updateDate() is an expensive function.
        vm.uwsLoop().updateDate();
        vm.timer.update(&this.event_loop_timer, &now.addMs(std.time.ms_per_s));
    } else {
        // The date was updated recently, just reschedule for the next second
        log("rescheduling timer", .{});
        vm.timer.insert(&this.event_loop_timer);
    }
}

/// Timer callback: refresh the cached date, record the update time in
/// `.next`, and re-arm the timer for one second later while there are still
/// active connections (sweep_timer_count > 0).
pub fn run(this: *DateHeaderTimer, vm: *VirtualMachine) void {
    this.event_loop_timer.state = .FIRED;
    const loop = vm.uwsLoop();
    const now = bun.timespec.now();
    // Record when we last ran it.
    this.event_loop_timer.next = now;
    log("run", .{});
    // updateDate() is an expensive function.
    loop.updateDate();

    if (loop.internal_loop_data.sweep_timer_count > 0) {
        // Reschedule it automatically for 1 second later.
        this.event_loop_timer.next = now.addMs(std.time.ms_per_s);
        vm.timer.insert(&this.event_loop_timer);
    }
}

/// C-callable hook (presumably invoked from uSockets when a loop gains
/// connections — confirm against the C side). No-ops when no VirtualMachine
/// exists on the current thread.
pub export fn Bun__internal_ensureDateHeaderTimerIsEnabled(loop: *uws.Loop) callconv(.C) void {
    if (jsc.VirtualMachine.getOrNull()) |vm| {
        vm.timer.updateDateHeaderTimerIfNecessary(loop, vm);
    }
}

const log = bun.Output.scoped(.DateHeaderTimer, .visible);

const std = @import("std");

const bun = @import("bun");
const uws = bun.uws;

const jsc = bun.jsc;
const VirtualMachine = jsc.VirtualMachine;

View File

@@ -59,15 +59,12 @@ pub const Tag = if (Environment.isWindows) enum {
WTFTimer,
PostgresSQLConnectionTimeout,
PostgresSQLConnectionMaxLifetime,
MySQLConnectionTimeout,
MySQLConnectionMaxLifetime,
ValkeyConnectionTimeout,
ValkeyConnectionReconnect,
SubprocessTimeout,
DevServerSweepSourceMaps,
DevServerMemoryVisualizerTick,
AbortSignalTimeout,
DateHeaderTimer,
pub fn Type(comptime T: Tag) type {
return switch (T) {
@@ -82,8 +79,6 @@ pub const Tag = if (Environment.isWindows) enum {
.WTFTimer => WTFTimer,
.PostgresSQLConnectionTimeout => jsc.Postgres.PostgresSQLConnection,
.PostgresSQLConnectionMaxLifetime => jsc.Postgres.PostgresSQLConnection,
.MySQLConnectionTimeout => jsc.MySQL.MySQLConnection,
.MySQLConnectionMaxLifetime => jsc.MySQL.MySQLConnection,
.SubprocessTimeout => jsc.Subprocess,
.ValkeyConnectionReconnect => jsc.API.Valkey,
.ValkeyConnectionTimeout => jsc.API.Valkey,
@@ -91,7 +86,6 @@ pub const Tag = if (Environment.isWindows) enum {
.DevServerMemoryVisualizerTick,
=> bun.bake.DevServer,
.AbortSignalTimeout => jsc.WebCore.AbortSignal.Timeout,
.DateHeaderTimer => jsc.API.Timer.DateHeaderTimer,
};
}
} else enum {
@@ -105,15 +99,12 @@ pub const Tag = if (Environment.isWindows) enum {
DNSResolver,
PostgresSQLConnectionTimeout,
PostgresSQLConnectionMaxLifetime,
MySQLConnectionTimeout,
MySQLConnectionMaxLifetime,
ValkeyConnectionTimeout,
ValkeyConnectionReconnect,
SubprocessTimeout,
DevServerSweepSourceMaps,
DevServerMemoryVisualizerTick,
AbortSignalTimeout,
DateHeaderTimer,
pub fn Type(comptime T: Tag) type {
return switch (T) {
@@ -127,8 +118,6 @@ pub const Tag = if (Environment.isWindows) enum {
.DNSResolver => DNSResolver,
.PostgresSQLConnectionTimeout => jsc.Postgres.PostgresSQLConnection,
.PostgresSQLConnectionMaxLifetime => jsc.Postgres.PostgresSQLConnection,
.MySQLConnectionTimeout => jsc.MySQL.MySQLConnection,
.MySQLConnectionMaxLifetime => jsc.MySQL.MySQLConnection,
.ValkeyConnectionTimeout => jsc.API.Valkey,
.ValkeyConnectionReconnect => jsc.API.Valkey,
.SubprocessTimeout => jsc.Subprocess,
@@ -136,7 +125,6 @@ pub const Tag = if (Environment.isWindows) enum {
.DevServerMemoryVisualizerTick,
=> bun.bake.DevServer,
.AbortSignalTimeout => jsc.WebCore.AbortSignal.Timeout,
.DateHeaderTimer => jsc.API.Timer.DateHeaderTimer,
};
}
};
@@ -197,8 +185,6 @@ pub fn fire(self: *Self, now: *const timespec, vm: *VirtualMachine) Arm {
switch (self.tag) {
.PostgresSQLConnectionTimeout => return @as(*api.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("timer", self))).onConnectionTimeout(),
.PostgresSQLConnectionMaxLifetime => return @as(*api.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("max_lifetime_timer", self))).onMaxLifetimeTimeout(),
.MySQLConnectionTimeout => return @as(*api.MySQL.MySQLConnection, @alignCast(@fieldParentPtr("timer", self))).onConnectionTimeout(),
.MySQLConnectionMaxLifetime => return @as(*api.MySQL.MySQLConnection, @alignCast(@fieldParentPtr("max_lifetime_timer", self))).onMaxLifetimeTimeout(),
.ValkeyConnectionTimeout => return @as(*api.Valkey, @alignCast(@fieldParentPtr("timer", self))).onConnectionTimeout(),
.ValkeyConnectionReconnect => return @as(*api.Valkey, @alignCast(@fieldParentPtr("reconnect_timer", self))).onReconnectTimer(),
.DevServerMemoryVisualizerTick => return bun.bake.DevServer.emitMemoryVisualizerMessageTimer(self, now),
@@ -208,11 +194,6 @@ pub fn fire(self: *Self, now: *const timespec, vm: *VirtualMachine) Arm {
timeout.run(vm);
return .disarm;
},
.DateHeaderTimer => {
const date_header_timer = @as(*jsc.API.Timer.DateHeaderTimer, @fieldParentPtr("event_loop_timer", self));
date_header_timer.run(vm);
return .disarm;
},
inline else => |t| {
if (@FieldType(t.Type(), "event_loop_timer") != Self) {
@compileError(@typeName(t.Type()) ++ " has wrong type for 'event_loop_timer'");

View File

@@ -206,7 +206,7 @@ const FullSettingsPayload = packed struct(u288) {
result.put(globalObject, jsc.ZigString.static("maxHeaderSize"), jsc.JSValue.jsNumber(this.maxHeaderListSize));
// TODO: we dont support this setting yet see https://nodejs.org/api/http2.html#settings-object
// we should also support customSettings
result.put(globalObject, jsc.ZigString.static("enableConnectProtocol"), .false);
result.put(globalObject, jsc.ZigString.static("enableConnectProtocol"), jsc.JSValue.jsBoolean(false));
return result;
}
@@ -3040,7 +3040,7 @@ pub const H2FrameParser = struct {
};
if (!stream.canSendData() and !stream.canReceiveData()) {
return .false;
return jsc.JSValue.jsBoolean(false);
}
if (!options.isObject()) {
@@ -3083,7 +3083,7 @@ pub const H2FrameParser = struct {
}
if (parent_id == stream.id) {
this.sendGoAway(stream.id, ErrorCode.PROTOCOL_ERROR, "Stream with self dependency", this.lastStreamID, true);
return .false;
return jsc.JSValue.jsBoolean(false);
}
stream.streamDependency = parent_id;
@@ -3111,7 +3111,7 @@ pub const H2FrameParser = struct {
_ = frame.write(@TypeOf(writer), writer);
_ = priority.write(@TypeOf(writer), writer);
}
return .true;
return jsc.JSValue.jsBoolean(true);
}
pub fn rstStream(this: *H2FrameParser, globalObject: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!JSValue {
log("rstStream", .{});
@@ -3143,7 +3143,7 @@ pub const H2FrameParser = struct {
this.endStream(stream, @enumFromInt(error_code));
return .true;
return jsc.JSValue.jsBoolean(true);
}
const MemoryWriter = struct {
@@ -3542,7 +3542,7 @@ pub const H2FrameParser = struct {
};
if (!stream.canSendData()) {
this.dispatchWriteCallback(callback_arg);
return .false;
return jsc.JSValue.jsBoolean(false);
}
const encoding: jsc.Node.Encoding = brk: {
@@ -3571,7 +3571,7 @@ pub const H2FrameParser = struct {
this.sendData(stream, buffer.slice(), close, callback_arg);
return .true;
return jsc.JSValue.jsBoolean(true);
}
fn getNextStreamID(this: *H2FrameParser) u32 {

View File

@@ -84,7 +84,7 @@ pub const ProcessExitHandler = struct {
LifecycleScriptSubprocess,
ShellSubprocess,
ProcessHandle,
SecurityScanSubprocess,
SyncProcess,
},
);
@@ -115,10 +115,6 @@ pub const ProcessExitHandler = struct {
const subprocess = this.ptr.as(ShellSubprocess);
subprocess.onProcessExit(process, status, rusage);
},
@field(TaggedPointer.Tag, @typeName(SecurityScanSubprocess)) => {
const subprocess = this.ptr.as(SecurityScanSubprocess);
subprocess.onProcessExit(process, status, rusage);
},
@field(TaggedPointer.Tag, @typeName(SyncProcess)) => {
const subprocess = this.ptr.as(SyncProcess);
if (comptime Environment.isPosix) {
@@ -2250,12 +2246,10 @@ const bun = @import("bun");
const Environment = bun.Environment;
const Output = bun.Output;
const PosixSpawn = bun.spawn;
const LifecycleScriptSubprocess = bun.install.LifecycleScriptSubprocess;
const Maybe = bun.sys.Maybe;
const ShellSubprocess = bun.shell.ShellSubprocess;
const uv = bun.windows.libuv;
const LifecycleScriptSubprocess = bun.install.LifecycleScriptSubprocess;
const SecurityScanSubprocess = bun.install.SecurityScanSubprocess;
const jsc = bun.jsc;
const Subprocess = jsc.Subprocess;

View File

@@ -905,7 +905,7 @@ pub fn NewSocket(comptime ssl: bool) type {
pub fn endBuffered(this: *This, globalObject: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!JSValue {
if (this.socket.isDetached()) {
this.buffered_data_for_node_net.deinitWithAllocator(bun.default_allocator);
return .false;
return JSValue.jsBoolean(false);
}
const args = callframe.argumentsUndef(2);
@@ -2044,7 +2044,7 @@ pub fn jsIsNamedPipeSocket(global: *jsc.JSGlobalObject, callframe: *jsc.CallFram
} else if (socket.as(TLSSocket)) |this| {
return jsc.JSValue.jsBoolean(this.socket.isNamedPipe());
}
return .false;
return jsc.JSValue.jsBoolean(false);
}
pub fn jsGetBufferedAmount(global: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!JSValue {

View File

@@ -98,7 +98,7 @@ pub fn setMaxSendFragment(this: *This, globalObject: *jsc.JSGlobalObject, callfr
return globalObject.throw("Expected size to be less than 16385", .{});
}
const ssl_ptr = this.socket.ssl() orelse return .false;
const ssl_ptr = this.socket.ssl() orelse return JSValue.jsBoolean(false);
return JSValue.jsBoolean(BoringSSL.SSL_set_max_send_fragment(ssl_ptr, @as(usize, @intCast(size))) == 1);
}
@@ -440,11 +440,11 @@ pub fn getALPNProtocol(this: *This, globalObject: *jsc.JSGlobalObject) bun.JSErr
var alpn_proto: [*c]const u8 = null;
var alpn_proto_len: u32 = 0;
const ssl_ptr = this.socket.ssl() orelse return .false;
const ssl_ptr = this.socket.ssl() orelse return JSValue.jsBoolean(false);
BoringSSL.SSL_get0_alpn_selected(ssl_ptr, &alpn_proto, &alpn_proto_len);
if (alpn_proto == null or alpn_proto_len == 0) {
return .false;
return JSValue.jsBoolean(false);
}
const slice = alpn_proto[0..alpn_proto_len];

View File

@@ -1,6 +1,6 @@
pub const Stdio = union(enum) {
inherit,
capture: struct { buf: *bun.ByteList },
capture: struct { fd: bun.FileDescriptor, buf: *bun.ByteList },
ignore,
fd: bun.FileDescriptor,
dup2: struct {

File diff suppressed because it is too large Load Diff

View File

@@ -1,195 +0,0 @@
/// Where a subprocess's output (stdout/stderr) can be read from, and how its
/// backing resource is owned and released.
pub const Readable = union(enum) {
    /// A file descriptor handed back by spawn; closed elsewhere (see `close`).
    fd: bun.FileDescriptor,
    /// In-memory file descriptor (POSIX); closed by `close`/`finalize`.
    memfd: bun.FileDescriptor,
    /// Actively-read pipe; reference-counted, detached on finalize/toJS.
    pipe: *PipeReader,
    inherit: void,
    ignore: void,
    closed: void,

    /// Eventually we will implement Readables created from blobs and array buffers.
    /// When we do that, `buffer` will be borrowed from those objects.
    ///
    /// When a buffered `pipe` finishes reading from its file descriptor,
    /// the owning `Readable` will be converted into this variant and the pipe's
    /// buffer will be taken as an owned `CowString`.
    buffer: CowString,

    /// Approximate heap memory attributable to this Readable (for reporting).
    pub fn memoryCost(this: *const Readable) usize {
        return switch (this.*) {
            .pipe => @sizeOf(PipeReader) + this.pipe.memoryCost(),
            .buffer => this.buffer.length(),
            else => 0,
        };
    }

    /// True while data may still arrive (pending pipe read).
    pub fn hasPendingActivity(this: *const Readable) bool {
        return switch (this.*) {
            .pipe => this.pipe.hasPendingActivity(),
            else => false,
        };
    }

    /// Keep the event loop alive on behalf of an active pipe.
    pub fn ref(this: *Readable) void {
        switch (this.*) {
            .pipe => {
                this.pipe.updateRef(true);
            },
            else => {},
        }
    }

    /// Allow the event loop to exit even if the pipe is still open.
    pub fn unref(this: *Readable) void {
        switch (this.*) {
            .pipe => {
                this.pipe.updateRef(false);
            },
            else => {},
        }
    }

    /// Build a Readable from the requested `stdio` mode and the fd/handle the
    /// spawn actually produced (`result`). `allocator`/`is_sync` are currently
    /// unused. On POSIX, pipe fds are switched to non-blocking mode here.
    pub fn init(stdio: Stdio, event_loop: *jsc.EventLoop, process: *Subprocess, result: StdioResult, allocator: std.mem.Allocator, max_size: ?*MaxBuf, is_sync: bool) Readable {
        _ = allocator; // autofix
        _ = is_sync; // autofix
        Subprocess.assertStdioResult(result);

        if (comptime Environment.isPosix) {
            if (stdio == .pipe) {
                _ = bun.sys.setNonblocking(result.?);
            }
        }

        return switch (stdio) {
            .inherit => Readable{ .inherit = {} },
            .ignore, .ipc, .path => Readable{ .ignore = {} },
            // On POSIX the spawn result carries the usable fd; on Windows keep the requested one.
            .fd => |fd| if (Environment.isPosix) Readable{ .fd = result.? } else Readable{ .fd = fd },
            .memfd => if (Environment.isPosix) Readable{ .memfd = stdio.memfd } else Readable{ .ignore = {} },
            .dup2 => |dup2| if (Environment.isPosix) Output.panic("TODO: implement dup2 support in Stdio readable", .{}) else Readable{ .fd = dup2.out.toFd() },
            .pipe => Readable{ .pipe = PipeReader.create(event_loop, process, result, max_size) },
            .array_buffer, .blob => Output.panic("TODO: implement ArrayBuffer & Blob support in Stdio readable", .{}),
            .capture => Output.panic("TODO: implement capture support in Stdio readable", .{}),
            .readable_stream => Readable{ .ignore = {} }, // ReadableStream is handled separately
        };
    }

    /// I/O close callback; the error (if any) is intentionally discarded.
    pub fn onClose(this: *Readable, _: ?bun.sys.Error) void {
        this.* = .closed;
    }

    pub fn onReady(_: *Readable, _: ?jsc.WebCore.Blob.SizeType, _: ?jsc.WebCore.Blob.SizeType) void {}

    pub fn onStart(_: *Readable) void {}

    /// Release the underlying resource and mark this Readable closed.
    /// Note: `.fd` is marked closed without closing the descriptor here.
    pub fn close(this: *Readable) void {
        switch (this.*) {
            .memfd => |fd| {
                this.* = .{ .closed = {} };
                fd.close();
            },
            .fd => |_| {
                this.* = .{ .closed = {} };
            },
            .pipe => {
                this.pipe.close();
            },
            else => {},
        }
    }

    /// GC finalizer: free owned resources (memfd, pipe reference, buffer).
    pub fn finalize(this: *Readable) void {
        switch (this.*) {
            .memfd => |fd| {
                this.* = .{ .closed = {} };
                fd.close();
            },
            .fd => {
                this.* = .{ .closed = {} };
            },
            .pipe => |pipe| {
                // Detach after transitioning to .closed so the pipe cannot
                // observe a half-updated Readable.
                defer pipe.detach();
                this.* = .{ .closed = {} };
            },
            .buffer => |*buf| {
                buf.deinit(bun.default_allocator);
            },
            else => {},
        }
    }

    /// Expose this Readable to JS (as an fd number or a ReadableStream),
    /// consuming the pipe/buffer variants in the process. `exited` is unused.
    pub fn toJS(this: *Readable, globalThis: *jsc.JSGlobalObject, exited: bool) bun.JSError!JSValue {
        _ = exited; // autofix
        switch (this.*) {
            // should only be reachable when the entire output is buffered.
            .memfd => return this.toBufferedValue(globalThis),

            .fd => |fd| {
                return fd.toJS(globalThis);
            },
            .pipe => |pipe| {
                defer pipe.detach();
                this.* = .{ .closed = {} };
                return pipe.toJS(globalThis);
            },
            .buffer => |*buffer| {
                defer this.* = .{ .closed = {} };

                if (buffer.length() == 0) {
                    return jsc.WebCore.ReadableStream.empty(globalThis);
                }

                // Transfer ownership of the bytes into the stream.
                const own = try buffer.takeSlice(bun.default_allocator);
                return jsc.WebCore.ReadableStream.fromOwnedSlice(globalThis, own, 0);
            },
            else => {
                return .js_undefined;
            },
        }
    }

    /// Expose this Readable as a fully-buffered JS value (Node Buffer /
    /// ArrayBuffer), consuming the memfd/pipe/buffer variants.
    pub fn toBufferedValue(this: *Readable, globalThis: *jsc.JSGlobalObject) bun.JSError!JSValue {
        switch (this.*) {
            .fd => |fd| {
                return fd.toJS(globalThis);
            },
            .memfd => |fd| {
                if (comptime !Environment.isPosix) {
                    Output.panic("memfd is only supported on Linux", .{});
                }
                this.* = .{ .closed = {} };
                return jsc.ArrayBuffer.toJSBufferFromMemfd(fd, globalThis);
            },
            .pipe => |pipe| {
                defer pipe.detach();
                this.* = .{ .closed = {} };
                return pipe.toBuffer(globalThis);
            },
            .buffer => |*buf| {
                defer this.* = .{ .closed = {} };
                const own = buf.takeSlice(bun.default_allocator) catch {
                    return globalThis.throwOutOfMemory();
                };
                return jsc.MarkedArrayBuffer.fromBytes(own, bun.default_allocator, .Uint8Array).toNodeBuffer(globalThis);
            },
            else => {
                return .js_undefined;
            },
        }
    }
};

const std = @import("std");

const bun = @import("bun");
const Environment = bun.Environment;
const Output = bun.Output;
const default_allocator = bun.default_allocator;
const CowString = bun.ptr.CowString;
const Stdio = bun.spawn.Stdio;

const jsc = bun.jsc;
const JSGlobalObject = jsc.JSGlobalObject;
const JSValue = jsc.JSValue;

const Subprocess = jsc.API.Subprocess;
const MaxBuf = Subprocess.MaxBuf;
const PipeReader = Subprocess.PipeReader;
const StdioResult = Subprocess.StdioResult;

View File

@@ -1,74 +0,0 @@
/// JS-facing wrapper around a process's `rusage` snapshot, exposing the
/// fields Node's `resourceUsage()`-style API expects as getters.
const ResourceUsage = @This();

pub const js = jsc.Codegen.JSResourceUsage;
pub const toJS = ResourceUsage.js.toJS;
pub const fromJS = ResourceUsage.js.fromJS;
pub const fromJSDirect = ResourceUsage.js.fromJSDirect;

/// Snapshot of the kernel-reported resource usage this object exposes.
rusage: Rusage,

/// Copy `rusage` into a heap-allocated ResourceUsage and hand it to JS.
pub fn create(rusage: *const Rusage, globalObject: *JSGlobalObject) bun.JSError!JSValue {
    const wrapper = bun.new(ResourceUsage, .{ .rusage = rusage.* });
    return wrapper.toJS(globalObject);
}

/// Build `{ user, system, total }` CPU-time object from utime/stime.
pub fn getCPUTime(this: *ResourceUsage, globalObject: *JSGlobalObject) bun.JSError!JSValue {
    var times = jsc.JSValue.createEmptyObjectWithNullPrototype(globalObject);
    const ru = this.rusage;

    const user_time = try JSValue.fromTimevalNoTruncate(globalObject, ru.utime.usec, ru.utime.sec);
    const system_time = try JSValue.fromTimevalNoTruncate(globalObject, ru.stime.usec, ru.stime.sec);

    times.put(globalObject, jsc.ZigString.static("user"), user_time);
    times.put(globalObject, jsc.ZigString.static("system"), system_time);
    times.put(globalObject, jsc.ZigString.static("total"), JSValue.bigIntSum(globalObject, user_time, system_time));
    return times;
}

/// Maximum resident set size (`maxrss`).
pub fn getMaxRSS(this: *ResourceUsage, _: *JSGlobalObject) JSValue {
    return jsc.JSValue.jsNumber(this.rusage.maxrss);
}

/// Integral shared memory size (`ixrss`).
pub fn getSharedMemorySize(this: *ResourceUsage, _: *JSGlobalObject) JSValue {
    return jsc.JSValue.jsNumber(this.rusage.ixrss);
}

/// Number of swaps (`nswap`).
pub fn getSwapCount(this: *ResourceUsage, _: *JSGlobalObject) JSValue {
    return jsc.JSValue.jsNumber(this.rusage.nswap);
}

/// Build `{ in, out }` from block-I/O counters (inblock/oublock).
pub fn getOps(this: *ResourceUsage, globalObject: *JSGlobalObject) JSValue {
    var io_ops = jsc.JSValue.createEmptyObjectWithNullPrototype(globalObject);
    io_ops.put(globalObject, jsc.ZigString.static("in"), jsc.JSValue.jsNumber(this.rusage.inblock));
    io_ops.put(globalObject, jsc.ZigString.static("out"), jsc.JSValue.jsNumber(this.rusage.oublock));
    return io_ops;
}

/// Build `{ sent, received }` from IPC message counters (msgsnd/msgrcv).
pub fn getMessages(this: *ResourceUsage, globalObject: *JSGlobalObject) JSValue {
    var messages = jsc.JSValue.createEmptyObjectWithNullPrototype(globalObject);
    messages.put(globalObject, jsc.ZigString.static("sent"), jsc.JSValue.jsNumber(this.rusage.msgsnd));
    messages.put(globalObject, jsc.ZigString.static("received"), jsc.JSValue.jsNumber(this.rusage.msgrcv));
    return messages;
}

/// Number of signals received (`nsignals`).
pub fn getSignalCount(this: *ResourceUsage, _: *JSGlobalObject) JSValue {
    return jsc.JSValue.jsNumber(this.rusage.nsignals);
}

/// Build `{ voluntary, involuntary }` from context-switch counters.
pub fn getContextSwitches(this: *ResourceUsage, globalObject: *JSGlobalObject) JSValue {
    var switches = jsc.JSValue.createEmptyObjectWithNullPrototype(globalObject);
    switches.put(globalObject, jsc.ZigString.static("voluntary"), jsc.JSValue.jsNumber(this.rusage.nvcsw));
    switches.put(globalObject, jsc.ZigString.static("involuntary"), jsc.JSValue.jsNumber(this.rusage.nivcsw));
    return switches;
}

/// GC finalizer: free the heap allocation made in `create`.
pub fn finalize(this: *ResourceUsage) callconv(.C) void {
    bun.destroy(this);
}

const bun = @import("bun");
const Rusage = bun.spawn.Rusage;

const jsc = bun.jsc;
const JSGlobalObject = jsc.JSGlobalObject;
const JSValue = jsc.JSValue;

View File

@@ -1,139 +0,0 @@
/// Builds a writer type that streams one fixed, already-materialized buffer
/// (from a `Source`, e.g. a blob or array buffer) into a child process's
/// stdin pipe. `ProcessType` is the owning process type; it is notified via
/// `onCloseIO(.stdin)` once the write side closes.
pub fn NewStaticPipeWriter(comptime ProcessType: type) type {
    return struct {
        const This = @This();

        /// Reference count; `start` takes a self-reference that is released
        /// when the writer is deinitialized.
        ref_count: WriterRefCount,
        writer: IOWriter = .{},
        stdio_result: StdioResult,
        /// Backing data being written; detached on error/close/deinit.
        source: Source = .{ .detached = {} },
        process: *ProcessType = undefined,
        event_loop: jsc.EventLoopHandle,
        /// Remaining unwritten bytes (a slice into `source`'s data).
        buffer: []const u8 = "",

        // It seems there is a bug in the Zig compiler. We'll get back to this one later
        const WriterRefCount = bun.ptr.RefCount(@This(), "ref_count", _deinit, .{});
        pub const ref = WriterRefCount.ref;
        pub const deref = WriterRefCount.deref;

        const print = bun.Output.scoped(.StaticPipeWriter, .visible);

        /// Buffered writer specialized with this type's callbacks.
        pub const IOWriter = bun.io.BufferedWriter(@This(), struct {
            pub const onWritable = null;
            pub const getBuffer = This.getBuffer;
            pub const onClose = This.onClose;
            pub const onError = This.onError;
            pub const onWrite = This.onWrite;
        });

        pub const Poll = IOWriter;

        /// Keep (or stop keeping) the event loop alive for this writer.
        pub fn updateRef(this: *This, add: bool) void {
            this.writer.updateRef(this.event_loop, add);
        }

        /// IOWriter callback: the bytes still to be written.
        pub fn getBuffer(this: *This) []const u8 {
            return this.buffer;
        }

        pub fn close(this: *This) void {
            log("StaticPipeWriter(0x{x}) close()", .{@intFromPtr(this)});
            this.writer.close();
        }

        pub fn flush(this: *This) void {
            if (this.buffer.len > 0)
                this.writer.write();
        }

        /// Allocate a writer for `subprocess`. On Windows the pipe handle from
        /// the spawn result is wired up immediately.
        pub fn create(event_loop: anytype, subprocess: *ProcessType, result: StdioResult, source: Source) *This {
            const this = bun.new(This, .{
                .ref_count = .init(),
                .event_loop = jsc.EventLoopHandle.init(event_loop),
                .process = subprocess,
                .stdio_result = result,
                .source = source,
            });
            if (Environment.isWindows) {
                this.writer.setPipe(this.stdio_result.buffer);
            }
            this.writer.setParent(this);
            return this;
        }

        /// Begin writing `source` to the pipe. Takes a self-reference
        /// (balanced by `_deinit` via the ref count reaching zero).
        pub fn start(this: *This) bun.sys.Maybe(void) {
            log("StaticPipeWriter(0x{x}) start()", .{@intFromPtr(this)});
            this.ref();
            this.buffer = this.source.slice();
            if (Environment.isWindows) {
                return this.writer.startWithCurrentPipe();
            }
            switch (this.writer.start(this.stdio_result.?, true)) {
                .err => |err| {
                    return .{ .err = err };
                },
                .result => {
                    if (comptime Environment.isPosix) {
                        // Flag the poll as a socket so the POSIX event loop
                        // handles the fd appropriately.
                        const poll = this.writer.handle.poll;
                        poll.flags.insert(.socket);
                    }

                    return .success;
                },
            }
        }

        /// Progress callback: drop the bytes already written, and close once
        /// everything has been flushed or the read end hung up.
        pub fn onWrite(this: *This, amount: usize, status: bun.io.WriteStatus) void {
            log("StaticPipeWriter(0x{x}) onWrite(amount={d} {})", .{ @intFromPtr(this), amount, status });
            this.buffer = this.buffer[@min(amount, this.buffer.len)..];
            if (status == .end_of_file or this.buffer.len == 0) {
                this.writer.close();
            }
        }

        /// Error callback: release the source; the error itself is only logged.
        pub fn onError(this: *This, err: bun.sys.Error) void {
            log("StaticPipeWriter(0x{x}) onError(err={any})", .{ @intFromPtr(this), err });
            this.source.detach();
        }

        /// Close callback: release the source and notify the owning process
        /// that stdin is done.
        pub fn onClose(this: *This) void {
            log("StaticPipeWriter(0x{x}) onClose()", .{@intFromPtr(this)});
            this.source.detach();
            this.process.onCloseIO(.stdin);
        }

        /// Ref-count destructor: tear down the writer and free this object.
        fn _deinit(this: *This) void {
            this.writer.end();
            this.source.detach();
            bun.destroy(this);
        }

        /// Approximate memory attributable to this writer (for reporting).
        pub fn memoryCost(this: *const This) usize {
            return @sizeOf(@This()) + this.source.memoryCost() + this.writer.memoryCost();
        }

        pub fn loop(this: *This) *uws.Loop {
            return this.event_loop.loop();
        }

        /// Re-register interest in writability while data remains.
        pub fn watch(this: *This) void {
            if (this.buffer.len > 0) {
                this.writer.watch();
            }
        }

        pub fn eventLoop(this: *This) jsc.EventLoopHandle {
            return this.event_loop;
        }
    };
}

const log = Output.scoped(.StaticPipeWriter, .hidden);

const bun = @import("bun");
const Environment = bun.Environment;
const Output = bun.Output;

const jsc = bun.jsc;
const uws = bun.uws;

const Subprocess = jsc.API.Subprocess;
const Source = Subprocess.Source;
const StdioResult = Subprocess.StdioResult;

View File

@@ -1,225 +0,0 @@
/// Reads a child process's stdout/stderr pipe into memory. Reference-counted
/// so it can outlive the Subprocess that created it (e.g. while a JS
/// ReadableStream still holds it).
const PipeReader = @This();
const RefCount = bun.ptr.RefCount(@This(), "ref_count", PipeReader.deinit, .{});
pub const ref = PipeReader.RefCount.ref;
pub const deref = PipeReader.RefCount.deref;

reader: IOReader = undefined,
/// Owning process; null once detached.
process: ?*Subprocess = null,
event_loop: *jsc.EventLoop = undefined,
ref_count: PipeReader.RefCount,
/// Read state machine: still reading, finished with an owned byte slice, or
/// failed with a syscall error. `.done` bytes are freed in `deinit` unless
/// ownership is transferred first (toReadableStream/toBuffer/toOwnedSlice).
state: union(enum) {
    pending: void,
    done: []u8,
    err: bun.sys.Error,
} = .{ .pending = {} },
stdio_result: StdioResult,

pub const IOReader = bun.io.BufferedReader;
pub const Poll = IOReader;

/// Approximate memory attributable to this reader (for reporting).
pub fn memoryCost(this: *const PipeReader) usize {
    return this.reader.memoryCost();
}

pub fn hasPendingActivity(this: *const PipeReader) bool {
    if (this.state == .pending)
        return true;

    return this.reader.hasPendingActivity();
}

/// Sever the link to the owning process and drop its reference.
pub fn detach(this: *PipeReader) void {
    this.process = null;
    this.deref();
}

/// Allocate a reader for `process`, attaching the optional output-size limit
/// and (on Windows) the pipe handle from the spawn result.
pub fn create(event_loop: *jsc.EventLoop, process: *Subprocess, result: StdioResult, limit: ?*MaxBuf) *PipeReader {
    var this = bun.new(PipeReader, .{
        .ref_count = .init(),
        .process = process,
        .reader = IOReader.init(@This()),
        .event_loop = event_loop,
        .stdio_result = result,
    });
    MaxBuf.addToPipereader(limit, &this.reader.maxbuf);
    if (Environment.isWindows) {
        this.reader.source = .{ .pipe = this.stdio_result.buffer };
    }
    this.reader.setParent(this);
    return this;
}

/// Drain whatever is currently available, if still reading.
pub fn readAll(this: *PipeReader) void {
    if (this.state == .pending)
        this.reader.read();
}

/// Begin reading from the pipe; takes a self-reference that is released when
/// reading completes (see `onReaderDone`).
pub fn start(this: *PipeReader, process: *Subprocess, event_loop: *jsc.EventLoop) bun.sys.Maybe(void) {
    this.ref();
    this.process = process;
    this.event_loop = event_loop;
    if (Environment.isWindows) {
        return this.reader.startWithCurrentPipe();
    }

    switch (this.reader.start(this.stdio_result.?, true)) {
        .err => |err| {
            return .{ .err = err };
        },
        .result => {
            if (comptime Environment.isPosix) {
                // Flag the fd as a non-blocking, pollable socket on POSIX.
                const poll = this.reader.handle.poll;
                poll.flags.insert(.socket);
                this.reader.flags.socket = true;
                this.reader.flags.nonblocking = true;
                this.reader.flags.pollable = true;
                poll.flags.insert(.nonblocking);
            }

            return .success;
        },
    }
}

pub const toJS = toReadableStream;

/// EOF callback: take ownership of the buffered bytes, then notify the
/// owning process and release its reference.
pub fn onReaderDone(this: *PipeReader) void {
    const owned = this.toOwnedSlice();
    this.state = .{ .done = owned };
    if (this.process) |process| {
        this.process = null;
        process.onCloseIO(this.kind(process));
        this.deref();
    }
}

/// Whether this reader backs the process's stdout or stderr pipe.
pub fn kind(reader: *const PipeReader, process: *const Subprocess) StdioKind {
    if (process.stdout == .pipe and process.stdout.pipe == reader) {
        return .stdout;
    }

    if (process.stderr == .pipe and process.stderr.pipe == reader) {
        return .stderr;
    }

    @panic("We should be either stdout or stderr");
}

/// Steal the accumulated bytes without reallocating; leaves the reader's
/// internal buffer empty.
pub fn toOwnedSlice(this: *PipeReader) []u8 {
    if (this.state == .done) {
        return this.state.done;
    }
    // we do not use .toOwnedSlice() because we don't want to reallocate memory.
    const out = this.reader._buffer;
    this.reader._buffer.items = &.{};
    this.reader._buffer.capacity = 0;

    if (out.capacity > 0 and out.items.len == 0) {
        out.deinit();
        return &.{};
    }
    return out.items;
}

pub fn updateRef(this: *PipeReader, add: bool) void {
    this.reader.updateRef(add);
}

pub fn watch(this: *PipeReader) void {
    if (!this.reader.isDone())
        this.reader.watch();
}

/// Convert this reader into a JS ReadableStream, detaching it from the
/// process. On error, an empty stream is created and immediately cancelled.
pub fn toReadableStream(this: *PipeReader, globalObject: *jsc.JSGlobalObject) bun.JSError!jsc.JSValue {
    defer this.detach();

    switch (this.state) {
        .pending => {
            const stream = jsc.WebCore.ReadableStream.fromPipe(globalObject, this, &this.reader);
            this.state = .{ .done = &.{} };
            return stream;
        },
        .done => |bytes| {
            // Ownership of `bytes` moves into the stream.
            this.state = .{ .done = &.{} };
            return jsc.WebCore.ReadableStream.fromOwnedSlice(globalObject, bytes, 0);
        },
        .err => |err| {
            _ = err;
            const empty = try jsc.WebCore.ReadableStream.empty(globalObject);
            jsc.WebCore.ReadableStream.cancel(&(try jsc.WebCore.ReadableStream.fromJS(empty, globalObject)).?, globalObject);
            return empty;
        },
    }
}

/// Convert completed output into a Node Buffer (transfers ownership of the
/// bytes); returns `undefined` if reading is not finished.
pub fn toBuffer(this: *PipeReader, globalThis: *jsc.JSGlobalObject) jsc.JSValue {
    switch (this.state) {
        .done => |bytes| {
            defer this.state = .{ .done = &.{} };
            return jsc.MarkedArrayBuffer.fromBytes(bytes, bun.default_allocator, .Uint8Array).toNodeBuffer(globalThis);
        },
        else => {
            return .js_undefined;
        },
    }
}

/// Error callback: free any already-buffered bytes, record the error, and
/// notify the owning process if still attached.
pub fn onReaderError(this: *PipeReader, err: bun.sys.Error) void {
    if (this.state == .done) {
        bun.default_allocator.free(this.state.done);
    }
    this.state = .{ .err = err };
    if (this.process) |process|
        process.onCloseIO(this.kind(process));
}

pub fn close(this: *PipeReader) void {
    switch (this.state) {
        .pending => {
            this.reader.close();
        },
        .done => {},
        .err => {},
    }
}

pub fn eventLoop(this: *PipeReader) *jsc.EventLoop {
    return this.event_loop;
}

pub fn loop(this: *PipeReader) *uws.Loop {
    return this.event_loop.virtual_machine.uwsLoop();
}

/// Ref-count destructor: asserts the reader is fully closed, frees any
/// remaining buffered bytes, and destroys this object.
fn deinit(this: *PipeReader) void {
    if (comptime Environment.isPosix) {
        bun.assert(this.reader.isDone());
    }

    if (comptime Environment.isWindows) {
        bun.assert(this.reader.source == null or this.reader.source.?.isClosed());
    }

    if (this.state == .done) {
        bun.default_allocator.free(this.state.done);
    }

    this.reader.deinit();
    bun.destroy(this);
}

const bun = @import("bun");
const Environment = bun.Environment;
const default_allocator = bun.default_allocator;
const uws = bun.uws;

const jsc = bun.jsc;
const JSGlobalObject = jsc.JSGlobalObject;
const JSValue = jsc.JSValue;

const Subprocess = jsc.API.Subprocess;
const MaxBuf = Subprocess.MaxBuf;
const StdioKind = Subprocess.StdioKind;
const StdioResult = Subprocess.StdioResult;

View File

@@ -1,334 +0,0 @@
pub const Writable = union(enum) {
pipe: *jsc.WebCore.FileSink,
fd: bun.FileDescriptor,
buffer: *StaticPipeWriter,
memfd: bun.FileDescriptor,
inherit: void,
ignore: void,
pub fn memoryCost(this: *const Writable) usize {
return switch (this.*) {
.pipe => |pipe| pipe.memoryCost(),
.buffer => |buffer| buffer.memoryCost(),
// TODO: memfd
else => 0,
};
}
pub fn hasPendingActivity(this: *const Writable) bool {
return switch (this.*) {
.pipe => false,
// we mark them as .ignore when they are closed, so this must be true
.buffer => true,
else => false,
};
}
pub fn ref(this: *Writable) void {
switch (this.*) {
.pipe => {
this.pipe.updateRef(true);
},
.buffer => {
this.buffer.updateRef(true);
},
else => {},
}
}
pub fn unref(this: *Writable) void {
switch (this.*) {
.pipe => {
this.pipe.updateRef(false);
},
.buffer => {
this.buffer.updateRef(false);
},
else => {},
}
}
// When the stream has closed we need to be notified to prevent a use-after-free
// We can test for this use-after-free by enabling hot module reloading on a file and then saving it twice
pub fn onClose(this: *Writable, _: ?bun.sys.Error) void {
const process: *Subprocess = @fieldParentPtr("stdin", this);
if (process.this_jsvalue != .zero) {
if (js.stdinGetCached(process.this_jsvalue)) |existing_value| {
jsc.WebCore.FileSink.JSSink.setDestroyCallback(existing_value, 0);
}
}
switch (this.*) {
.buffer => {
this.buffer.deref();
},
.pipe => {
this.pipe.deref();
},
else => {},
}
process.onStdinDestroyed();
this.* = .{
.ignore = {},
};
}
pub fn onReady(_: *Writable, _: ?jsc.WebCore.Blob.SizeType, _: ?jsc.WebCore.Blob.SizeType) void {}
pub fn onStart(_: *Writable) void {}
pub fn init(
stdio: *Stdio,
event_loop: *jsc.EventLoop,
subprocess: *Subprocess,
result: StdioResult,
promise_for_stream: *jsc.JSValue,
) !Writable {
Subprocess.assertStdioResult(result);
if (Environment.isWindows) {
switch (stdio.*) {
.pipe, .readable_stream => {
if (result == .buffer) {
const pipe = jsc.WebCore.FileSink.createWithPipe(event_loop, result.buffer);
switch (pipe.writer.startWithCurrentPipe()) {
.result => {},
.err => |err| {
_ = err; // autofix
pipe.deref();
if (stdio.* == .readable_stream) {
stdio.readable_stream.cancel(event_loop.global);
}
return error.UnexpectedCreatingStdin;
},
}
pipe.writer.setParent(pipe);
subprocess.weak_file_sink_stdin_ptr = pipe;
subprocess.ref();
subprocess.flags.deref_on_stdin_destroyed = true;
subprocess.flags.has_stdin_destructor_called = false;
if (stdio.* == .readable_stream) {
const assign_result = pipe.assignToStream(&stdio.readable_stream, event_loop.global);
if (assign_result.toError()) |err| {
pipe.deref();
subprocess.deref();
return event_loop.global.throwValue(err);
}
promise_for_stream.* = assign_result;
}
return Writable{
.pipe = pipe,
};
}
return Writable{ .inherit = {} };
},
.blob => |blob| {
return Writable{
.buffer = StaticPipeWriter.create(event_loop, subprocess, result, .{ .blob = blob }),
};
},
.array_buffer => |array_buffer| {
return Writable{
.buffer = StaticPipeWriter.create(event_loop, subprocess, result, .{ .array_buffer = array_buffer }),
};
},
.fd => |fd| {
return Writable{ .fd = fd };
},
.dup2 => |dup2| {
return Writable{ .fd = dup2.to.toFd() };
},
.inherit => {
return Writable{ .inherit = {} };
},
.memfd, .path, .ignore => {
return Writable{ .ignore = {} };
},
.ipc, .capture => {
return Writable{ .ignore = {} };
},
}
}
if (comptime Environment.isPosix) {
if (stdio.* == .pipe) {
_ = bun.sys.setNonblocking(result.?);
}
}
switch (stdio.*) {
.dup2 => @panic("TODO dup2 stdio"),
.pipe, .readable_stream => {
const pipe = jsc.WebCore.FileSink.create(event_loop, result.?);
switch (pipe.writer.start(pipe.fd, true)) {
.result => {},
.err => |err| {
_ = err; // autofix
pipe.deref();
if (stdio.* == .readable_stream) {
stdio.readable_stream.cancel(event_loop.global);
}
return error.UnexpectedCreatingStdin;
},
}
pipe.writer.handle.poll.flags.insert(.socket);
subprocess.weak_file_sink_stdin_ptr = pipe;
subprocess.ref();
subprocess.flags.has_stdin_destructor_called = false;
subprocess.flags.deref_on_stdin_destroyed = true;
if (stdio.* == .readable_stream) {
const assign_result = pipe.assignToStream(&stdio.readable_stream, event_loop.global);
if (assign_result.toError()) |err| {
pipe.deref();
subprocess.deref();
return event_loop.global.throwValue(err);
}
promise_for_stream.* = assign_result;
}
return Writable{
.pipe = pipe,
};
},
.blob => |blob| {
return Writable{
.buffer = StaticPipeWriter.create(event_loop, subprocess, result, .{ .blob = blob }),
};
},
.array_buffer => |array_buffer| {
return Writable{
.buffer = StaticPipeWriter.create(event_loop, subprocess, result, .{ .array_buffer = array_buffer }),
};
},
.memfd => |memfd| {
bun.assert(memfd != bun.invalid_fd);
return Writable{ .memfd = memfd };
},
.fd => {
return Writable{ .fd = result.? };
},
.inherit => {
return Writable{ .inherit = {} };
},
.path, .ignore => {
return Writable{ .ignore = {} };
},
.ipc, .capture => {
return Writable{ .ignore = {} };
},
}
}
/// Converts this stdin `Writable` into the JSValue exposed as `subprocess.stdin`.
///
/// For the `.pipe` variant, ownership of the `FileSink` is handed to the JS
/// side: this `Writable` is reset to `.ignore` so `finalize()`/`close()` will
/// not touch the pipe again. Two sub-cases follow:
///  - process already exited: finish the pipe immediately and return a plain
///    JS wrapper (no destructor hook, no ref on the subprocess);
///  - process still running: register the subprocess as the sink's destructor
///    target and take a ref that is released when the sink is destroyed
///    (`deref_on_stdin_destroyed`).
pub fn toJS(this: *Writable, globalThis: *jsc.JSGlobalObject, subprocess: *Subprocess) JSValue {
return switch (this.*) {
.fd => |fd| fd.toJS(globalThis),
// These variants have no JS-visible stdin handle.
.memfd, .ignore => .js_undefined,
.buffer, .inherit => .js_undefined,
.pipe => |pipe| {
// Transfer ownership to JS: this Writable no longer owns the pipe.
this.* = .{ .ignore = {} };
if (subprocess.process.hasExited() and !subprocess.flags.has_stdin_destructor_called) {
// onAttachedProcessExit() can call deref on the
// subprocess. Since we never called ref(), it would be
// unbalanced to do so, leading to a use-after-free.
// So, let's not do that.
// https://github.com/oven-sh/bun/pull/14092
bun.debugAssert(!subprocess.flags.deref_on_stdin_destroyed);
// Debug-only: snapshot the ref count to assert it is unchanged below.
const debug_ref_count = if (Environment.isDebug) subprocess.ref_count else 0;
pipe.onAttachedProcessExit(&subprocess.process.status);
if (Environment.isDebug) {
bun.debugAssert(subprocess.ref_count.get() == debug_ref_count.get());
}
return pipe.toJS(globalThis);
} else {
// Process is still alive: keep a weak back-pointer and a balanced
// ref that the sink's destructor releases later.
subprocess.flags.has_stdin_destructor_called = false;
subprocess.weak_file_sink_stdin_ptr = pipe;
subprocess.ref();
subprocess.flags.deref_on_stdin_destroyed = true;
// Clear the signal if it still points at us; the destructor hook
// below replaces that notification channel.
if (@intFromPtr(pipe.signal.ptr) == @intFromPtr(subprocess)) {
pipe.signal.clear();
}
return pipe.toJSWithDestructor(
globalThis,
jsc.WebCore.Sink.DestructorPtr.init(subprocess),
);
}
},
};
}
/// Releases the resources owned by this `Writable` when the owning
/// `Subprocess` is finalized.
///
/// Recovers the parent `Subprocess` via `@fieldParentPtr` (this Writable is
/// its `stdin` field), detaches any cached JS stdin sink's destroy callback,
/// then drops the variant-specific resource. Resets to `.ignore` where the
/// variant held ownership so a double-finalize is harmless.
pub fn finalize(this: *Writable) void {
const subprocess: *Subprocess = @fieldParentPtr("stdin", this);
if (subprocess.this_jsvalue != .zero) {
// If JS still holds a cached stdin sink, unhook its destructor so it
// cannot call back into the subprocess being torn down.
if (jsc.Codegen.JSSubprocess.stdinGetCached(subprocess.this_jsvalue)) |existing_value| {
jsc.WebCore.FileSink.JSSink.setDestroyCallback(existing_value, 0);
}
}
return switch (this.*) {
.pipe => |pipe| {
// Clear the signal only if it still points at this Writable.
if (pipe.signal.ptr == @as(*anyopaque, @ptrCast(this))) {
pipe.signal.clear();
}
pipe.deref();
this.* = .{ .ignore = {} };
},
.buffer => {
// Stop keeping the event loop alive, then drop our reference.
this.buffer.updateRef(false);
this.buffer.deref();
},
.memfd => |fd| {
fd.close();
this.* = .{ .ignore = {} };
},
.ignore => {},
// Borrowed/inherited descriptors are not owned here; nothing to free.
.fd, .inherit => {},
};
}
/// Closes the underlying stdin resource without destroying the `Writable`
/// itself; variants that owned a descriptor are reset to `.ignore`.
pub fn close(this: *Writable) void {
    switch (this.*) {
        .pipe => |pipe| {
            // Signal end-of-stream to the sink; ownership is released
            // later in finalize(), not here.
            _ = pipe.end(null);
        },
        .buffer => this.buffer.close(),
        .memfd => |fd| {
            fd.close();
            this.* = .{ .ignore = {} };
        },
        .fd => this.* = .{ .ignore = {} },
        // Nothing to close for these variants.
        .ignore, .inherit => {},
    }
}
};
const bun = @import("bun");
const Environment = bun.Environment;
const Stdio = bun.spawn.Stdio;
const jsc = bun.jsc;
const JSGlobalObject = jsc.JSGlobalObject;
const JSValue = jsc.JSValue;
const Subprocess = jsc.API.Subprocess;
const StaticPipeWriter = Subprocess.StaticPipeWriter;
const StdioResult = Subprocess.StdioResult;
const js = Subprocess.js;

View File

@@ -693,12 +693,12 @@ pub const JSPasswordObject = struct {
if (owned_hash.len == 0) {
bun.default_allocator.free(owned_password);
return jsc.JSPromise.resolvedPromiseValue(globalObject, .false);
return jsc.JSPromise.resolvedPromiseValue(globalObject, jsc.JSValue.jsBoolean(false));
}
if (owned_password.len == 0) {
bun.default_allocator.free(owned_hash);
return jsc.JSPromise.resolvedPromiseValue(globalObject, .false);
return jsc.JSPromise.resolvedPromiseValue(globalObject, jsc.JSValue.jsBoolean(false));
}
return verify(globalObject, owned_password, owned_hash, algorithm, false);
@@ -743,11 +743,11 @@ pub const JSPasswordObject = struct {
defer hash_.deinit();
if (hash_.slice().len == 0) {
return .false;
return jsc.JSValue.jsBoolean(false);
}
if (password.slice().len == 0) {
return .false;
return jsc.JSValue.jsBoolean(false);
}
return verify(globalObject, password.slice(), hash_.slice(), algorithm, true);

View File

@@ -60,19 +60,12 @@ pub const HTMLRewriter = struct {
listener: JSValue,
) bun.JSError!JSValue {
const selector_slice = std.fmt.allocPrint(bun.default_allocator, "{}", .{selector_name}) catch bun.outOfMemory();
defer bun.default_allocator.free(selector_slice);
var selector = LOLHTML.HTMLSelector.parse(selector_slice) catch
return global.throwValue(createLOLHTMLError(global));
errdefer selector.deinit();
return createLOLHTMLError(global);
const handler_ = try ElementHandler.init(global, listener);
const handler = bun.default_allocator.create(ElementHandler) catch bun.outOfMemory();
handler.* = handler_;
errdefer {
handler.deinit();
bun.default_allocator.destroy(handler);
}
this.builder.addElementContentHandlers(
selector,
@@ -98,7 +91,8 @@ pub const HTMLRewriter = struct {
else
null,
) catch {
return global.throwValue(createLOLHTMLError(global));
selector.deinit();
return createLOLHTMLError(global);
};
this.context.selectors.append(bun.default_allocator, selector) catch bun.outOfMemory();
@@ -116,10 +110,6 @@ pub const HTMLRewriter = struct {
const handler = bun.default_allocator.create(DocumentHandler) catch bun.outOfMemory();
handler.* = handler_;
errdefer {
handler.deinit();
bun.default_allocator.destroy(handler);
}
// If this fails, subsequent calls to write or end should throw
this.builder.addDocumentContentHandlers(
@@ -893,11 +883,6 @@ fn HandlerCallback(
wrapper.deref();
}
// Use a CatchScope to properly handle exceptions from the JavaScript callback
var scope: bun.jsc.CatchScope = undefined;
scope.init(this.global, @src());
defer scope.deinit();
const result = @field(this, callback_name).?.call(
this.global,
if (comptime @hasField(HandlerType, "thisObject"))
@@ -906,36 +891,10 @@ fn HandlerCallback(
JSValue.zero,
&.{wrapper.toJS(this.global)},
) catch {
// If there's an exception in the scope, capture it for later retrieval
if (scope.exception()) |exc| {
const exc_value = JSValue.fromCell(exc);
// Store the exception in the VM's unhandled rejection capture mechanism
// if it's available (this is the same mechanism used by BufferOutputSink)
if (this.global.bunVM().unhandled_pending_rejection_to_capture) |err_ptr| {
err_ptr.* = exc_value;
exc_value.protect();
}
}
// Clear the exception from the scope to prevent assertion failures
scope.clearException();
// Return true to indicate failure to LOLHTML, which will cause the write
// operation to fail and the error handling logic to take over.
// If there's an error, we'll propagate it to the caller.
return true;
};
// Check if there's an exception that was thrown but not caught by the error union
if (scope.exception()) |exc| {
const exc_value = JSValue.fromCell(exc);
// Store the exception in the VM's unhandled rejection capture mechanism
if (this.global.bunVM().unhandled_pending_rejection_to_capture) |err_ptr| {
err_ptr.* = exc_value;
exc_value.protect();
}
// Clear the exception to prevent assertion failures
scope.clearException();
return true;
}
if (!result.isUndefinedOrNull()) {
if (result.isError() or result.isAggregateError(this.global)) {
return true;
@@ -1642,19 +1601,19 @@ pub const AttributeIterator = struct {
const value_label = jsc.ZigString.static("value");
if (this.iterator == null) {
return JSValue.createObject2(globalObject, done_label, value_label, .true, .js_undefined);
return JSValue.createObject2(globalObject, done_label, value_label, JSValue.jsBoolean(true), .js_undefined);
}
var attribute = this.iterator.?.next() orelse {
this.iterator.?.deinit();
this.iterator = null;
return JSValue.createObject2(globalObject, done_label, value_label, .true, .js_undefined);
return JSValue.createObject2(globalObject, done_label, value_label, JSValue.jsBoolean(true), .js_undefined);
};
const value = attribute.value();
const name = attribute.name();
return JSValue.createObject2(globalObject, done_label, value_label, .false, try bun.String.toJSArray(
return JSValue.createObject2(globalObject, done_label, value_label, JSValue.jsBoolean(false), try bun.String.toJSArray(
globalObject,
&[_]bun.String{
name.toString(),
@@ -1741,7 +1700,7 @@ pub const Element = struct {
/// Returns a boolean indicating whether an attribute exists on the element.
pub fn hasAttribute_(this: *Element, global: *JSGlobalObject, name: ZigString) JSValue {
if (this.element == null)
return .false;
return JSValue.jsBoolean(false);
var slice = name.toSlice(bun.default_allocator);
defer slice.deinit();

View File

@@ -0,0 +1,85 @@
import { define } from "../../codegen/class-definitions";
// Class definitions consumed by Bun's class-definition codegen to generate
// the native JS bindings for the SQL driver objects. Each `define(...)` entry
// maps JS-visible properties/methods to native implementation symbols.
export default [
// Connection object: lifecycle (close/ref/unref/flush), connection state,
// and event-handler accessors (`onconnect`, `onclose`).
define({
name: "PostgresSQLConnection",
construct: true,
finalize: true,
configurable: false,
// Keeps the object alive while native work is pending.
hasPendingActivity: true,
klass: {
// escapeString: {
// fn: "escapeString",
// },
// escapeIdentifier: {
// fn: "escapeIdentifier",
// },
},
JSType: "0b11101110",
proto: {
close: {
fn: "doClose",
},
connected: {
getter: "getConnected",
},
ref: {
fn: "doRef",
},
unref: {
fn: "doUnref",
},
flush: {
fn: "doFlush",
},
queries: {
getter: "getQueries",
// `this: true` passes the JS wrapper value through to the native getter.
this: true,
},
onconnect: {
getter: "getOnConnect",
setter: "setOnConnect",
this: true,
},
onclose: {
getter: "getOnClose",
setter: "setOnClose",
this: true,
},
},
// Internal slots cached on the JS object.
values: ["onconnect", "onclose", "queries"],
}),
// Query object: execution (run/cancel/done) plus mode and pending-value
// configuration used while results stream in.
define({
name: "PostgresSQLQuery",
construct: true,
finalize: true,
configurable: false,
JSType: "0b11101110",
klass: {},
proto: {
run: {
fn: "doRun",
length: 2,
},
cancel: {
fn: "doCancel",
length: 0,
},
done: {
fn: "doDone",
length: 0,
},
setMode: {
fn: "setMode",
length: 1,
},
setPendingValue: {
fn: "setPendingValue",
length: 1,
},
},
values: ["pendingValue", "target", "columns", "binding"],
// Report native memory to the GC for pressure accounting.
estimatedSize: true,
}),
];

View File

@@ -709,9 +709,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d
return globalThis.throw("publish requires a non-empty topic", .{});
}
// https://github.com/ziglang/zig/issues/24563
const compress_js = compress_value orelse .true;
const compress = compress_js.toBoolean();
const compress = (compress_value orelse JSValue.jsBoolean(true)).toBoolean();
if (message_value.asArrayBuffer(globalThis)) |buffer| {
return JSValue.jsNumber(
@@ -752,12 +750,12 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d
}
if (this.flags.terminated) {
return .false;
return JSValue.jsBoolean(false);
}
if (object.as(NodeHTTPResponse)) |nodeHttpResponse| {
if (nodeHttpResponse.flags.ended or nodeHttpResponse.flags.socket_closed) {
return .false;
return .jsBoolean(false);
}
var data_value = jsc.JSValue.zero;
@@ -841,14 +839,14 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d
return globalThis.throwInvalidArguments("upgrade requires a Request object", .{});
};
var upgrader = request.request_context.get(RequestContext) orelse return .false;
var upgrader = request.request_context.get(RequestContext) orelse return .jsBoolean(false);
if (upgrader.isAbortedOrEnded()) {
return .false;
return .jsBoolean(false);
}
if (upgrader.upgrade_context == null or @intFromPtr(upgrader.upgrade_context) == std.math.maxInt(usize)) {
return .false;
return .jsBoolean(false);
}
const resp = upgrader.resp.?;
@@ -880,7 +878,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d
}
if (sec_websocket_key_str.len == 0) {
return .false;
return .jsBoolean(false);
}
if (sec_websocket_protocol.len > 0) {
@@ -1004,7 +1002,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d
ctx,
);
return .true;
return .jsBoolean(true);
}
pub fn onReloadFromZig(this: *ThisServer, new_config: *ServerConfig, globalThis: *jsc.JSGlobalObject) void {

View File

@@ -122,37 +122,13 @@ pub fn getServerSocketValue(this: *NodeHTTPResponse) jsc.JSValue {
pub fn pauseSocket(this: *NodeHTTPResponse) void {
log("pauseSocket", .{});
if (this.flags.socket_closed or this.flags.upgraded) {
return;
}
this.raw_response.pause();
}
pub fn resumeSocket(this: *NodeHTTPResponse) void {
log("resumeSocket", .{});
if (this.flags.socket_closed or this.flags.upgraded) {
return;
}
this.raw_response.@"resume"();
}
const OnBeforeOpen = struct {
this: *NodeHTTPResponse,
socketValue: jsc.JSValue,
globalObject: *jsc.JSGlobalObject,
pub fn onBeforeOpen(ctx: *OnBeforeOpen, js_websocket: JSValue, socket: *uws.RawWebSocket) void {
Bun__setNodeHTTPServerSocketUsSocketValue(ctx.socketValue, socket.asSocket());
ServerWebSocket.js.gc.socket.set(js_websocket, ctx.globalObject, ctx.socketValue);
ctx.this.flags.upgraded = true;
defer ctx.this.js_ref.unref(ctx.globalObject.bunVM());
switch (ctx.this.raw_response) {
.SSL => ctx.this.raw_response = uws.AnyResponse.init(uws.NewApp(true).Response.castRes(@alignCast(@ptrCast(socket)))),
.TCP => ctx.this.raw_response = uws.AnyResponse.init(uws.NewApp(false).Response.castRes(@alignCast(@ptrCast(socket)))),
}
}
};
pub fn upgrade(this: *NodeHTTPResponse, data_value: JSValue, sec_websocket_protocol: ZigString, sec_websocket_extensions: ZigString) bool {
const upgrade_ctx = this.upgrade_context.context orelse return false;
const ws_handler = this.server.webSocketHandler() orelse return false;
@@ -173,18 +149,61 @@ pub fn upgrade(this: *NodeHTTPResponse, data_value: JSValue, sec_websocket_proto
.this_value = data_value,
});
var new_socket: ?*uws.Socket = null;
defer if (new_socket) |socket| {
this.flags.upgraded = true;
Bun__setNodeHTTPServerSocketUsSocketValue(socketValue, socket);
ServerWebSocket.js.socketSetCached(ws.getThisValue(), ws_handler.globalObject, socketValue);
defer this.js_ref.unref(jsc.VirtualMachine.get());
switch (this.raw_response) {
.SSL => this.raw_response = uws.AnyResponse.init(uws.NewApp(true).Response.castRes(@alignCast(@ptrCast(socket)))),
.TCP => this.raw_response = uws.AnyResponse.init(uws.NewApp(false).Response.castRes(@alignCast(@ptrCast(socket)))),
}
};
if (this.upgrade_context.request) |request| {
this.upgrade_context = .{};
var sec_websocket_protocol_str: ?ZigString.Slice = null;
var sec_websocket_extensions_str: ?ZigString.Slice = null;
const sec_websocket_protocol_value = brk: {
if (sec_websocket_protocol.isEmpty()) {
break :brk request.header("sec-websocket-protocol") orelse "";
}
sec_websocket_protocol_str = sec_websocket_protocol.toSlice(bun.default_allocator);
break :brk sec_websocket_protocol_str.?.slice();
};
const sec_websocket_extensions_value = brk: {
if (sec_websocket_extensions.isEmpty()) {
break :brk request.header("sec-websocket-extensions") orelse "";
}
sec_websocket_extensions_str = sec_websocket_protocol.toSlice(bun.default_allocator);
break :brk sec_websocket_extensions_str.?.slice();
};
defer {
if (sec_websocket_protocol_str) |str| str.deinit();
if (sec_websocket_extensions_str) |str| str.deinit();
}
new_socket = this.raw_response.upgrade(
*ServerWebSocket,
ws,
request.header("sec-websocket-key") orelse "",
sec_websocket_protocol_value,
sec_websocket_extensions_value,
upgrade_ctx,
);
return true;
}
var sec_websocket_protocol_str: ?ZigString.Slice = null;
defer if (sec_websocket_protocol_str) |*str| str.deinit();
var sec_websocket_extensions_str: ?ZigString.Slice = null;
defer if (sec_websocket_extensions_str) |*str| str.deinit();
const sec_websocket_protocol_value = brk: {
if (sec_websocket_protocol.isEmpty()) {
if (this.upgrade_context.request) |request| {
break :brk request.header("sec-websocket-protocol") orelse "";
} else {
break :brk this.upgrade_context.sec_websocket_protocol;
}
break :brk this.upgrade_context.sec_websocket_protocol;
}
sec_websocket_protocol_str = sec_websocket_protocol.toSlice(bun.default_allocator);
break :brk sec_websocket_protocol_str.?.slice();
@@ -192,48 +211,35 @@ pub fn upgrade(this: *NodeHTTPResponse, data_value: JSValue, sec_websocket_proto
const sec_websocket_extensions_value = brk: {
if (sec_websocket_extensions.isEmpty()) {
if (this.upgrade_context.request) |request| {
break :brk request.header("sec-websocket-extensions") orelse "";
} else {
break :brk this.upgrade_context.sec_websocket_extensions;
}
break :brk this.upgrade_context.sec_websocket_extensions;
}
sec_websocket_extensions_str = sec_websocket_extensions.toSlice(bun.default_allocator);
sec_websocket_extensions_str = sec_websocket_protocol.toSlice(bun.default_allocator);
break :brk sec_websocket_extensions_str.?.slice();
};
defer {
if (sec_websocket_protocol_str) |str| str.deinit();
if (sec_websocket_extensions_str) |str| str.deinit();
}
const websocket_key = if (this.upgrade_context.request) |request|
request.header("sec-websocket-key") orelse ""
else
this.upgrade_context.sec_websocket_key;
var on_before_open = OnBeforeOpen{
.this = this,
.socketValue = socketValue,
.globalObject = this.server.globalThis(),
};
var on_before_open_ptr = WebSocketServerContext.Handler.OnBeforeOpen{
.ctx = &on_before_open,
.callback = @ptrCast(&OnBeforeOpen.onBeforeOpen),
};
this.server.webSocketHandler().?.onBeforeOpen = &on_before_open_ptr;
_ = this.raw_response.upgrade(*ServerWebSocket, ws, websocket_key, sec_websocket_protocol_value, sec_websocket_extensions_value, upgrade_ctx);
new_socket = this.raw_response.upgrade(
*ServerWebSocket,
ws,
this.upgrade_context.sec_websocket_key,
sec_websocket_protocol_value,
sec_websocket_extensions_value,
upgrade_ctx,
);
return true;
}
pub fn maybeStopReadingBody(this: *NodeHTTPResponse, vm: *jsc.VirtualMachine, thisValue: jsc.JSValue) void {
this.upgrade_context.deinit(); // we can discard the upgrade context now
if ((this.flags.upgraded or this.flags.socket_closed or this.flags.ended) and
if ((this.flags.socket_closed or this.flags.ended) and
(this.body_read_ref.has or this.body_read_state == .pending) and
(!this.flags.hasCustomOnData or js.onDataGetCached(thisValue) == null))
{
const had_ref = this.body_read_ref.has;
if (!this.flags.upgraded and !this.flags.socket_closed) {
this.raw_response.clearOnData();
}
this.raw_response.clearOnData();
this.body_read_ref.unref(vm);
this.body_read_state = .done;
@@ -572,7 +578,7 @@ pub fn onTimeout(this: *NodeHTTPResponse, _: uws.AnyResponse) void {
pub fn doPause(this: *NodeHTTPResponse, _: *jsc.JSGlobalObject, _: *jsc.CallFrame, thisValue: jsc.JSValue) bun.JSError!jsc.JSValue {
log("doPause", .{});
if (this.flags.request_has_completed or this.flags.socket_closed or this.flags.ended or this.flags.upgraded) {
if (this.flags.request_has_completed or this.flags.socket_closed or this.flags.ended) {
return .false;
}
if (this.body_read_ref.has and js.onDataGetCached(thisValue) == null) {
@@ -602,11 +608,11 @@ fn drainBufferedRequestBodyFromPause(this: *NodeHTTPResponse, globalObject: *jsc
pub fn doResume(this: *NodeHTTPResponse, globalObject: *jsc.JSGlobalObject, _: *jsc.CallFrame) jsc.JSValue {
log("doResume", .{});
if (this.flags.request_has_completed or this.flags.socket_closed or this.flags.ended or this.flags.upgraded) {
if (this.flags.request_has_completed or this.flags.socket_closed or this.flags.ended) {
return .false;
}
var result: jsc.JSValue = .true;
var result = jsc.JSValue.true;
if (this.flags.is_data_buffered_during_pause) {
this.raw_response.clearOnData();
this.flags.is_data_buffered_during_pause = false;
@@ -665,7 +671,7 @@ pub export fn Bun__NodeHTTPRequest__onReject(globalObject: *jsc.JSGlobalObject,
defer this.deref();
if (!this.flags.request_has_completed and !this.flags.socket_closed and !this.flags.upgraded) {
if (!this.flags.request_has_completed and !this.flags.socket_closed) {
const this_value = this.getThisValue();
if (this_value != .zero) {
js.onAbortedSetCached(this_value, globalObject, .zero);
@@ -781,7 +787,7 @@ fn onDrain(this: *NodeHTTPResponse, offset: u64, response: uws.AnyResponse) bool
this.ref();
defer this.deref();
response.clearOnWritable();
if (this.flags.socket_closed or this.flags.request_has_completed or this.flags.upgraded) {
if (this.flags.socket_closed or this.flags.request_has_completed) {
// return false means we don't have anything to drain
return false;
}
@@ -957,14 +963,14 @@ pub fn getOnWritable(_: *NodeHTTPResponse, thisValue: jsc.JSValue, _: *jsc.JSGlo
}
pub fn getOnAbort(this: *NodeHTTPResponse, thisValue: jsc.JSValue, _: *jsc.JSGlobalObject) jsc.JSValue {
if (this.flags.socket_closed or this.flags.upgraded) {
if (this.flags.socket_closed) {
return .js_undefined;
}
return js.onAbortedGetCached(thisValue) orelse .js_undefined;
}
pub fn setOnAbort(this: *NodeHTTPResponse, thisValue: jsc.JSValue, globalObject: *jsc.JSGlobalObject, value: JSValue) void {
if (this.flags.socket_closed or this.flags.upgraded) {
if (this.flags.socket_closed) {
return;
}
@@ -996,7 +1002,7 @@ fn clearOnDataCallback(this: *NodeHTTPResponse, thisValue: jsc.JSValue, globalOb
if (thisValue != .zero) {
js.onDataSetCached(thisValue, globalObject, .js_undefined);
}
if (!this.flags.socket_closed and !this.flags.upgraded)
if (!this.flags.socket_closed)
this.raw_response.clearOnData();
if (this.body_read_state != .done) {
this.body_read_state = .done;
@@ -1005,7 +1011,7 @@ fn clearOnDataCallback(this: *NodeHTTPResponse, thisValue: jsc.JSValue, globalOb
}
pub fn setOnData(this: *NodeHTTPResponse, thisValue: jsc.JSValue, globalObject: *jsc.JSGlobalObject, value: JSValue) void {
if (value.isUndefined() or this.flags.ended or this.flags.socket_closed or this.body_read_state == .none or this.flags.is_data_buffered_during_pause_last or this.flags.upgraded) {
if (value.isUndefined() or this.flags.ended or this.flags.socket_closed or this.body_read_state == .none or this.flags.is_data_buffered_during_pause_last) {
js.onDataSetCached(thisValue, globalObject, .js_undefined);
defer {
if (this.body_read_ref.has) {
@@ -1014,7 +1020,7 @@ pub fn setOnData(this: *NodeHTTPResponse, thisValue: jsc.JSValue, globalObject:
}
switch (this.body_read_state) {
.pending, .done => {
if (!this.flags.request_has_completed and !this.flags.socket_closed and !this.flags.upgraded) {
if (!this.flags.request_has_completed and !this.flags.socket_closed) {
this.raw_response.clearOnData();
}
this.body_read_state = .done;
@@ -1042,7 +1048,7 @@ pub fn write(this: *NodeHTTPResponse, globalObject: *jsc.JSGlobalObject, callfra
}
pub fn flushHeaders(this: *NodeHTTPResponse, _: *jsc.JSGlobalObject, _: *jsc.CallFrame) bun.JSError!jsc.JSValue {
if (!this.flags.socket_closed and !this.flags.upgraded)
if (!this.flags.socket_closed)
this.raw_response.flushHeaders();
return .js_undefined;
@@ -1068,7 +1074,7 @@ fn handleCorked(globalObject: *jsc.JSGlobalObject, function: jsc.JSValue, result
}
pub fn setTimeout(this: *NodeHTTPResponse, seconds: u8) void {
if (this.flags.request_has_completed or this.flags.socket_closed or this.flags.upgraded) {
if (this.flags.request_has_completed or this.flags.socket_closed) {
return;
}
@@ -1081,7 +1087,7 @@ export fn NodeHTTPResponse__setTimeout(this: *NodeHTTPResponse, seconds: jsc.JSV
return false;
}
if (this.flags.request_has_completed or this.flags.socket_closed or this.flags.upgraded) {
if (this.flags.request_has_completed or this.flags.socket_closed) {
return false;
}
@@ -1099,7 +1105,7 @@ pub fn cork(this: *NodeHTTPResponse, globalObject: *jsc.JSGlobalObject, callfram
return globalObject.throwInvalidArgumentTypeValue("cork", "function", arguments[0]);
}
if (this.flags.request_has_completed or this.flags.socket_closed or this.flags.upgraded) {
if (this.flags.request_has_completed or this.flags.socket_closed) {
return globalObject.ERR(.STREAM_ALREADY_FINISHED, "Stream is already ended", .{}).throw();
}
@@ -1157,7 +1163,6 @@ pub export fn Bun__NodeHTTPResponse_setClosed(response: *NodeHTTPResponse) void
const string = []const u8;
const WebSocketServerContext = @import("./WebSocketServerContext.zig");
const std = @import("std");
const bun = @import("bun");

View File

@@ -915,32 +915,31 @@ pub fn fromJS(
args.ssl_config = null;
} else if (tls.jsType().isArray()) {
var value_iter = try tls.arrayIterator(global);
if (value_iter.len == 0) {
// Empty TLS array means no TLS - this is valid
} else {
while (try value_iter.next()) |item| {
var ssl_config = try SSLConfig.fromJS(vm, global, item) orelse {
if (global.hasException()) {
return error.JSError;
}
// Backwards-compatibility; we ignored empty tls objects.
continue;
};
if (args.ssl_config == null) {
args.ssl_config = ssl_config;
} else {
if (ssl_config.server_name == null or std.mem.span(ssl_config.server_name).len == 0) {
defer ssl_config.deinit();
return global.throwInvalidArguments("SNI tls object must have a serverName", .{});
}
if (args.sni == null) {
args.sni = bun.BabyList(SSLConfig).initCapacity(bun.default_allocator, value_iter.len - 1) catch bun.outOfMemory();
}
args.sni.?.push(bun.default_allocator, ssl_config) catch bun.outOfMemory();
if (value_iter.len == 1) {
return global.throwInvalidArguments("tls option expects at least 1 tls object", .{});
}
while (try value_iter.next()) |item| {
var ssl_config = try SSLConfig.fromJS(vm, global, item) orelse {
if (global.hasException()) {
return error.JSError;
}
// Backwards-compatibility; we ignored empty tls objects.
continue;
};
if (args.ssl_config == null) {
args.ssl_config = ssl_config;
} else {
if (ssl_config.server_name == null or std.mem.span(ssl_config.server_name).len == 0) {
defer ssl_config.deinit();
return global.throwInvalidArguments("SNI tls object must have a serverName", .{});
}
if (args.sni == null) {
args.sni = bun.BabyList(SSLConfig).initCapacity(bun.default_allocator, value_iter.len - 1) catch bun.outOfMemory();
}
args.sni.?.push(bun.default_allocator, ssl_config) catch bun.outOfMemory();
}
}
} else {

View File

@@ -73,20 +73,9 @@ pub fn onOpen(this: *ServerWebSocket, ws: uws.AnyWebSocket) void {
js.dataSetCached(current_this, globalObject, value_to_cache);
}
if (onOpenHandler.isEmptyOrUndefinedOrNull()) {
if (bun.take(&this.handler.onBeforeOpen)) |on_before_open| {
// Only create the "this" value if needed.
const this_value = this.getThisValue();
on_before_open.callback(on_before_open.ctx, this_value, ws.raw());
}
return;
}
if (onOpenHandler.isEmptyOrUndefinedOrNull()) return;
const this_value = this.getThisValue();
var args = [_]JSValue{this_value};
if (bun.take(&this.handler.onBeforeOpen)) |on_before_open| {
on_before_open.callback(on_before_open.ctx, this_value, ws.raw());
}
const loop = vm.eventLoop();
loop.enter();
@@ -1189,7 +1178,7 @@ pub fn subscribe(
}
if (this.isClosed()) {
return .true;
return JSValue.jsBoolean(true);
}
if (!args.ptr[0].isString()) {
@@ -1212,7 +1201,7 @@ pub fn unsubscribe(this: *ServerWebSocket, globalThis: *jsc.JSGlobalObject, call
}
if (this.isClosed()) {
return .true;
return JSValue.jsBoolean(true);
}
if (!args.ptr[0].isString()) {
@@ -1239,7 +1228,7 @@ pub fn isSubscribed(
}
if (this.isClosed()) {
return .false;
return JSValue.jsBoolean(false);
}
if (!args.ptr[0].isString()) {

View File

@@ -28,25 +28,12 @@ pub const Handler = struct {
globalObject: *jsc.JSGlobalObject = undefined,
active_connections: usize = 0,
/// Only used by NodeHTTPResponse.
///
/// Before we call into JavaScript and after the WebSocket is upgraded, we need to call a function in NodeHTTPResponse.
///
/// This is per-ServerWebSocket data, so it needs to be null'd on usage.
onBeforeOpen: ?*OnBeforeOpen = null,
/// used by publish()
flags: packed struct(u8) {
flags: packed struct(u2) {
ssl: bool = false,
publish_to_self: bool = false,
_: u6 = 0,
} = .{},
pub const OnBeforeOpen = struct {
ctx: *anyopaque,
callback: *const fn (*anyopaque, this_value: jsc.JSValue, socket: *uws.RawWebSocket) void,
};
pub fn runErrorCallback(this: *const Handler, vm: *jsc.VirtualMachine, globalObject: *jsc.JSGlobalObject, error_value: jsc.JSValue) void {
const onError = this.onError;
if (!onError.isEmptyOrUndefinedOrNull()) {

View File

@@ -459,7 +459,7 @@ export default [
finalize: true,
estimatedSize: true,
// inspectCustom: true,
structuredClone: { transferable: false, tag: 251, storable: false },
structuredClone: { transferable: false, tag: 251 },
JSType: "0b11101110",
klass: {
isBlockList: {

View File

@@ -27,5 +27,6 @@ export default [
constructNeedsThis: true,
memoryCost: true,
estimatedSize: true,
structuredClone: false,
}),
];

View File

@@ -1,94 +0,0 @@
import { define } from "../../codegen/class-definitions";
// Generates one Connection and one Query class definition per SQL backend so
// both drivers share identical JS-visible shapes. Consumed by the
// class-definition codegen.
const types = ["PostgresSQL", "MySQL"];
const classes = [];
for (const type of types) {
classes.push(
define({
name: `${type}Connection`,
construct: true,
finalize: true,
configurable: false,
// Keeps the object alive while native work is pending.
hasPendingActivity: true,
klass: {
// escapeString: {
// fn: "escapeString",
// },
// escapeIdentifier: {
// fn: "escapeIdentifier",
// },
},
JSType: "0b11101110",
proto: {
close: {
fn: "doClose",
},
connected: {
getter: "getConnected",
},
ref: {
fn: "doRef",
},
unref: {
fn: "doUnref",
},
flush: {
fn: "doFlush",
},
queries: {
getter: "getQueries",
// `this: true` passes the JS wrapper value through to the native getter.
this: true,
},
onconnect: {
getter: "getOnConnect",
setter: "setOnConnect",
this: true,
},
onclose: {
getter: "getOnClose",
setter: "setOnClose",
this: true,
},
},
// Internal slots cached on the JS object.
values: ["onconnect", "onclose", "queries"],
}),
);
classes.push(
define({
name: `${type}Query`,
construct: true,
finalize: true,
configurable: false,
JSType: "0b11101110",
klass: {},
proto: {
run: {
fn: "doRun",
length: 2,
},
cancel: {
fn: "doCancel",
length: 0,
},
done: {
fn: "doDone",
length: 0,
},
setMode: {
fn: "setMode",
length: 1,
},
setPendingValue: {
fn: "setPendingValue",
length: 1,
},
},
values: ["pendingValue", "target", "columns", "binding"],
// Report native memory to the GC for pressure accounting.
estimatedSize: true,
}),
);
}
export default classes;

View File

@@ -72,10 +72,6 @@ BUN_DECLARE_HOST_FUNCTION(Bun__fetchPreconnect);
BUN_DECLARE_HOST_FUNCTION(Bun__randomUUIDv7);
BUN_DECLARE_HOST_FUNCTION(Bun__randomUUIDv5);
namespace Bun {
JSC_DECLARE_HOST_FUNCTION(jsFunctionBunStripANSI);
}
using namespace JSC;
using namespace WebCore;
@@ -307,9 +303,6 @@ static JSValue defaultBunSQLObject(VM& vm, JSObject* bunObject)
auto scope = DECLARE_THROW_SCOPE(vm);
auto* globalObject = defaultGlobalObject(bunObject->globalObject());
JSValue sqlValue = globalObject->internalModuleRegistry()->requireId(globalObject, vm, InternalModuleRegistry::BunSql);
#if BUN_DEBUG
if (scope.exception()) globalObject->reportUncaughtExceptionAtEventLoop(globalObject, scope.exception());
#endif
RETURN_IF_EXCEPTION(scope, {});
RELEASE_AND_RETURN(scope, sqlValue.getObject()->get(globalObject, vm.propertyNames->defaultKeyword));
}
@@ -319,9 +312,6 @@ static JSValue constructBunSQLObject(VM& vm, JSObject* bunObject)
auto scope = DECLARE_THROW_SCOPE(vm);
auto* globalObject = defaultGlobalObject(bunObject->globalObject());
JSValue sqlValue = globalObject->internalModuleRegistry()->requireId(globalObject, vm, InternalModuleRegistry::BunSql);
#if BUN_DEBUG
if (scope.exception()) globalObject->reportUncaughtExceptionAtEventLoop(globalObject, scope.exception());
#endif
RETURN_IF_EXCEPTION(scope, {});
auto clientData = WebCore::clientData(vm);
RELEASE_AND_RETURN(scope, sqlValue.getObject()->get(globalObject, clientData->builtinNames().SQLPublicName()));
@@ -792,7 +782,6 @@ JSC_DEFINE_HOST_FUNCTION(functionFileURLToPath, (JSC::JSGlobalObject * globalObj
stdin BunObject_lazyPropCb_wrap_stdin DontDelete|PropertyCallback
stdout BunObject_lazyPropCb_wrap_stdout DontDelete|PropertyCallback
stringWidth Generated::BunObject::jsStringWidth DontDelete|Function 2
stripANSI jsFunctionBunStripANSI DontDelete|Function 1
unsafe BunObject_lazyPropCb_wrap_unsafe DontDelete|PropertyCallback
version constructBunVersion ReadOnly|DontDelete|PropertyCallback
which BunObject_callback_which DontDelete|Function 1

View File

@@ -1,6 +1,5 @@
#include "BunString.h"
#include "helpers.h"
#include "root.h"
#include "headers-handwritten.h"
@@ -280,71 +279,6 @@ BunString toStringView(StringView view)
};
}
// We don't want to ban atomiziation for tiny strings that are potentially going
// to appear as properties/identifiers in JS. So we should only do this for long
// strings that are unlikely to ever be atomized.
static constexpr unsigned int kMinCrossThreadShareableLength = 256;
bool isCrossThreadShareable(const WTF::String& string)
{
if (string.length() < kMinCrossThreadShareableLength)
return false;
const auto* impl = string.impl();
// 1) Never share AtomStringImpl/symbols - they have special thread-unsafe behavior
if (impl->isAtom() || impl->isSymbol())
return false;
// 2) Don't share slices
if (impl->bufferOwnership() == StringImpl::BufferSubstring)
return false;
return true;
}
Ref<WTF::StringImpl> toCrossThreadShareable(Ref<WTF::StringImpl> impl)
{
if (impl->isAtom() || impl->isSymbol())
return impl->isolatedCopy();
if (impl->bufferOwnership() == StringImpl::BufferSubstring)
return impl->isolatedCopy();
if (impl->length() < kMinCrossThreadShareableLength)
return impl->isolatedCopy();
// 3) Ensure we won't lazily touch hash/flags on the consumer thread
// Force hash computation on this thread before sharing
impl->hash();
impl->setNeverAtomize();
return impl;
}
WTF::String toCrossThreadShareable(const WTF::String& string)
{
if (string.length() < kMinCrossThreadShareableLength)
return string.isolatedCopy();
auto* impl = string.impl();
// 1) Never share AtomStringImpl/symbols - they have special thread-unsafe behavior
if (impl->isAtom() || impl->isSymbol())
return string.isolatedCopy();
// 2) Don't share slices
if (impl->bufferOwnership() == StringImpl::BufferSubstring)
return string.isolatedCopy();
// 3) Ensure we won't lazily touch hash/flags on the consumer thread
// Force hash computation on this thread before sharing
const_cast<StringImpl*>(impl)->hash();
const_cast<StringImpl*>(impl)->setNeverAtomize();
return string;
}
}
extern "C" JSC::EncodedJSValue BunString__toJS(JSC::JSGlobalObject* globalObject, const BunString* bunString)

View File

@@ -55,9 +55,4 @@ public:
return std::span(reinterpret_cast<const char*>(m_view.span8().data()), m_view.length());
}
};
bool isCrossThreadShareable(const WTF::String& string);
WTF::String toCrossThreadShareable(const WTF::String& string);
Ref<WTF::StringImpl> toCrossThreadShareable(Ref<WTF::StringImpl> impl);
}

View File

@@ -1,187 +0,0 @@
/*
* Copyright (C) 2011 Daniel Bates (dbates@intudata.com). All rights reserved.
* Copyright (c) 2012 Google, inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include "TextEncoding.h"
#include <wtf/ASCIICType.h>
#include <wtf/Assertions.h>
#include <wtf/text/StringBuilder.h>
namespace PAL {
// See <http://en.wikipedia.org/wiki/Percent-encoding#Non-standard_implementations>.
// Policy type for decodeEscapeSequences<>: handles the non-standard %uXXXX
// escape form, where each sequence encodes one UTF-16 code unit as four hex digits.
struct Unicode16BitEscapeSequence {
enum { SequenceSize = 6 }; // e.g. %u26C4
// Returns the index of the next potential "%u" escape at or after startPosition,
// or notFound if there is none.
static size_t findInString(StringView string, size_t startPosition) { return string.find("%u"_s, startPosition); }
// Advances past consecutive, fully-formed %uXXXX sequences starting at
// startPosition and returns the index just past the last one. A malformed
// sequence ends the run immediately, so the returned run length is always a
// multiple of SequenceSize.
static size_t findEndOfRun(StringView string, size_t startPosition, size_t endPosition)
{
size_t runEnd = startPosition;
while (endPosition - runEnd >= SequenceSize && string[runEnd] == '%' && string[runEnd + 1] == 'u'
&& isASCIIHexDigit(string[runEnd + 2]) && isASCIIHexDigit(string[runEnd + 3])
&& isASCIIHexDigit(string[runEnd + 4]) && isASCIIHexDigit(string[runEnd + 5])) {
runEnd += SequenceSize;
}
return runEnd;
}
// Decodes a run previously delimited by findEndOfRun(). The TextEncoding
// parameter is unused: %u escapes always denote UTF-16 code units.
static String decodeRun(StringView run, const TextEncoding&)
{
// Each %u-escape sequence represents a UTF-16 code unit.
// See <http://www.w3.org/International/iri-edit/draft-duerst-iri.html#anchor29>.
// For 16-bit escape sequences, we know that findEndOfRun() has given us a contiguous run of sequences
// without any intervening characters, so decode the run without additional checks.
auto numberOfSequences = run.length() / SequenceSize;
StringBuilder builder;
builder.reserveCapacity(numberOfSequences);
while (numberOfSequences--) {
// Assemble the code unit from the four hex digits following "%u".
char16_t codeUnit = (toASCIIHexValue(run[2]) << 12) | (toASCIIHexValue(run[3]) << 8) | (toASCIIHexValue(run[4]) << 4) | toASCIIHexValue(run[5]);
builder.append(codeUnit);
run = run.substring(SequenceSize);
}
return builder.toString();
}
};
// Policy type for decodeEscapeSequences<>: handles standard percent-encoding
// (%XX, one byte per sequence), with tolerance for a limited number of
// unescaped trailing bytes inside a run (needed for some multi-byte encodings).
struct URLEscapeSequence {
enum { SequenceSize = 3 }; // e.g. %41
// Returns the index of the next '%' at or after startPosition, or notFound.
static size_t findInString(StringView string, size_t startPosition) { return string.find('%', startPosition); }
// Returns the index just past the run of escape sequences (plus allowed
// unescaped trailing bytes) that begins at startPosition.
static size_t findEndOfRun(StringView string, size_t startPosition, size_t endPosition)
{
// Make the simplifying assumption that supported encodings may have up to two unescaped characters
// in the range 0x40 - 0x7F as the trailing bytes of their sequences which need to be passed into the
// decoder as part of the run. In other words, we end the run at the first value outside of the
// 0x40 - 0x7F range, after two values in this range, or at a %-sign that does not introduce a valid
// escape sequence.
size_t runEnd = startPosition;
int numberOfTrailingCharacters = 0;
while (runEnd < endPosition) {
if (string[runEnd] == '%') {
if (endPosition - runEnd >= SequenceSize && isASCIIHexDigit(string[runEnd + 1]) && isASCIIHexDigit(string[runEnd + 2])) {
runEnd += SequenceSize;
numberOfTrailingCharacters = 0;
} else
break;
} else if (string[runEnd] >= 0x40 && string[runEnd] <= 0x7F && numberOfTrailingCharacters < 2) {
runEnd += 1;
numberOfTrailingCharacters += 1;
} else
break;
}
return runEnd;
}
// Converts a run delimited by findEndOfRun() into raw bytes: each %XX becomes
// one byte, and characters between sequences are passed through unchanged.
static Vector<uint8_t, 512> decodeRun(StringView run)
{
// For URL escape sequences, we know that findEndOfRun() has given us a run where every %-sign introduces
// a valid escape sequence, but there may be characters between the sequences.
Vector<uint8_t, 512> buffer;
buffer.grow(run.length()); // Unescaping hex sequences only makes the length smaller.
size_t bufferIndex = 0;
while (!run.isEmpty()) {
if (run[0] == '%') {
buffer[bufferIndex++] = (toASCIIHexValue(run[1]) << 4) | toASCIIHexValue(run[2]);
run = run.substring(SequenceSize);
} else {
buffer[bufferIndex++] = run[0];
run = run.substring(1);
}
}
buffer.shrink(bufferIndex);
return buffer;
}
// Decodes a run to a String using `encoding`, falling back to UTF-8 when the
// encoding is invalid.
static String decodeRun(StringView run, const TextEncoding& encoding)
{
auto buffer = decodeRun(run);
if (!encoding.isValid())
return PAL::UTF8Encoding().decode(buffer.span());
return encoding.decode(buffer.span());
}
};
// Generic driver: scans `string` for escape-sequence runs as identified by the
// EscapeSequence policy (Unicode16BitEscapeSequence or URLEscapeSequence),
// decodes each run with `encoding`, and stitches the decoded runs together with
// the untouched text between them.
template<typename EscapeSequence>
String decodeEscapeSequences(StringView string, const TextEncoding& encoding)
{
StringBuilder result;
size_t length = string.length();
size_t decodedPosition = 0; // Everything before this index has been appended to `result`.
size_t searchPosition = 0;
size_t encodedRunPosition;
while ((encodedRunPosition = EscapeSequence::findInString(string, searchPosition)) != notFound) {
size_t encodedRunEnd = EscapeSequence::findEndOfRun(string, encodedRunPosition, length);
searchPosition = encodedRunEnd;
// An escape introducer that does not start a well-formed sequence: skip
// past it and keep scanning; the raw character is copied later.
if (encodedRunEnd == encodedRunPosition) {
++searchPosition;
continue;
}
String decoded = EscapeSequence::decodeRun(string.substring(encodedRunPosition, encodedRunEnd - encodedRunPosition), encoding);
// If the run decodes to nothing, decodedPosition is not advanced, so the
// original escaped text is emitted verbatim by a later append.
if (decoded.isEmpty())
continue;
// Append the literal text preceding the run, then the decoded run itself.
result.append(string.substring(decodedPosition, encodedRunPosition - decodedPosition), decoded);
decodedPosition = encodedRunEnd;
}
result.append(string.substring(decodedPosition, length - decodedPosition));
return result.toString();
}
// Decodes %XX escape sequences in `string` into raw bytes. Text between escape
// runs is encoded as UTF-8 for URL parsing; escaped bytes pass through as-is.
inline Vector<uint8_t> decodeURLEscapeSequencesAsData(StringView string)
{
Vector<uint8_t> result;
size_t decodedPosition = 0; // Everything before this index has been appended to `result`.
size_t searchPosition = 0;
while (true) {
size_t encodedRunPosition = URLEscapeSequence::findInString(string, searchPosition);
size_t encodedRunEnd = 0;
if (encodedRunPosition != notFound) {
encodedRunEnd = URLEscapeSequence::findEndOfRun(string, encodedRunPosition, string.length());
searchPosition = encodedRunEnd;
// A '%' that does not introduce a valid sequence: skip it and rescan.
if (encodedRunEnd == encodedRunPosition) {
++searchPosition;
continue;
}
}
// Strings are encoded as requested.
result.appendVector(PAL::UTF8Encoding().encodeForURLParsing(string.substring(decodedPosition, encodedRunPosition - decodedPosition)));
if (encodedRunPosition == notFound)
return result;
// Bytes go through as-is.
auto decodedEscapeSequence = URLEscapeSequence::decodeRun(string.substring(encodedRunPosition, encodedRunEnd - encodedRunPosition));
ASSERT(!decodedEscapeSequence.isEmpty());
result.appendVector(decodedEscapeSequence);
decodedPosition = encodedRunEnd;
}
}
} // namespace PAL

File diff suppressed because it is too large Load Diff

View File

@@ -1,136 +0,0 @@
/*
* Copyright (C) 2020 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <algorithm>
#include <array>
#include <iterator>
#include <optional>
#include <unicode/umachine.h>
#include <utility>
namespace PAL {
const std::array<std::pair<uint16_t, char16_t>, 7724>& jis0208();
const std::array<std::pair<uint16_t, char16_t>, 6067>& jis0212();
const std::array<std::pair<uint16_t, char32_t>, 18590>& big5();
const std::array<std::pair<uint16_t, char16_t>, 17048>& eucKR();
const std::array<char16_t, 23940>& gb18030();
void checkEncodingTableInvariants();
// Functions for using sorted arrays of pairs as a map.
// FIXME: Consider moving these functions to StdLibExtras.h for uses other than encoding tables.
template<typename CollectionType> void sortByFirst(CollectionType&);
template<typename CollectionType> void stableSortByFirst(CollectionType&);
template<typename CollectionType> bool isSortedByFirst(const CollectionType&);
template<typename CollectionType> bool sortedFirstsAreUnique(const CollectionType&);
template<typename CollectionType, typename KeyType> static auto findFirstInSortedPairs(const CollectionType& sortedPairsCollection, const KeyType&) -> std::optional<decltype(std::begin(sortedPairsCollection)->second)>;
template<typename CollectionType, typename KeyType> static auto findInSortedPairs(const CollectionType& sortedPairsCollection, const KeyType&) -> std::span<std::remove_reference_t<decltype(*std::begin(sortedPairsCollection))>>;
#if !ASSERT_ENABLED
// Assertion-only sanity checks for the encoding tables; no-op in release builds.
inline void checkEncodingTableInvariants() {}
#endif
// Heterogeneous "less by .first" functor: orders any two objects exposing a
// `.first` member (pairs, or FirstAdapter wrappers around bare keys).
struct CompareFirst {
    template<typename LhsType, typename RhsType>
    bool operator()(const LhsType& lhs, const RhsType& rhs)
    {
        return lhs.first < rhs.first;
    }
};
// Heterogeneous "equal by .first" functor, the equality counterpart of
// CompareFirst.
struct EqualFirst {
    template<typename LhsType, typename RhsType>
    bool operator()(const LhsType& lhs, const RhsType& rhs)
    {
        return lhs.first == rhs.first;
    }
};
// Heterogeneous "less by .second" functor: orders any two objects exposing a
// `.second` member.
struct CompareSecond {
    template<typename LhsType, typename RhsType>
    bool operator()(const LhsType& lhs, const RhsType& rhs)
    {
        return lhs.second < rhs.second;
    }
};
// Non-owning view that exposes an arbitrary value as a `.first` member, so a
// bare key can be compared against pair-like entries by CompareFirst.
template<typename T> struct FirstAdapter {
    const T& first;
};

// Deduces T and wraps `value` in a FirstAdapter without copying it.
template<typename T> FirstAdapter<T> makeFirstAdapter(const T& value)
{
    return FirstAdapter<T> { value };
}
// Non-owning view that exposes an arbitrary value as a `.second` member, the
// counterpart of FirstAdapter for comparisons by CompareSecond.
template<typename T> struct SecondAdapter {
    const T& second;
};

// Deduces T and wraps `value` in a SecondAdapter without copying it.
template<typename T> SecondAdapter<T> makeSecondAdapter(const T& value)
{
    return SecondAdapter<T> { value };
}
// Sorts pair-like entries in place, ordered by their `.first` member
// (unstable: entries with equal keys may be reordered).
template<typename CollectionType> void sortByFirst(CollectionType& collection)
{
    const auto lessByFirst = [](const auto& lhs, const auto& rhs) { return lhs.first < rhs.first; };
    std::sort(std::begin(collection), std::end(collection), lessByFirst);
}
// Sorts pair-like entries in place by their `.first` member, preserving the
// relative order of entries with equal keys.
template<typename CollectionType> void stableSortByFirst(CollectionType& collection)
{
    const auto lessByFirst = [](const auto& lhs, const auto& rhs) { return lhs.first < rhs.first; };
    std::stable_sort(std::begin(collection), std::end(collection), lessByFirst);
}
// Returns true if the pair-like entries are in non-descending order of their
// `.first` member.
template<typename CollectionType> bool isSortedByFirst(const CollectionType& collection)
{
    const auto lessByFirst = [](const auto& lhs, const auto& rhs) { return lhs.first < rhs.first; };
    return std::is_sorted(std::begin(collection), std::end(collection), lessByFirst);
}
// Returns true if no two adjacent entries share the same `.first` member.
// Only meaningful on a collection already sorted by first.
template<typename CollectionType> bool sortedFirstsAreUnique(const CollectionType& collection)
{
    const auto equalByFirst = [](const auto& lhs, const auto& rhs) { return lhs.first == rhs.first; };
    return std::adjacent_find(std::begin(collection), std::end(collection), equalByFirst) == std::end(collection);
}
// Binary-searches a pair collection sorted by first for `key` and returns the
// mapped value of the first matching entry, or nullopt when the key is absent.
// Integral keys that cannot be represented in the stored key type are rejected
// up front, since such a key can never match any entry.
template<typename CollectionType, typename KeyType> static auto findFirstInSortedPairs(const CollectionType& collection, const KeyType& key) -> std::optional<decltype(std::begin(collection)->second)>
{
    using StoredKeyType = decltype(std::begin(collection)->first);
    if constexpr (std::is_integral_v<KeyType>) {
        if (key != StoredKeyType(key))
            return std::nullopt;
    }
    const auto entryKeyIsLess = [](const auto& entry, const KeyType& target) { return entry.first < target; };
    auto iterator = std::lower_bound(std::begin(collection), std::end(collection), key, entryKeyIsLess);
    if (iterator == std::end(collection) || key < iterator->first)
        return std::nullopt;
    return iterator->second;
}
// Binary-searches a pair collection sorted by first and returns the contiguous
// range of entries whose `.first` equals `key` (empty when the key is absent).
// Integral keys that cannot be represented in the stored key type yield an
// empty result, since such a key can never match any entry.
template<typename CollectionType, typename KeyType> static auto findInSortedPairs(const CollectionType& collection, const KeyType& key) -> std::span<std::remove_reference_t<decltype(*std::begin(collection))>>
{
if constexpr (std::is_integral_v<KeyType>) {
if (key != decltype(std::begin(collection)->first)(key))
return {};
}
return std::ranges::equal_range(collection, makeFirstAdapter(key), CompareFirst {});
}
}

View File

@@ -204,10 +204,6 @@ const errors: ErrorCodeMapping = [
["ERR_POSTGRES_UNSUPPORTED_BYTEA_FORMAT", TypeError, "PostgresError"],
["ERR_POSTGRES_UNSUPPORTED_INTEGER_SIZE", TypeError, "PostgresError"],
["ERR_POSTGRES_UNSUPPORTED_NUMERIC_FORMAT", TypeError, "PostgresError"],
["ERR_MYSQL_CONNECTION_CLOSED", Error, "MySQLError"],
["ERR_MYSQL_CONNECTION_TIMEOUT", Error, "MySQLError"],
["ERR_MYSQL_IDLE_TIMEOUT", Error, "MySQLError"],
["ERR_MYSQL_LIFETIME_TIMEOUT", Error, "MySQLError"],
["ERR_UNHANDLED_REJECTION", Error, "UnhandledPromiseRejection"],
["ERR_REQUIRE_ASYNC_MODULE", Error],
["ERR_S3_INVALID_ENDPOINT", Error],

View File

@@ -2,13 +2,12 @@
#include "headers-handwritten.h"
#include "BunBuiltinNames.h"
#include "WebCoreJSBuiltins.h"
#include "ZigGlobalObject.h"
extern "C" [[ZIG_EXPORT(zero_is_throw)]] JSC::EncodedJSValue IPCSerialize(Zig::GlobalObject* global, JSC::EncodedJSValue message, JSC::EncodedJSValue handle)
extern "C" [[ZIG_EXPORT(zero_is_throw)]] JSC::EncodedJSValue IPCSerialize(JSC::JSGlobalObject* global, JSC::EncodedJSValue message, JSC::EncodedJSValue handle)
{
auto& vm = JSC::getVM(global);
auto scope = DECLARE_THROW_SCOPE(vm);
JSC::JSFunction* serializeFunction = global->m_ipcSerializeFunction.getInitializedOnMainThread(global);
JSC::JSFunction* serializeFunction = JSC::JSFunction::create(vm, global, WebCore::ipcSerializeCodeGenerator(vm), global);
JSC::CallData callData = JSC::getCallData(serializeFunction);
JSC::MarkedArgumentBuffer args;
@@ -20,11 +19,11 @@ extern "C" [[ZIG_EXPORT(zero_is_throw)]] JSC::EncodedJSValue IPCSerialize(Zig::G
return JSC::JSValue::encode(result);
}
extern "C" [[ZIG_EXPORT(zero_is_throw)]] JSC::EncodedJSValue IPCParse(Zig::GlobalObject* global, JSC::EncodedJSValue target, JSC::EncodedJSValue serialized, JSC::EncodedJSValue fd)
extern "C" [[ZIG_EXPORT(zero_is_throw)]] JSC::EncodedJSValue IPCParse(JSC::JSGlobalObject* global, JSC::EncodedJSValue target, JSC::EncodedJSValue serialized, JSC::EncodedJSValue fd)
{
auto& vm = JSC::getVM(global);
auto scope = DECLARE_THROW_SCOPE(vm);
JSC::JSFunction* parseFunction = global->m_ipcParseHandleFunction.getInitializedOnMainThread(global);
JSC::JSFunction* parseFunction = JSC::JSFunction::create(vm, global, WebCore::ipcParseHandleCodeGenerator(vm), global);
JSC::CallData callData = JSC::getCallData(parseFunction);
JSC::MarkedArgumentBuffer args;

View File

@@ -21,10 +21,6 @@ pub const JSGlobalObject = opaque {
JSGlobalObject__throwOutOfMemoryError(this);
return .zero;
}
pub fn gregorianDateTimeToMS(this: *jsc.JSGlobalObject, year: i32, month: i32, day: i32, hour: i32, minute: i32, second: i32, millisecond: i32) bun.JSError!f64 {
jsc.markBinding(@src());
return bun.cpp.Bun__gregorianDateTimeToMS(this, year, month, day, hour, minute, second, millisecond);
}
pub fn throwTODO(this: *JSGlobalObject, msg: []const u8) bun.JSError {
const err = this.createErrorInstance("{s}", .{msg});
@@ -671,40 +667,6 @@ pub const JSGlobalObject = opaque {
always_allow_zero: bool = false,
};
pub fn validateBigIntRange(this: *JSGlobalObject, value: JSValue, comptime T: type, default: T, comptime range: IntegerRange) bun.JSError!T {
if (value.isUndefined() or value == .zero) {
return 0;
}
const TypeInfo = @typeInfo(T);
if (TypeInfo != .int) {
@compileError("T must be an integer type");
}
const signed = TypeInfo.int.signedness == .signed;
const min_t = comptime @max(range.min, std.math.minInt(T));
const max_t = comptime @min(range.max, std.math.maxInt(T));
if (value.isBigInt()) {
if (signed) {
if (value.isBigIntInInt64Range(min_t, max_t)) {
return value.toInt64();
}
} else {
if (value.isBigIntInUInt64Range(min_t, max_t)) {
return value.toUInt64NoTruncate();
}
}
return this.ERR(.OUT_OF_RANGE, "The value is out of range. It must be >= {d} and <= {d}.", .{ min_t, max_t }).throw();
}
return try this.validateIntegerRange(value, T, default, .{
.min = comptime @max(min_t, jsc.MIN_SAFE_INTEGER),
.max = comptime @min(max_t, jsc.MAX_SAFE_INTEGER),
.field_name = range.field_name,
.always_allow_zero = range.always_allow_zero,
});
}
pub fn validateIntegerRange(this: *JSGlobalObject, value: JSValue, comptime T: type, default: T, comptime range: IntegerRange) bun.JSError!T {
if (value.isUndefined() or value == .zero) {
return default;

View File

@@ -1031,12 +1031,11 @@ JSC_DEFINE_CUSTOM_GETTER(jsMockFunctionGetter_protoImpl, (JSC::JSGlobalObject *
return JSValue::encode(jsUndefined());
}
extern "C" [[ZIG_EXPORT(zero_is_throw)]] JSC::EncodedJSValue JSMockFunction__getCalls(JSC::JSGlobalObject* globalThis, EncodedJSValue encodedValue)
extern "C" JSC::EncodedJSValue JSMockFunction__getCalls(EncodedJSValue encodedValue)
{
auto scope = DECLARE_THROW_SCOPE(globalThis->vm());
JSValue value = JSValue::decode(encodedValue);
if (auto* mock = tryJSDynamicCast<JSMockFunction*>(value)) {
RELEASE_AND_RETURN(scope, JSValue::encode(mock->getCalls()));
return JSValue::encode(mock->getCalls());
}
return encodedJSUndefined();
}

View File

@@ -33,13 +33,6 @@ pub const JSValue = enum(i64) {
return @as(JSValue, @enumFromInt(@as(i64, @bitCast(@intFromPtr(ptr)))));
}
pub fn isBigIntInUInt64Range(this: JSValue, min: u64, max: u64) bool {
return bun.cpp.JSC__isBigIntInUInt64Range(this, min, max);
}
pub fn isBigIntInInt64Range(this: JSValue, min: i64, max: i64) bool {
return bun.cpp.JSC__isBigIntInInt64Range(this, min, max);
}
pub fn coerceToInt32(this: JSValue, globalThis: *jsc.JSGlobalObject) bun.JSError!i32 {
return bun.cpp.JSC__JSValue__coerceToInt32(this, globalThis);
}
@@ -1575,7 +1568,7 @@ pub const JSValue = enum(i64) {
///
/// Returns null when the value is:
/// - JSValue.null
/// - .false
/// - JSValue.false
/// - .js_undefined
/// - an empty string
pub fn getStringish(this: JSValue, global: *JSGlobalObject, property: []const u8) bun.JSError!?bun.String {
@@ -2175,7 +2168,7 @@ pub const JSValue = enum(i64) {
return bun.jsc.fromJSHostCall(global, @src(), Bun__JSValue__deserialize, .{ global, bytes.ptr, bytes.len });
}
extern fn Bun__serializeJSValue(global: *jsc.JSGlobalObject, value: JSValue, flags: u8) SerializedScriptValue.External;
extern fn Bun__serializeJSValue(global: *jsc.JSGlobalObject, value: JSValue, forTransfer: bool) SerializedScriptValue.External;
extern fn Bun__SerializedScriptSlice__free(*anyopaque) void;
pub const SerializedScriptValue = struct {
@@ -2193,20 +2186,10 @@ pub const JSValue = enum(i64) {
}
};
pub const SerializedFlags = packed struct(u8) {
forCrossProcessTransfer: bool = false,
forStorage: bool = false,
_padding: u6 = 0,
};
/// Throws a JS exception and returns null if the serialization fails, otherwise returns a SerializedScriptValue.
/// Must be freed when you are done with the bytes.
pub inline fn serialize(this: JSValue, global: *JSGlobalObject, flags: SerializedFlags) bun.JSError!SerializedScriptValue {
var flags_u8: u8 = 0;
if (flags.forCrossProcessTransfer) flags_u8 |= 1 << 0;
if (flags.forStorage) flags_u8 |= 1 << 1;
const value = try bun.jsc.fromJSHostCallGeneric(global, @src(), Bun__serializeJSValue, .{ global, this, flags_u8 });
pub inline fn serialize(this: JSValue, global: *JSGlobalObject, forTransfer: bool) bun.JSError!SerializedScriptValue {
const value = try bun.jsc.fromJSHostCallGeneric(global, @src(), Bun__serializeJSValue, .{ global, this, forTransfer });
return .{ .data = value.bytes.?[0..value.size], .handle = value.handle.? };
}

View File

@@ -1037,11 +1037,6 @@ static void writeFetchHeadersToUWSResponse(WebCore::FetchHeaders& headers, uWS::
res->writeMark();
}
}
// Prevent automatic Date header insertion when user provides one
if (header.key == WebCore::HTTPHeaderName::Date) {
data->state |= uWS::HttpResponseData<isSSL>::HTTP_WROTE_DATE_HEADER;
}
writeResponseHeader<isSSL>(res, name, value);
}

View File

@@ -64,7 +64,6 @@ typedef union DataCellValue {
double number;
int32_t integer;
int64_t bigint;
uint64_t unsigned_bigint;
uint8_t boolean;
double date;
double date_with_time_zone;
@@ -91,7 +90,6 @@ enum class DataCellTag : uint8_t {
TypedArray = 11,
Raw = 12,
UnsignedInteger = 13,
UnsignedBigint = 14,
};
enum class BunResultMode : uint8_t {
@@ -163,9 +161,6 @@ static JSC::JSValue toJS(JSC::VM& vm, JSC::JSGlobalObject* globalObject, DataCel
case DataCellTag::Bigint:
return JSC::JSBigInt::createFrom(globalObject, cell.value.bigint);
break;
case DataCellTag::UnsignedBigint:
return JSC::JSBigInt::createFrom(globalObject, cell.value.unsigned_bigint);
break;
case DataCellTag::Boolean:
return jsBoolean(cell.value.boolean);
break;
@@ -322,6 +317,7 @@ static JSC::JSValue toJS(JSC::Structure* structure, DataCell* cells, uint32_t co
ASSERT(!cell.isIndexedColumn());
ASSERT(cell.isNamedColumn());
if (names.has_value()) {
auto name = names.value()[i];
object->putDirect(vm, Identifier::fromString(vm, name.name.toWTFString()), value);

View File

@@ -15,14 +15,8 @@ struct SerializedValueSlice {
WebCore::SerializedScriptValue* value; // NOLINT
};
enum class SerializedFlags : uint8_t {
None = 0,
ForCrossProcessTransfer = 1 << 0,
ForStorage = 1 << 1,
};
/// Returns a "slice" that also contains a pointer to the SerializedScriptValue. Must be freed by the caller
extern "C" SerializedValueSlice Bun__serializeJSValue(JSGlobalObject* globalObject, EncodedJSValue encodedValue, const SerializedFlags flags)
extern "C" SerializedValueSlice Bun__serializeJSValue(JSGlobalObject* globalObject, EncodedJSValue encodedValue, bool forTransferBool)
{
auto& vm = JSC::getVM(globalObject);
auto scope = DECLARE_THROW_SCOPE(vm);
@@ -30,9 +24,9 @@ extern "C" SerializedValueSlice Bun__serializeJSValue(JSGlobalObject* globalObje
Vector<JSC::Strong<JSC::JSObject>> transferList;
Vector<RefPtr<MessagePort>> dummyPorts;
auto forStorage = (static_cast<uint8_t>(flags) & static_cast<uint8_t>(SerializedFlags::ForStorage)) ? SerializationForStorage::Yes : SerializationForStorage::No;
auto forStorage = SerializationForStorage::No;
auto context = SerializationContext::Default;
auto forTransferEnum = (static_cast<uint8_t>(flags) & static_cast<uint8_t>(SerializedFlags::ForCrossProcessTransfer)) ? SerializationForCrossProcessTransfer::Yes : SerializationForCrossProcessTransfer::No;
auto forTransferEnum = forTransferBool ? SerializationForTransfer::Yes : SerializationForTransfer::No;
ExceptionOr<Ref<SerializedScriptValue>> serialized = SerializedScriptValue::create(*globalObject, value, WTFMove(transferList), dummyPorts, forStorage, context, forTransferEnum);
EXCEPTION_ASSERT(!!scope.exception() == serialized.hasException());

View File

@@ -1,71 +0,0 @@
#include "root.h"
/*
* Copyright (C) 2004-2017 Apple Inc. All rights reserved.
* Copyright (C) 2006 Alexey Proskuryakov <ap@nypop.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// config.h removed - not needed in Bun
#include "TextCodec.h"
#include <unicode/uchar.h>
#include <wtf/TZoneMallocInlines.h>
#include <wtf/text/WTFString.h>
#include <wtf/unicode/CharacterNames.h>
#include <array>
#include <cstdio>
namespace PAL {
WTF_MAKE_TZONE_ALLOCATED_IMPL(TextCodec);
// Produces the textual replacement emitted for a code point the target
// encoding cannot represent, per the requested UnencodableHandling policy.
// Returns a span over `replacement`, valid only as long as that buffer lives.
std::span<char> TextCodec::getUnencodableReplacement(char32_t codePoint, UnencodableHandling handling, UnencodableReplacementArray& replacement)
{
ASSERT(!(codePoint > UCHAR_MAX_VALUE));
// The Encoding Standard doesn't have surrogate code points in the input, but that would require
// scanning and potentially manipulating inputs ahead of time. Instead handle them at the last
// possible point.
if (U_IS_SURROGATE(codePoint))
codePoint = replacementCharacter;
switch (handling) {
case UnencodableHandling::Entities: {
// HTML numeric character reference, e.g. "&#9924;".
int count = SAFE_SPRINTF(std::span { replacement }, "&#%u;", static_cast<unsigned>(codePoint));
ASSERT(count >= 0);
return std::span { replacement }.first(std::max<int>(0, count));
}
case UnencodableHandling::URLEncodedEntities: {
// Percent-encoded form of the same reference, e.g. "%26%239924%3B".
int count = SAFE_SPRINTF(std::span { replacement }, "%%26%%23%u%%3B", static_cast<unsigned>(codePoint));
ASSERT(count >= 0);
return std::span { replacement }.first(std::max<int>(0, count));
}
}
// All UnencodableHandling values are covered above; reaching here is a bug.
ASSERT_NOT_REACHED();
replacement[0] = '\0';
return std::span { replacement }.first(0);
}
} // namespace PAL

View File

@@ -1,70 +0,0 @@
/*
* Copyright (C) 2004-2020 Apple Inc. All rights reserved.
* Copyright (C) 2006 Alexey Proskuryakov <ap@nypop.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include "UnencodableHandling.h"
#include <array>
#include <memory>
#include <span>
#include <unicode/umachine.h>
#include <wtf/Forward.h>
#include <wtf/Noncopyable.h>
#include <wtf/TZoneMalloc.h>
namespace PAL {
class TextEncoding;
using UnencodableReplacementArray = std::array<char, 32>;
// Abstract base for text decoders/encoders. Subclasses implement a specific
// character encoding; decode() may be called repeatedly on chunks of input.
class TextCodec {
WTF_MAKE_TZONE_ALLOCATED(TextCodec);
WTF_MAKE_NONCOPYABLE(TextCodec);
public:
TextCodec() = default;
virtual ~TextCodec() = default;
// Requests that a leading byte-order mark be dropped; the default
// implementation does nothing.
virtual void stripByteOrderMark() {}
// Decodes a chunk of bytes. `flush` signals end of input, `stopOnError`
// aborts at the first malformed sequence, and `sawError` reports whether any
// decoding error was encountered.
virtual String decode(std::span<const uint8_t> data, bool flush, bool stopOnError, bool& sawError) = 0;
virtual Vector<uint8_t> encode(StringView, UnencodableHandling) const = 0;
// Fills a string representation of the given
// unencodable character into the given replacement buffer and returns the
// span of characters written into it.
static std::span<char> getUnencodableReplacement(char32_t, UnencodableHandling, UnencodableReplacementArray& replacement LIFETIME_BOUND);
};
Function<void(char32_t, Vector<uint8_t>&)> unencodableHandler(UnencodableHandling);
using EncodingNameRegistrar = void (*)(ASCIILiteral alias, ASCIILiteral name);
using NewTextCodecFunction = Function<std::unique_ptr<TextCodec>()>;
using TextCodecRegistrar = void (*)(ASCIILiteral name, NewTextCodecFunction&&);
} // namespace PAL

View File

@@ -1,46 +0,0 @@
extern fn Bun__createTextCodec(encodingName: [*]const u8, encodingNameLen: usize) ?*TextCodec;
extern fn Bun__decodeWithTextCodec(codec: *TextCodec, data: [*]const u8, length: usize, flush: bool, stopOnError: bool, outSawError: *bool) bun.String;
extern fn Bun__deleteTextCodec(codec: *TextCodec) void;
extern fn Bun__stripBOMFromTextCodec(codec: *TextCodec) void;
extern fn Bun__isEncodingSupported(encodingName: [*]const u8, encodingNameLen: usize) bool;
extern fn Bun__getCanonicalEncodingName(encodingName: [*]const u8, encodingNameLen: usize, outLen: *usize) ?[*]const u8;
/// Opaque handle to a native text codec (see the `Bun__*TextCodec` extern
/// bindings above). Created per encoding name and released with `deinit`.
pub const TextCodec = opaque {
/// Returns null when the encoding name is not recognized by the native side.
pub fn create(encoding: []const u8) ?*TextCodec {
jsc.markBinding(@src());
return Bun__createTextCodec(encoding.ptr, encoding.len);
}
/// Frees the native codec; the pointer must not be used afterwards.
pub fn deinit(self: *TextCodec) void {
jsc.markBinding(@src());
Bun__deleteTextCodec(self);
}
/// Decodes `data`. `flush` marks end of input; `stopOnError` aborts on the
/// first malformed sequence. `sawError` in the result reports whether any
/// decoding error occurred.
pub fn decode(self: *TextCodec, data: []const u8, flush: bool, stopOnError: bool) struct { result: bun.String, sawError: bool } {
jsc.markBinding(@src());
var sawError: bool = false;
const result = Bun__decodeWithTextCodec(self, data.ptr, data.len, flush, stopOnError, &sawError);
return .{ .result = result, .sawError = sawError };
}
/// Requests that a leading byte-order mark be stripped from decoded input.
pub fn stripBOM(self: *TextCodec) void {
jsc.markBinding(@src());
Bun__stripBOMFromTextCodec(self);
}
/// Reports whether the given encoding label is supported.
pub fn isSupported(encoding: []const u8) bool {
jsc.markBinding(@src());
return Bun__isEncodingSupported(encoding.ptr, encoding.len);
}
/// Returns the canonical name for an encoding label, or null when unsupported.
pub fn getCanonicalEncodingName(encoding: []const u8) ?[]const u8 {
jsc.markBinding(@src());
var len: usize = 0;
const name = Bun__getCanonicalEncodingName(encoding.ptr, encoding.len, &len) orelse return null;
return name[0..len];
}
};
const bun = @import("bun");
const jsc = bun.jsc;

View File

@@ -1,78 +0,0 @@
/*
* Copyright (C) 2011 Apple Inc. All rights reserved.
* Copyright (C) 2011 Nokia Corporation and/or its subsidiary(-ies).
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <wtf/StdLibExtras.h>
#include <wtf/text/ASCIIFastPath.h>
namespace PAL {
template<size_t size> struct UCharByteFiller;
template<> struct UCharByteFiller<4> {
static void copy(std::span<LChar> destination, std::span<const uint8_t> source)
{
memcpySpan(destination, source.first(4));
}
static void copy(std::span<char16_t> destination, std::span<const uint8_t> source)
{
destination[0] = source[0];
destination[1] = source[1];
destination[2] = source[2];
destination[3] = source[3];
}
};
template<> struct UCharByteFiller<8> {
static void copy(std::span<LChar> destination, std::span<const uint8_t> source)
{
memcpySpan(destination, source.first(8));
}
static void copy(std::span<char16_t> destination, std::span<const uint8_t> source)
{
destination[0] = source[0];
destination[1] = source[1];
destination[2] = source[2];
destination[3] = source[3];
destination[4] = source[4];
destination[5] = source[5];
destination[6] = source[6];
destination[7] = source[7];
}
};
// Copies one machine word's worth of ASCII bytes (4 or 8, chosen by
// sizeof(WTF::MachineWord)) into an 8-bit destination buffer.
inline void copyASCIIMachineWord(std::span<LChar> destination, std::span<const uint8_t> source)
{
UCharByteFiller<sizeof(WTF::MachineWord)>::copy(destination, source);
}
// Copies one machine word's worth of ASCII bytes (4 or 8, chosen by
// sizeof(WTF::MachineWord)), widening each byte into a 16-bit code unit.
inline void copyASCIIMachineWord(std::span<char16_t> destination, std::span<const uint8_t> source)
{
UCharByteFiller<sizeof(WTF::MachineWord)>::copy(destination, source);
}
} // namespace PAL

Some files were not shown because too many files have changed in this diff Show More