mirror of https://github.com/oven-sh/bun
synced 2026-02-21 00:02:19 +00:00

Compare commits
3 Commits
dylan/fix-...claude/fix

| Author | SHA1 | Date |
|---|---|---|
| | 2c5da98665 | |
| | f7df2af737 | |
| | 42c3217f79 | |
@@ -114,8 +114,6 @@ const buildPlatforms = [
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22" },
{ os: "windows", arch: "x64", release: "2019" },
{ os: "windows", arch: "x64", baseline: true, release: "2019" },
// TODO: Enable when Windows ARM64 CI runners are ready
// { os: "windows", arch: "aarch64", release: "2019" },
];

/**
@@ -138,8 +136,6 @@ const testPlatforms = [
{ os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.22", tier: "latest" },
{ os: "windows", arch: "x64", release: "2019", tier: "oldest" },
{ os: "windows", arch: "x64", release: "2019", baseline: true, tier: "oldest" },
// TODO: Enable when Windows ARM64 CI runners are ready
// { os: "windows", arch: "aarch64", release: "2019", tier: "oldest" },
];

/**
@@ -36,20 +36,16 @@ function Log-Debug {
}
}

# Detect system architecture
$script:IsARM64 = [System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture -eq [System.Runtime.InteropServices.Architecture]::Arm64
$script:VsArch = if ($script:IsARM64) { "arm64" } else { "amd64" }

# Load Visual Studio environment if not already loaded
function Ensure-VSEnvironment {
if ($null -eq $env:VSINSTALLDIR) {
Log-Info "Loading Visual Studio environment for $script:VsArch..."
Log-Info "Loading Visual Studio environment..."
$vswhere = "C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere.exe"
if (!(Test-Path $vswhere)) {
throw "Command not found: vswhere (did you install Visual Studio?)"
}

$vsDir = & $vswhere -prerelease -latest -property installationPath
if ($null -eq $vsDir) {
$vsDir = Get-ChildItem -Path "C:\Program Files\Microsoft Visual Studio\2022" -Directory -ErrorAction SilentlyContinue
@@ -58,20 +54,20 @@ function Ensure-VSEnvironment {
}
$vsDir = $vsDir.FullName
}

Push-Location $vsDir
try {
$vsShell = Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1"
. $vsShell -Arch $script:VsArch -HostArch $script:VsArch
. $vsShell -Arch amd64 -HostArch amd64
} finally {
Pop-Location
}

Log-Success "Visual Studio environment loaded"
}

if ($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
throw "Visual Studio environment is targeting 32 bit x86, but only 64-bit architectures (x64/arm64) are supported."
throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported."
}
}

@@ -190,10 +186,8 @@ function Install-KeyLocker {
}

# Download MSI installer
# Note: KeyLocker tools currently only available for x64, but works on ARM64 via emulation
$msiArch = "x64"
$msiUrl = "https://bun-ci-assets.bun.sh/Keylockertools-windows-${msiArch}.msi"
$msiPath = Join-Path $env:TEMP "Keylockertools-windows-${msiArch}.msi"
$msiUrl = "https://bun-ci-assets.bun.sh/Keylockertools-windows-x64.msi"
$msiPath = Join-Path $env:TEMP "Keylockertools-windows-x64.msi"

Log-Info "Downloading MSI from: $msiUrl"
Log-Info "Downloading to: $msiPath"
@@ -219,9 +219,6 @@ function create_release() {
bun-windows-x64-profile.zip
bun-windows-x64-baseline.zip
bun-windows-x64-baseline-profile.zip
# TODO: Enable when Windows ARM64 CI runners are ready
# bun-windows-aarch64.zip
# bun-windows-aarch64-profile.zip
)

function upload_artifact() {
2 .github/workflows/update-cares.yml (vendored)
@@ -88,7 +88,7 @@ jobs:
commit-message: "deps: update c-ares to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update c-ares to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-cares
branch: deps/update-cares-${{ github.run_number }}
body: |
## What does this PR do?

2 .github/workflows/update-hdrhistogram.yml (vendored)
@@ -91,7 +91,7 @@ jobs:
commit-message: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update hdrhistogram to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-hdrhistogram
branch: deps/update-hdrhistogram-${{ github.run_number }}
body: |
## What does this PR do?

2 .github/workflows/update-highway.yml (vendored)
@@ -107,7 +107,7 @@ jobs:
commit-message: "deps: update highway to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update highway to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-highway
branch: deps/update-highway-${{ github.run_number }}
body: |
## What does this PR do?

2 .github/workflows/update-libarchive.yml (vendored)
@@ -88,7 +88,7 @@ jobs:
commit-message: "deps: update libarchive to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update libarchive to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-libarchive
branch: deps/update-libarchive-${{ github.run_number }}
body: |
## What does this PR do?

2 .github/workflows/update-libdeflate.yml (vendored)
@@ -88,7 +88,7 @@ jobs:
commit-message: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-libdeflate
branch: deps/update-libdeflate-${{ github.run_number }}
body: |
## What does this PR do?

2 .github/workflows/update-lolhtml.yml (vendored)
@@ -100,7 +100,7 @@ jobs:
commit-message: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-lolhtml
branch: deps/update-lolhtml-${{ github.run_number }}
body: |
## What does this PR do?

2 .github/workflows/update-lshpack.yml (vendored)
@@ -105,7 +105,7 @@ jobs:
commit-message: "deps: update lshpack to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update lshpack to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-lshpack
branch: deps/update-lshpack-${{ github.run_number }}
body: |
## What does this PR do?

2 .github/workflows/update-root-certs.yml (vendored)
@@ -74,7 +74,7 @@ jobs:
```
${{ env.changed_files }}
```
branch: certs/update-root-certs
branch: certs/update-root-certs-${{ github.run_number }}
base: main
delete-branch: true
labels:

2 .github/workflows/update-sqlite3.yml (vendored)
@@ -83,7 +83,7 @@ jobs:
commit-message: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
title: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
delete-branch: true
branch: deps/update-sqlite
branch: deps/update-sqlite-${{ steps.check-version.outputs.latest }}
body: |
## What does this PR do?

2 .github/workflows/update-vendor.yml (vendored)
@@ -68,7 +68,7 @@ jobs:
commit-message: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update ${{ matrix.package }} to ${{ steps.check-version.outputs.latest }}"
delete-branch: true
branch: deps/update-${{ matrix.package }}
branch: deps/update-${{ matrix.package }}-${{ github.run_number }}
body: |
## What does this PR do?

2 .github/workflows/update-zstd.yml (vendored)
@@ -88,7 +88,7 @@ jobs:
commit-message: "deps: update zstd to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
title: "deps: update zstd to ${{ steps.check-version.outputs.tag }}"
delete-branch: true
branch: deps/update-zstd
branch: deps/update-zstd-${{ github.run_number }}
body: |
## What does this PR do?
1 .gitignore (vendored)
@@ -1,5 +1,4 @@
.claude/settings.local.json
.direnv
.DS_Store
.env
.envrc
@@ -36,7 +36,6 @@ Bun statically links these libraries:
| [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE) | BSD 2-Clause |
| [`libuv`](https://github.com/libuv/libuv) (on Windows) | MIT |
| [`libdeflate`](https://github.com/ebiggers/libdeflate) | MIT |
| [`uucode`](https://github.com/jacobsandlund/uucode) | MIT |
| A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets) | Apache 2.0 licensed |
| Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1) | Apache 2.0 licensed |
@@ -1,15 +0,0 @@
{
"lockfileVersion": 1,
"configVersion": 1,
"workspaces": {
"": {
"name": "json5-benchmark",
"dependencies": {
"json5": "^2.2.3",
},
},
},
"packages": {
"json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
}
}
@@ -1,88 +0,0 @@
import JSON5 from "json5";
import { bench, group, run } from "../runner.mjs";

const isBun = typeof Bun !== "undefined" && Bun.JSON5;

function sizeLabel(n) {
if (n >= 1024 * 1024) return `${(n / 1024 / 1024).toFixed(1)}MB`;
if (n >= 1024) return `${(n / 1024).toFixed(0)}KB`;
return `${n}B`;
}

// -- parse inputs --

const smallJson5 = `{
// User profile
name: "John Doe",
age: 30,
email: 'john@example.com',
active: true,
}`;

function generateLargeJson5(count) {
const lines = ["{\n // Auto-generated dataset\n items: [\n"];
for (let i = 0; i < count; i++) {
lines.push(` {
id: ${i},
name: 'item_${i}',
value: ${(Math.random() * 1000).toFixed(2)},
hex: 0x${i.toString(16).toUpperCase()},
active: ${i % 2 === 0},
tags: ['tag_${i % 10}', 'category_${i % 5}',],
// entry ${i}
},\n`);
}
lines.push(" ],\n total: " + count + ",\n status: 'complete',\n}\n");
return lines.join("");
}

const largeJson5 = generateLargeJson5(6500);

// -- stringify inputs --

const smallObject = {
name: "John Doe",
age: 30,
email: "john@example.com",
active: true,
};

const largeObject = {
items: Array.from({ length: 10000 }, (_, i) => ({
id: i,
name: `item_${i}`,
value: +(Math.random() * 1000).toFixed(2),
active: i % 2 === 0,
tags: [`tag_${i % 10}`, `category_${i % 5}`],
})),
total: 10000,
status: "complete",
};

const stringify = isBun ? Bun.JSON5.stringify : JSON5.stringify;

// -- parse benchmarks --

group(`parse small (${sizeLabel(smallJson5.length)})`, () => {
if (isBun) bench("Bun.JSON5.parse", () => Bun.JSON5.parse(smallJson5));
bench("json5.parse", () => JSON5.parse(smallJson5));
});

group(`parse large (${sizeLabel(largeJson5.length)})`, () => {
if (isBun) bench("Bun.JSON5.parse", () => Bun.JSON5.parse(largeJson5));
bench("json5.parse", () => JSON5.parse(largeJson5));
});

// -- stringify benchmarks --

group(`stringify small (${sizeLabel(stringify(smallObject).length)})`, () => {
if (isBun) bench("Bun.JSON5.stringify", () => Bun.JSON5.stringify(smallObject));
bench("json5.stringify", () => JSON5.stringify(smallObject));
});

group(`stringify large (${sizeLabel(stringify(largeObject).length)})`, () => {
if (isBun) bench("Bun.JSON5.stringify", () => Bun.JSON5.stringify(largeObject));
bench("json5.stringify", () => JSON5.stringify(largeObject));
});

await run();
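The deleted benchmark gates the native `Bun.JSON5` calls behind a runtime check so the same file can also run where only the `json5` package is available. A minimal sketch of that guard pattern, assuming the `Bun.JSON5.parse`/`Bun.JSON5.stringify` names the benchmark itself references; the fallback wiring and sample input here are illustrative:

```ts
import JSON5 from "json5";

// Prefer the native parser when it exists (same check the benchmark used),
// otherwise fall back to the json5 package so the file also runs under Node.
const bunJson5 = typeof Bun !== "undefined" ? (Bun as any).JSON5 : undefined;

const parse = bunJson5 ? bunJson5.parse : JSON5.parse;
const stringify = bunJson5 ? bunJson5.stringify : JSON5.stringify;

// Single quotes and trailing commas are valid JSON5 but not valid JSON.
const config = parse("{ name: 'bun', retries: 3, }");
console.log(stringify(config));
```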
@@ -1,7 +0,0 @@
{
"name": "json5-benchmark",
"version": "1.0.0",
"dependencies": {
"json5": "^2.2.3"
}
}
@@ -1,20 +0,0 @@
// Benchmark for [...set] optimization (WebKit#56539)
// https://github.com/WebKit/WebKit/pull/56539
import { bench, run } from "../runner.mjs";

const intSet10 = new Set([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
const intSet100 = new Set(Array.from({ length: 100 }, (_, i) => i));
const strSet10 = new Set(Array.from({ length: 10 }, (_, i) => `key-${i}`));
const strSet100 = new Set(Array.from({ length: 100 }, (_, i) => `key-${i}`));

const objSet10 = new Set(Array.from({ length: 10 }, (_, i) => ({ id: i })));
const objSet100 = new Set(Array.from({ length: 100 }, (_, i) => ({ id: i })));

bench("[...set] - integers (10)", () => [...intSet10]);
bench("[...set] - integers (100)", () => [...intSet100]);
bench("[...set] - strings (10)", () => [...strSet10]);
bench("[...set] - strings (100)", () => [...strSet100]);
bench("[...set] - objects (10)", () => [...objSet10]);
bench("[...set] - objects (100)", () => [...objSet100]);

await run();
150 build.zig
@@ -34,7 +34,6 @@ const BunBuildOptions = struct {
enable_asan: bool,
enable_fuzzilli: bool,
enable_valgrind: bool,
enable_tinycc: bool,
use_mimalloc: bool,
tracy_callstack_depth: u16,
reported_nodejs_version: Version,
@@ -85,7 +84,6 @@ const BunBuildOptions = struct {
opts.addOption(bool, "enable_asan", this.enable_asan);
opts.addOption(bool, "enable_fuzzilli", this.enable_fuzzilli);
opts.addOption(bool, "enable_valgrind", this.enable_valgrind);
opts.addOption(bool, "enable_tinycc", this.enable_tinycc);
opts.addOption(bool, "use_mimalloc", this.use_mimalloc);
opts.addOption([]const u8, "reported_nodejs_version", b.fmt("{f}", .{this.reported_nodejs_version}));
opts.addOption(bool, "zig_self_hosted_backend", this.no_llvm);
@@ -261,7 +259,6 @@ pub fn build(b: *Build) !void {
.enable_asan = b.option(bool, "enable_asan", "Enable asan") orelse false,
.enable_fuzzilli = b.option(bool, "enable_fuzzilli", "Enable fuzzilli instrumentation") orelse false,
.enable_valgrind = b.option(bool, "enable_valgrind", "Enable valgrind") orelse false,
.enable_tinycc = b.option(bool, "enable_tinycc", "Enable TinyCC for FFI JIT compilation") orelse true,
.use_mimalloc = b.option(bool, "use_mimalloc", "Use mimalloc as default allocator") orelse false,
.llvm_codegen_threads = b.option(u32, "llvm_codegen_threads", "Number of threads to use for LLVM codegen") orelse 1,
};
@@ -345,7 +342,6 @@ pub fn build(b: *Build) !void {
const step = b.step("check-debug", "Check for semantic analysis errors on some platforms");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
}, &.{.Debug});
@@ -356,7 +352,6 @@ pub fn build(b: *Build) !void {
const step = b.step("check-all", "Check for semantic analysis errors on all supported platforms");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
@@ -371,7 +366,6 @@ pub fn build(b: *Build) !void {
const step = b.step("check-all-debug", "Check for semantic analysis errors on all supported platforms in debug mode");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
@@ -386,14 +380,12 @@ pub fn build(b: *Build) !void {
const step = b.step("check-windows", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
}, &.{ .Debug, .ReleaseFast });
}
{
const step = b.step("check-windows-debug", "Check for semantic analysis errors on Windows");
addMultiCheck(b, step, build_options, &.{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
}, &.{.Debug});
}
{
@@ -430,7 +422,6 @@ pub fn build(b: *Build) !void {
const step = b.step("translate-c", "Copy generated translated-c-headers.zig to zig-out");
for ([_]TargetDescription{
.{ .os = .windows, .arch = .x86_64 },
.{ .os = .windows, .arch = .aarch64 },
.{ .os = .mac, .arch = .x86_64 },
.{ .os = .mac, .arch = .aarch64 },
.{ .os = .linux, .arch = .x86_64 },
@@ -459,146 +450,6 @@ pub fn build(b: *Build) !void {
// const run = b.addRunArtifact(exe);
// step.dependOn(&run.step);
}

// zig build generate-grapheme-tables
// Regenerates src/string/immutable/grapheme_tables.zig from the vendored uucode.
// Run this when updating src/deps/uucode. Normal builds use the committed file.
{
const step = b.step("generate-grapheme-tables", "Regenerate grapheme property tables from vendored uucode");

// --- Phase 1: Build uucode tables (separate module graph, no tables dependency) ---
const bt_config_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/config.zig"),
.target = b.graph.host,
});
const bt_types_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/types.zig"),
.target = b.graph.host,
});
bt_types_mod.addImport("config.zig", bt_config_mod);
bt_config_mod.addImport("types.zig", bt_types_mod);

const bt_config_x_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/x/config.x.zig"),
.target = b.graph.host,
});
const bt_types_x_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/x/types.x.zig"),
.target = b.graph.host,
});
bt_types_x_mod.addImport("config.x.zig", bt_config_x_mod);
bt_config_x_mod.addImport("types.x.zig", bt_types_x_mod);
bt_config_x_mod.addImport("types.zig", bt_types_mod);
bt_config_x_mod.addImport("config.zig", bt_config_mod);

const bt_build_config_mod = b.createModule(.{
.root_source_file = b.path("src/unicode/uucode/uucode_config.zig"),
.target = b.graph.host,
});
bt_build_config_mod.addImport("types.zig", bt_types_mod);
bt_build_config_mod.addImport("config.zig", bt_config_mod);
bt_build_config_mod.addImport("types.x.zig", bt_types_x_mod);
bt_build_config_mod.addImport("config.x.zig", bt_config_x_mod);

const build_tables_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/build/tables.zig"),
.target = b.graph.host,
.optimize = .Debug,
});
build_tables_mod.addImport("config.zig", bt_config_mod);
build_tables_mod.addImport("build_config", bt_build_config_mod);
build_tables_mod.addImport("types.zig", bt_types_mod);

const build_tables_exe = b.addExecutable(.{
.name = "uucode_build_tables",
.root_module = build_tables_mod,
.use_llvm = true,
});
const run_build_tables = b.addRunArtifact(build_tables_exe);
run_build_tables.setCwd(b.path("src/deps/uucode"));
const tables_path = run_build_tables.addOutputFileArg("tables.zig");

// --- Phase 2: Build grapheme-gen with full uucode (separate module graph) ---
const rt_config_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/config.zig"),
.target = b.graph.host,
});
const rt_types_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/types.zig"),
.target = b.graph.host,
});
rt_types_mod.addImport("config.zig", rt_config_mod);
rt_config_mod.addImport("types.zig", rt_types_mod);

const rt_config_x_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/x/config.x.zig"),
.target = b.graph.host,
});
const rt_types_x_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/x/types.x.zig"),
.target = b.graph.host,
});
rt_types_x_mod.addImport("config.x.zig", rt_config_x_mod);
rt_config_x_mod.addImport("types.x.zig", rt_types_x_mod);
rt_config_x_mod.addImport("types.zig", rt_types_mod);
rt_config_x_mod.addImport("config.zig", rt_config_mod);

const rt_build_config_mod = b.createModule(.{
.root_source_file = b.path("src/unicode/uucode/uucode_config.zig"),
.target = b.graph.host,
});
rt_build_config_mod.addImport("types.zig", rt_types_mod);
rt_build_config_mod.addImport("config.zig", rt_config_mod);
rt_build_config_mod.addImport("types.x.zig", rt_types_x_mod);
rt_build_config_mod.addImport("config.x.zig", rt_config_x_mod);

const rt_tables_mod = b.createModule(.{
.root_source_file = tables_path,
.target = b.graph.host,
});
rt_tables_mod.addImport("types.zig", rt_types_mod);
rt_tables_mod.addImport("types.x.zig", rt_types_x_mod);
rt_tables_mod.addImport("config.zig", rt_config_mod);
rt_tables_mod.addImport("build_config", rt_build_config_mod);

const rt_get_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/get.zig"),
.target = b.graph.host,
});
rt_get_mod.addImport("types.zig", rt_types_mod);
rt_get_mod.addImport("tables", rt_tables_mod);
rt_types_mod.addImport("get.zig", rt_get_mod);

const uucode_mod = b.createModule(.{
.root_source_file = b.path("src/deps/uucode/src/root.zig"),
.target = b.graph.host,
});
uucode_mod.addImport("types.zig", rt_types_mod);
uucode_mod.addImport("config.zig", rt_config_mod);
uucode_mod.addImport("types.x.zig", rt_types_x_mod);
uucode_mod.addImport("tables", rt_tables_mod);
uucode_mod.addImport("get.zig", rt_get_mod);

// grapheme_gen executable
const gen_exe = b.addExecutable(.{
.name = "grapheme-gen",
.root_module = b.createModule(.{
.root_source_file = b.path("src/unicode/uucode/grapheme_gen.zig"),
.target = b.graph.host,
.optimize = .Debug,
.imports = &.{
.{ .name = "uucode", .module = uucode_mod },
},
}),
.use_llvm = true,
});

const run_gen = b.addRunArtifact(gen_exe);
const gen_output = run_gen.captureStdOut();

const install = b.addInstallFile(gen_output, "../src/string/immutable/grapheme_tables.zig");
step.dependOn(&install.step);
}
}

const TargetDescription = struct {
@@ -642,7 +493,6 @@ fn addMultiCheck(
.no_llvm = root_build_options.no_llvm,
.enable_asan = root_build_options.enable_asan,
.enable_valgrind = root_build_options.enable_valgrind,
.enable_tinycc = root_build_options.enable_tinycc,
.enable_fuzzilli = root_build_options.enable_fuzzilli,
.use_mimalloc = root_build_options.use_mimalloc,
.override_no_export_cpp_apis = root_build_options.override_no_export_cpp_apis,
@@ -21,10 +21,6 @@ endforeach()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
if(APPLE)
register_compiler_flags(-mcpu=apple-m1)
elseif(WIN32)
# Windows ARM64: use /clang: prefix for clang-cl, skip for MSVC cl.exe subprojects
# These flags are only understood by clang-cl, not MSVC cl.exe
register_compiler_flags(/clang:-march=armv8-a+crc /clang:-mtune=ampere1)
else()
register_compiler_flags(-march=armv8-a+crc -mtune=ampere1)
endif()
@@ -246,17 +242,10 @@ if(UNIX)
)
endif()

if(WIN32)
register_compiler_flags(
DESCRIPTION "Set C/C++ error limit"
/clang:-ferror-limit=${ERROR_LIMIT}
)
else()
register_compiler_flags(
DESCRIPTION "Set C/C++ error limit"
-ferror-limit=${ERROR_LIMIT}
)
endif()
register_compiler_flags(
DESCRIPTION "Set C/C++ error limit"
-ferror-limit=${ERROR_LIMIT}
)

# --- LTO ---
if(ENABLE_LTO)
@@ -106,9 +106,9 @@ else()
endif()

if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
set(HOST_ARCH "aarch64")
set(HOST_OS "aarch64")
elseif(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "x86_64|X86_64|x64|X64|amd64|AMD64")
set(HOST_ARCH "x64")
set(HOST_OS "x64")
else()
unsupported(CMAKE_HOST_SYSTEM_PROCESSOR)
endif()
@@ -433,33 +433,6 @@ function(register_command)
list(APPEND CMD_EFFECTIVE_DEPENDS ${CMD_EXECUTABLE})
endif()

# SKIP_CODEGEN: Skip commands that use BUN_EXECUTABLE if all outputs exist
# This is used for Windows ARM64 builds where x64 bun crashes under emulation
if(SKIP_CODEGEN AND CMD_EXECUTABLE STREQUAL "${BUN_EXECUTABLE}")
set(ALL_OUTPUTS_EXIST TRUE)
foreach(output ${CMD_OUTPUTS})
if(NOT EXISTS ${output})
set(ALL_OUTPUTS_EXIST FALSE)
break()
endif()
endforeach()
if(ALL_OUTPUTS_EXIST AND CMD_OUTPUTS)
message(STATUS "SKIP_CODEGEN: Skipping ${CMD_TARGET} (outputs exist)")
if(CMD_TARGET)
add_custom_target(${CMD_TARGET})
endif()
return()
elseif(NOT CMD_OUTPUTS)
message(STATUS "SKIP_CODEGEN: Skipping ${CMD_TARGET} (no outputs)")
if(CMD_TARGET)
add_custom_target(${CMD_TARGET})
endif()
return()
else()
message(FATAL_ERROR "SKIP_CODEGEN: Cannot skip ${CMD_TARGET} - missing outputs. Run codegen on x64 first.")
endif()
endif()

foreach(target ${CMD_TARGETS})
if(target MATCHES "/|\\\\")
message(FATAL_ERROR "register_command: TARGETS contains \"${target}\", if it's a path add it to SOURCES instead")
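The removed `SKIP_CODEGEN` branch only skips a bun-driven codegen command when every declared output is already on disk, and fails fast otherwise. A rough sketch of that decision logic in TypeScript, assuming illustrative function and path names (the real check lives in CMake, not in TypeScript):

```ts
import { existsSync } from "node:fs";

// Illustrative only: mirrors the removed CMake logic that skips a codegen
// command when SKIP_CODEGEN is on and all of its outputs already exist.
function shouldSkipCodegen(skipCodegen: boolean, outputs: string[]): boolean {
  if (!skipCodegen) return false;
  if (outputs.length === 0) return true; // no declared outputs, safe to skip
  if (!outputs.every(existsSync)) {
    throw new Error("SKIP_CODEGEN: missing outputs. Run codegen on x64 first.");
  }
  return true; // every output exists, the command can be stubbed out
}

// Example: skip only if both generated files are already present.
console.log(shouldSkipCodegen(true, ["build/codegen/ErrorCode.zig", "build/codegen/ErrorCode.h"]));
```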
@@ -677,7 +650,6 @@ function(register_bun_install)
${NPM_CWD}
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
install
--frozen-lockfile
SOURCES
@@ -785,7 +757,7 @@ function(register_cmake_command)
set(MAKE_EFFECTIVE_ARGS -B${MAKE_BUILD_PATH} ${CMAKE_ARGS})

set(setFlags GENERATOR BUILD_TYPE)
set(appendFlags C_FLAGS CXX_FLAGS LINKER_FLAGS STATIC_LINKER_FLAGS EXE_LINKER_FLAGS SHARED_LINKER_FLAGS MODULE_LINKER_FLAGS)
set(appendFlags C_FLAGS CXX_FLAGS LINKER_FLAGS)
set(specialFlags POSITION_INDEPENDENT_CODE)
set(flags ${setFlags} ${appendFlags} ${specialFlags})
@@ -831,14 +803,6 @@ function(register_cmake_command)
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_${flag}=${MAKE_${flag}}")
endforeach()

# Workaround for CMake 4.1.0 bug: Force correct machine type for Windows ARM64
# Use toolchain file and set CMP0197 policy to prevent duplicate /machine: flags
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_TOOLCHAIN_FILE=${CWD}/cmake/toolchains/windows-aarch64.cmake")
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_POLICY_DEFAULT_CMP0197=NEW")
list(APPEND MAKE_EFFECTIVE_ARGS "-DCMAKE_PROJECT_INCLUDE=${CWD}/cmake/arm64-static-lib-fix.cmake")
endif()

if(DEFINED FRESH)
list(APPEND MAKE_EFFECTIVE_ARGS --fresh)
endif()
@@ -4,7 +4,6 @@ endif()

optionx(BUN_LINK_ONLY BOOL "If only the linking step should be built" DEFAULT OFF)
optionx(BUN_CPP_ONLY BOOL "If only the C++ part of Bun should be built" DEFAULT OFF)
optionx(SKIP_CODEGEN BOOL "Skip JavaScript codegen (for Windows ARM64 debug)" DEFAULT OFF)

optionx(BUILDKITE BOOL "If Buildkite is enabled" DEFAULT OFF)
optionx(GITHUB_ACTIONS BOOL "If GitHub Actions is enabled" DEFAULT OFF)
@@ -50,7 +49,7 @@ else()
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_SYSTEM_NAME}")
endif()

if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|ARM64")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "aarch64|arm64|arm")
setx(ARCH "aarch64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
setx(ARCH "x64")
@@ -58,18 +57,6 @@ else()
message(FATAL_ERROR "Unsupported architecture: ${CMAKE_SYSTEM_PROCESSOR}")
endif()

# CMake 4.0+ policy CMP0197 controls how MSVC machine type flags are handled
# Setting to NEW prevents duplicate /machine: flags being added to linker commands
if(WIN32 AND ARCH STREQUAL "aarch64")
set(CMAKE_POLICY_DEFAULT_CMP0197 NEW)
set(CMAKE_MSVC_CMP0197 NEW)
# Set linker flags for exe/shared linking
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /machine:ARM64")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /machine:ARM64")
set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} /machine:ARM64")
set(CMAKE_STATIC_LINKER_FLAGS "${CMAKE_STATIC_LINKER_FLAGS} /machine:ARM64")
endif()

# Windows Code Signing Option
if(WIN32)
optionx(ENABLE_WINDOWS_CODESIGNING BOOL "Enable Windows code signing with DigiCert KeyLocker" DEFAULT OFF)
@@ -212,16 +199,6 @@ optionx(USE_WEBKIT_ICU BOOL "Use the ICU libraries from WebKit" DEFAULT ${DEFAUL

optionx(ERROR_LIMIT STRING "Maximum number of errors to show when compiling C++ code" DEFAULT "100")

# TinyCC is used for FFI JIT compilation
# Disable on Windows ARM64 where it's not yet supported
if(WIN32 AND ARCH STREQUAL "aarch64")
set(DEFAULT_ENABLE_TINYCC OFF)
else()
set(DEFAULT_ENABLE_TINYCC ON)
endif()

optionx(ENABLE_TINYCC BOOL "Enable TinyCC for FFI JIT compilation" DEFAULT ${DEFAULT_ENABLE_TINYCC})

# This is not an `option` because setting this variable to OFF is experimental
# and unsupported. This replaces the `use_mimalloc` variable previously in
# bun.zig, and enables C++ code to also be aware of the option.
@@ -13,7 +13,10 @@
},
{
"output": "JavaScriptSources.txt",
"paths": ["src/js/**/*.{js,ts}", "src/install/PackageManager/scanner-entry.ts"]
"paths": [
"src/js/**/*.{js,ts}",
"src/install/PackageManager/scanner-entry.ts"
]
},
{
"output": "JavaScriptCodegenSources.txt",
@@ -1,8 +0,0 @@
# This file is included after project() via CMAKE_PROJECT_INCLUDE
# It fixes the static library creation command to use ARM64 machine type

if(WIN32 AND CMAKE_SYSTEM_PROCESSOR STREQUAL \"aarch64\")
# Override the static library creation commands to avoid spurious /machine:x64 flags
set(CMAKE_C_CREATE_STATIC_LIBRARY \"<CMAKE_AR> /nologo /machine:ARM64 /out:<TARGET> <OBJECTS>\" CACHE STRING \"\" FORCE)
set(CMAKE_CXX_CREATE_STATIC_LIBRARY \"<CMAKE_AR> /nologo /machine:ARM64 /out:<TARGET> <OBJECTS>\" CACHE STRING \"\" FORCE)
endif()
@@ -21,12 +21,7 @@ if(NOT DEFINED CMAKE_HOST_SYSTEM_PROCESSOR)
endif()

if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
# Windows ARM64 can run x86_64 via emulation, and no native ARM64 Zig build exists yet
if(CMAKE_HOST_WIN32)
set(ZIG_ARCH "x86_64")
else()
set(ZIG_ARCH "aarch64")
endif()
set(ZIG_ARCH "aarch64")
elseif(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "amd64|AMD64|x86_64|X86_64|x64|X64")
set(ZIG_ARCH "x86_64")
else()
@@ -1,34 +0,0 @@
@echo off
setlocal enabledelayedexpansion

REM Wrapper for llvm-lib that strips conflicting /machine:x64 flag for ARM64 builds
REM This is a workaround for CMake 4.1.0 bug

REM Find llvm-lib.exe - check LLVM_LIB env var, then PATH, then known locations
if defined LLVM_LIB (
set "LLVM_LIB_EXE=!LLVM_LIB!"
) else (
where llvm-lib.exe >nul 2>&1
if !ERRORLEVEL! equ 0 (
for /f "delims=" %%i in ('where llvm-lib.exe') do set "LLVM_LIB_EXE=%%i"
) else if exist "C:\Program Files\LLVM\bin\llvm-lib.exe" (
set "LLVM_LIB_EXE=C:\Program Files\LLVM\bin\llvm-lib.exe"
) else (
echo Error: Cannot find llvm-lib.exe. Set LLVM_LIB environment variable or add LLVM to PATH.
exit /b 1
)
)

set "ARGS="

for %%a in (%*) do (
set "ARG=%%a"
if /i "!ARG!"=="/machine:x64" (
REM Skip this argument
) else (
set "ARGS=!ARGS! %%a"
)
)

"!LLVM_LIB_EXE!" %ARGS%
exit /b %ERRORLEVEL%
@@ -1,18 +0,0 @@
# Wrapper for llvm-lib that strips conflicting /machine:x64 flag for ARM64 builds
# This is a workaround for CMake 4.1.0 bug where both /machine:ARM64 and /machine:x64 are added

# Find llvm-lib.exe - check LLVM_LIB env var, then PATH, then known locations
if ($env:LLVM_LIB) {
$llvmLib = $env:LLVM_LIB
} elseif (Get-Command llvm-lib.exe -ErrorAction SilentlyContinue) {
$llvmLib = (Get-Command llvm-lib.exe).Source
} elseif (Test-Path "C:\Program Files\LLVM\bin\llvm-lib.exe") {
$llvmLib = "C:\Program Files\LLVM\bin\llvm-lib.exe"
} else {
Write-Error "Cannot find llvm-lib.exe. Set LLVM_LIB environment variable or add LLVM to PATH."
exit 1
}

$filteredArgs = $args | Where-Object { $_ -ne "/machine:x64" }
& $llvmLib @filteredArgs
exit $LASTEXITCODE
@@ -1,34 +0,0 @@
@echo off
setlocal enabledelayedexpansion

REM Wrapper for llvm-lib that strips conflicting /machine:x64 flag for ARM64 builds
REM This is a workaround for CMake 4.1.0 bug

REM Find llvm-lib.exe - check LLVM_LIB env var, then PATH, then known locations
if defined LLVM_LIB (
set "LLVM_LIB_EXE=!LLVM_LIB!"
) else (
where llvm-lib.exe >nul 2>&1
if !ERRORLEVEL! equ 0 (
for /f "delims=" %%i in ('where llvm-lib.exe') do set "LLVM_LIB_EXE=%%i"
) else if exist "C:\Program Files\LLVM\bin\llvm-lib.exe" (
set "LLVM_LIB_EXE=C:\Program Files\LLVM\bin\llvm-lib.exe"
) else (
echo Error: Cannot find llvm-lib.exe. Set LLVM_LIB environment variable or add LLVM to PATH.
exit /b 1
)
)

set NEWARGS=

for %%a in (%*) do (
set "ARG=%%a"
if /i "!ARG!"=="/machine:x64" (
REM Skip /machine:x64 argument
) else (
set "NEWARGS=!NEWARGS! %%a"
)
)

"!LLVM_LIB_EXE!" %NEWARGS%
exit /b %ERRORLEVEL%
@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/boringssl
COMMIT
4f4f5ef8ebc6e23cbf393428f0ab1b526773f7ac
f1ffd9e83d4f5c28a9c70d73f9a4e6fcf310062f
)

register_cmake_command(
@@ -57,17 +57,13 @@ set(BUN_DEPENDENCIES
LolHtml
Lshpack
Mimalloc
TinyCC
Zlib
LibArchive # must be loaded after zlib
HdrHistogram # must be loaded after zlib
Zstd
)

# TinyCC is optional - disabled on Windows ARM64 where it's not supported
if(ENABLE_TINYCC)
list(APPEND BUN_DEPENDENCIES TinyCC)
endif()

include(CloneZstd)

# --- Codegen ---
@@ -189,7 +185,7 @@ register_command(
CWD
${BUN_NODE_FALLBACKS_SOURCE}
COMMAND
${BUN_EXECUTABLE} ${BUN_FLAGS} run build-fallbacks
${BUN_EXECUTABLE} run build-fallbacks
${BUN_NODE_FALLBACKS_OUTPUT}
${BUN_NODE_FALLBACKS_SOURCES}
SOURCES
@@ -210,7 +206,7 @@ register_command(
CWD
${BUN_NODE_FALLBACKS_SOURCE}
COMMAND
${BUN_EXECUTABLE} ${BUN_FLAGS} build
${BUN_EXECUTABLE} build
${BUN_NODE_FALLBACKS_SOURCE}/node_modules/react-refresh/cjs/react-refresh-runtime.development.js
--outfile=${BUN_REACT_REFRESH_OUTPUT}
--target=browser
@@ -247,7 +243,6 @@ register_command(
"Generating ErrorCode.{zig,h}"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_ERROR_CODE_SCRIPT}
${CODEGEN_PATH}
@@ -283,7 +278,6 @@ register_command(
"Generating ZigGeneratedClasses.{zig,cpp,h}"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_ZIG_GENERATED_CLASSES_SCRIPT}
${BUN_ZIG_GENERATED_CLASSES_SOURCES}
@@ -334,7 +328,6 @@ register_command(
"Generating C++ --> Zig bindings"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
${CWD}/src/codegen/cppbind.ts
${CWD}/src
${CODEGEN_PATH}
@@ -352,7 +345,6 @@ register_command(
"Generating CI info"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
${CWD}/src/codegen/ci_info.ts
${CODEGEN_PATH}/ci_info.zig
SOURCES
@@ -361,35 +353,24 @@ register_command(
${BUN_CI_INFO_OUTPUTS}
)

if(SKIP_CODEGEN)
# Skip JavaScript codegen - useful for Windows ARM64 debug builds where bun crashes
message(STATUS "SKIP_CODEGEN is ON - skipping bun-js-modules codegen")
foreach(output ${BUN_JAVASCRIPT_OUTPUTS})
if(NOT EXISTS ${output})
message(FATAL_ERROR "SKIP_CODEGEN is ON but ${output} does not exist. Run codegen manually first.")
endif()
endforeach()
else()
register_command(
TARGET
bun-js-modules
COMMENT
"Generating JavaScript modules"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_JAVASCRIPT_CODEGEN_SCRIPT}
--debug=${DEBUG}
${BUILD_PATH}
SOURCES
${BUN_JAVASCRIPT_SOURCES}
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
register_command(
TARGET
bun-js-modules
COMMENT
"Generating JavaScript modules"
COMMAND
${BUN_EXECUTABLE}
run
${BUN_JAVASCRIPT_CODEGEN_SCRIPT}
OUTPUTS
${BUN_JAVASCRIPT_OUTPUTS}
)
endif()
--debug=${DEBUG}
${BUILD_PATH}
SOURCES
${BUN_JAVASCRIPT_SOURCES}
${BUN_JAVASCRIPT_CODEGEN_SOURCES}
${BUN_JAVASCRIPT_CODEGEN_SCRIPT}
OUTPUTS
${BUN_JAVASCRIPT_OUTPUTS}
)

set(BUN_BAKE_RUNTIME_CODEGEN_SCRIPT ${CWD}/src/codegen/bake-codegen.ts)
@@ -411,7 +392,6 @@ register_command(
"Bundling Bake Runtime"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_BAKE_RUNTIME_CODEGEN_SCRIPT}
--debug=${DEBUG}
@@ -435,7 +415,7 @@ string(REPLACE ";" "," BUN_BINDGENV2_SOURCES_COMMA_SEPARATED
"${BUN_BINDGENV2_SOURCES}")

execute_process(
COMMAND ${BUN_EXECUTABLE} ${BUN_FLAGS} run ${BUN_BINDGENV2_SCRIPT}
COMMAND ${BUN_EXECUTABLE} run ${BUN_BINDGENV2_SCRIPT}
--command=list-outputs
--sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
--codegen-path=${CODEGEN_PATH}
@@ -458,7 +438,7 @@ register_command(
COMMENT
"Generating bindings (v2)"
COMMAND
${BUN_EXECUTABLE} ${BUN_FLAGS} run ${BUN_BINDGENV2_SCRIPT}
${BUN_EXECUTABLE} run ${BUN_BINDGENV2_SCRIPT}
--command=generate
--codegen-path=${CODEGEN_PATH}
--sources=${BUN_BINDGENV2_SOURCES_COMMA_SEPARATED}
@@ -489,7 +469,6 @@ register_command(
"Processing \".bind.ts\" files"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_BINDGEN_SCRIPT}
--debug=${DEBUG}
@@ -522,7 +501,6 @@ register_command(
"Generating JSSink.{cpp,h}"
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_JS_SINK_SCRIPT}
${CODEGEN_PATH}
@@ -595,7 +573,6 @@ foreach(i RANGE 0 ${BUN_OBJECT_LUT_SOURCES_MAX_INDEX})
${BUN_OBJECT_LUT_SOURCE}
COMMAND
${BUN_EXECUTABLE}
${BUN_FLAGS}
run
${BUN_OBJECT_LUT_SCRIPT}
${BUN_OBJECT_LUT_SOURCE}
@@ -679,10 +656,6 @@ endif()
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
if(APPLE)
set(ZIG_CPU "apple_m1")
elseif(WIN32)
# Windows ARM64: use a specific CPU with NEON support
# Zig running under x64 emulation would detect wrong CPU with "native"
set(ZIG_CPU "cortex_a76")
else()
set(ZIG_CPU "native")
endif()
@@ -721,7 +694,6 @@ register_command(
-Denable_asan=$<IF:$<BOOL:${ENABLE_ZIG_ASAN}>,true,false>
-Denable_fuzzilli=$<IF:$<BOOL:${ENABLE_FUZZILLI}>,true,false>
-Denable_valgrind=$<IF:$<BOOL:${ENABLE_VALGRIND}>,true,false>
-Denable_tinycc=$<IF:$<BOOL:${ENABLE_TINYCC}>,true,false>
-Duse_mimalloc=$<IF:$<BOOL:${USE_MIMALLOC_AS_DEFAULT_ALLOCATOR}>,true,false>
-Dllvm_codegen_threads=${LLVM_ZIG_CODEGEN_THREADS}
-Dversion=${VERSION}
@@ -939,7 +911,7 @@ if(WIN32)
endif()

if(USE_MIMALLOC_AS_DEFAULT_ALLOCATOR)
target_compile_definitions(${bun} PRIVATE USE_BUN_MIMALLOC=1)
target_compile_definitions(${bun} PRIVATE USE_MIMALLOC=1)
endif()

target_compile_definitions(${bun} PRIVATE
@@ -1239,7 +1211,7 @@ if(BUN_LINK_ONLY)
WEBKIT_DOWNLOAD_URL=${WEBKIT_DOWNLOAD_URL}
WEBKIT_VERSION=${WEBKIT_VERSION}
ZIG_COMMIT=${ZIG_COMMIT}
${BUN_EXECUTABLE} ${BUN_FLAGS} ${CWD}/scripts/create-link-metadata.mjs ${BUILD_PATH} ${bun}
${BUN_EXECUTABLE} ${CWD}/scripts/create-link-metadata.mjs ${BUILD_PATH} ${bun}
SOURCES
${BUN_ZIG_OUTPUT}
${BUN_CPP_OUTPUT}
@@ -1253,7 +1225,6 @@ if(WIN32)
target_link_libraries(${bun} PRIVATE
${WEBKIT_LIB_PATH}/WTF.lib
${WEBKIT_LIB_PATH}/JavaScriptCore.lib
${WEBKIT_LIB_PATH}/bmalloc.lib
${WEBKIT_LIB_PATH}/sicudtd.lib
${WEBKIT_LIB_PATH}/sicuind.lib
${WEBKIT_LIB_PATH}/sicuucd.lib
@@ -1262,7 +1233,6 @@ if(WIN32)
target_link_libraries(${bun} PRIVATE
${WEBKIT_LIB_PATH}/WTF.lib
${WEBKIT_LIB_PATH}/JavaScriptCore.lib
${WEBKIT_LIB_PATH}/bmalloc.lib
${WEBKIT_LIB_PATH}/sicudt.lib
${WEBKIT_LIB_PATH}/sicuin.lib
${WEBKIT_LIB_PATH}/sicuuc.lib
@@ -20,15 +20,6 @@ set(HIGHWAY_CMAKE_ARGS
-DHWY_ENABLE_INSTALL=OFF
)

# On Windows ARM64 with clang-cl, the __ARM_NEON macro isn't defined by default
# but NEON intrinsics are supported. Define it so Highway can detect NEON support.
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
list(APPEND HIGHWAY_CMAKE_ARGS
-DCMAKE_C_FLAGS=-D__ARM_NEON=1
-DCMAKE_CXX_FLAGS=-D__ARM_NEON=1
)
endif()

register_cmake_command(
TARGET
highway
@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
cloudflare/lol-html
COMMIT
e9e16dca48dd4a8ffbc77642bc4be60407585f11
d64457d9ff0143deef025d5df7e8586092b9afb7
)

set(LOLHTML_CWD ${VENDOR_PATH}/lolhtml/c-api)
@@ -33,37 +33,6 @@ if (NOT WIN32)
set(RUSTFLAGS "-Cpanic=abort-Cdebuginfo=0-Cforce-unwind-tables=no-Copt-level=s")
endif()

# On Windows, ensure MSVC link.exe is used instead of Git's link.exe
set(LOLHTML_ENV
CARGO_TERM_COLOR=always
CARGO_TERM_VERBOSE=true
CARGO_TERM_DIAGNOSTIC=true
CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
CARGO_HOME=${CARGO_HOME}
RUSTUP_HOME=${RUSTUP_HOME}
)

if(WIN32)
# On Windows, tell Rust to use MSVC link.exe directly via the target-specific linker env var.
# This avoids Git's /usr/bin/link being found first in PATH.
# Find the MSVC link.exe from Visual Studio installation
file(GLOB MSVC_VERSIONS "C:/Program Files/Microsoft Visual Studio/2022/*/VC/Tools/MSVC/*")
if(MSVC_VERSIONS)
list(GET MSVC_VERSIONS -1 MSVC_LATEST) # Get the latest version
if(CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64")
set(MSVC_LINK_PATH "${MSVC_LATEST}/bin/HostARM64/arm64/link.exe")
set(CARGO_LINKER_VAR "CARGO_TARGET_AARCH64_PC_WINDOWS_MSVC_LINKER")
else()
set(MSVC_LINK_PATH "${MSVC_LATEST}/bin/Hostx64/x64/link.exe")
set(CARGO_LINKER_VAR "CARGO_TARGET_X86_64_PC_WINDOWS_MSVC_LINKER")
endif()
if(EXISTS "${MSVC_LINK_PATH}")
list(APPEND LOLHTML_ENV "${CARGO_LINKER_VAR}=${MSVC_LINK_PATH}")
message(STATUS "lolhtml: Using MSVC link.exe: ${MSVC_LINK_PATH}")
endif()
endif()
endif()

register_command(
TARGET
lolhtml
@@ -76,7 +45,12 @@ register_command(
ARTIFACTS
${LOLHTML_LIBRARY}
ENVIRONMENT
${LOLHTML_ENV}
CARGO_TERM_COLOR=always
CARGO_TERM_VERBOSE=true
CARGO_TERM_DIAGNOSTIC=true
CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
CARGO_HOME=${CARGO_HOME}
RUSTUP_HOME=${RUSTUP_HOME}
)

target_link_libraries(${bun} PRIVATE ${LOLHTML_LIBRARY})
@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/mimalloc
COMMIT
ffa38ab8ac914f9eb7af75c1f8ad457643dc14f2
1beadf9651a7bfdec6b5367c380ecc3fe1c40d1a
)

set(MIMALLOC_CMAKE_ARGS
@@ -14,7 +14,7 @@ set(MIMALLOC_CMAKE_ARGS
-DMI_BUILD_TESTS=OFF
-DMI_USE_CXX=ON
-DMI_SKIP_COLLECT_ON_EXIT=ON

# ```
# ❯ mimalloc_allow_large_os_pages=0 BUN_PORT=3004 mem bun http-hello.js
# Started development server: http://localhost:3004
@@ -51,7 +51,7 @@ if(ENABLE_ASAN)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_DEBUG_UBSAN=ON)
elseif(APPLE OR LINUX)
if(APPLE)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OVERRIDE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_ZONE=OFF)
list(APPEND MIMALLOC_CMAKE_ARGS -DMI_OSX_INTERPOSE=OFF)
else()
@@ -77,9 +77,9 @@ endif()

if(WIN32)
if(DEBUG)
set(MIMALLOC_LIBRARY mimalloc-debug)
set(MIMALLOC_LIBRARY mimalloc-static-debug)
else()
set(MIMALLOC_LIBRARY mimalloc)
set(MIMALLOC_LIBRARY mimalloc-static)
endif()
elseif(DEBUG)
if (ENABLE_ASAN)
@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/tinycc
COMMIT
12882eee073cfe5c7621bcfadf679e1372d4537b
29985a3b59898861442fa3b43f663fc1af2591d7
)

register_cmake_command(
@@ -1,20 +0,0 @@
set(CMAKE_SYSTEM_NAME Windows)
set(CMAKE_SYSTEM_PROCESSOR aarch64)

set(CMAKE_C_COMPILER_WORKS ON)
set(CMAKE_CXX_COMPILER_WORKS ON)

# Force ARM64 architecture ID - this is what CMake uses to determine /machine: flag
set(MSVC_C_ARCHITECTURE_ID ARM64 CACHE INTERNAL "")
set(MSVC_CXX_ARCHITECTURE_ID ARM64 CACHE INTERNAL "")

# CMake 4.0+ policy CMP0197 controls how MSVC machine type flags are handled
set(CMAKE_POLICY_DEFAULT_CMP0197 NEW CACHE INTERNAL "")

# Clear any inherited static linker flags that might have wrong machine types
set(CMAKE_STATIC_LINKER_FLAGS "" CACHE STRING "" FORCE)

# Use wrapper script for llvm-lib that strips /machine:x64 flags
# This works around CMake 4.1.0 bug where both ARM64 and x64 machine flags are added
get_filename_component(_TOOLCHAIN_DIR "${CMAKE_CURRENT_LIST_DIR}" DIRECTORY)
set(CMAKE_AR "${_TOOLCHAIN_DIR}/scripts/llvm-lib-wrapper.bat" CACHE FILEPATH "" FORCE)
@@ -6,8 +6,7 @@ endif()

optionx(BUILDKITE_ORGANIZATION_SLUG STRING "The organization slug to use on Buildkite" DEFAULT "bun")
optionx(BUILDKITE_PIPELINE_SLUG STRING "The pipeline slug to use on Buildkite" DEFAULT "bun")
optionx(BUILDKITE_BUILD_ID STRING "The build ID (UUID) to use on Buildkite")
optionx(BUILDKITE_BUILD_NUMBER STRING "The build number to use on Buildkite")
optionx(BUILDKITE_BUILD_ID STRING "The build ID to use on Buildkite")
optionx(BUILDKITE_GROUP_ID STRING "The group ID to use on Buildkite")

if(ENABLE_BASELINE)
@@ -33,13 +32,7 @@ if(NOT BUILDKITE_BUILD_ID)
return()
endif()

# Use BUILDKITE_BUILD_NUMBER for the URL if available, as the UUID format causes a 302 redirect
# that CMake's file(DOWNLOAD) doesn't follow, resulting in empty response.
if(BUILDKITE_BUILD_NUMBER)
setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_NUMBER})
else()
setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_ID})
endif()
setx(BUILDKITE_BUILD_URL https://buildkite.com/${BUILDKITE_ORGANIZATION_SLUG}/${BUILDKITE_PIPELINE_SLUG}/builds/${BUILDKITE_BUILD_ID})
setx(BUILDKITE_BUILD_PATH ${BUILDKITE_BUILDS_PATH}/builds/${BUILDKITE_BUILD_ID})

file(
@@ -55,16 +48,8 @@ if(NOT BUILDKITE_BUILD_STATUS EQUAL 0)
endif()

file(READ ${BUILDKITE_BUILD_PATH}/build.json BUILDKITE_BUILD)
# CMake's string(JSON ...) interprets escape sequences like \n, \r, \t.
# We need to escape these specific sequences while preserving valid JSON escapes like \" and \\.
# Strategy: Use a unique placeholder to protect \\ sequences, escape \n/\r/\t, then restore \\.
# This prevents \\n (literal backslash + n) from being corrupted to \\\n.
set(BKSLASH_PLACEHOLDER "___BKSLASH_PLACEHOLDER_7f3a9b2c___")
string(REPLACE "\\\\" "${BKSLASH_PLACEHOLDER}" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\n" "\\\\n" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\r" "\\\\r" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "\\t" "\\\\t" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
string(REPLACE "${BKSLASH_PLACEHOLDER}" "\\\\" BUILDKITE_BUILD "${BUILDKITE_BUILD}")
# Escape backslashes so CMake doesn't interpret JSON escape sequences (e.g., \n in commit messages)
string(REPLACE "\\" "\\\\" BUILDKITE_BUILD "${BUILDKITE_BUILD}")

string(JSON BUILDKITE_BUILD_UUID GET ${BUILDKITE_BUILD} id)
string(JSON BUILDKITE_JOBS GET ${BUILDKITE_BUILD} jobs)
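The removed comments describe a placeholder trick for keeping escape sequences intact: stash literal `\\` pairs behind a unique token, double the `\n`/`\r`/`\t` escapes, then put the `\\` pairs back. A small TypeScript illustration of that same string transformation, with illustrative names; CMake's own quoting rules differ in the details:

```ts
// Illustrative re-implementation of the removed escape strategy.
const PLACEHOLDER = "___BKSLASH_PLACEHOLDER_7f3a9b2c___";

function protectJsonEscapes(raw: string): string {
  return raw
    .replaceAll("\\\\", PLACEHOLDER)  // 1. protect literal backslash pairs
    .replaceAll("\\n", "\\\\n")       // 2. double the \n escape
    .replaceAll("\\r", "\\\\r")       //    ...and \r
    .replaceAll("\\t", "\\\\t")       //    ...and \t
    .replaceAll(PLACEHOLDER, "\\\\"); // 3. restore the protected pairs
}

// The \n escape gets doubled, while the literal backslash pair in the path is left alone.
console.log(protectJsonEscapes(String.raw`{"msg":"line\nbreak","path":"C:\\path"}`));
```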
@@ -17,14 +17,6 @@ if (NOT CI)
set(BUN_EXECUTABLE ${BUN_EXECUTABLE} CACHE FILEPATH "Bun executable" FORCE)
endif()

# On Windows ARM64, we need to add --smol flag to avoid crashes when running
# x64 bun under WoW64 emulation
if(WIN32 AND ARCH STREQUAL "aarch64")
set(BUN_FLAGS "--smol" CACHE STRING "Extra flags for bun executable")
else()
set(BUN_FLAGS "" CACHE STRING "Extra flags for bun executable")
endif()

# If this is not set, some advanced features are not checked.
# https://github.com/oven-sh/bun/blob/cd7f6a1589db7f1e39dc4e3f4a17234afbe7826c/src/bun.js/javascript.zig#L1069-L1072
setenv(BUN_GARBAGE_COLLECTOR_LEVEL 1)
@@ -12,13 +12,7 @@ if(NOT ENABLE_LLVM)
return()
endif()

# LLVM 21 is required for Windows ARM64 (first version with ARM64 Windows builds)
# Other platforms use LLVM 19.1.7
if(WIN32 AND CMAKE_SYSTEM_PROCESSOR MATCHES "ARM64|aarch64|AARCH64")
set(DEFAULT_LLVM_VERSION "21.1.8")
else()
set(DEFAULT_LLVM_VERSION "19.1.7")
endif()
set(DEFAULT_LLVM_VERSION "19.1.7")

optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION})
@@ -31,6 +31,13 @@ execute_process(
ERROR_QUIET
)

if(MACOS_VERSION VERSION_LESS ${CMAKE_OSX_DEPLOYMENT_TARGET})
message(FATAL_ERROR "Your computer is running macOS ${MACOS_VERSION}, which is older than the target macOS SDK ${CMAKE_OSX_DEPLOYMENT_TARGET}. To fix this, either:\n"
" - Upgrade your computer to macOS ${CMAKE_OSX_DEPLOYMENT_TARGET} or newer\n"
" - Download a newer version of the macOS SDK from Apple: https://developer.apple.com/download/all/?q=xcode\n"
" - Set -DCMAKE_OSX_DEPLOYMENT_TARGET=${MACOS_VERSION}\n")
endif()

execute_process(
COMMAND xcrun --sdk macosx --show-sdk-path
OUTPUT_VARIABLE DEFAULT_CMAKE_OSX_SYSROOT
@@ -2,14 +2,10 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")

if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION 9a2cc42ae1bf693a0fd0ceb9b1d7d965d9cfd3ea)
set(WEBKIT_VERSION c4d4cae03ecef2791f7ad5dd795f722d8be87d41)
endif()

# Use preview build URL for Windows ARM64 until the fix is merged to main
set(WEBKIT_PREVIEW_PR 140)

string(SUBSTRING ${WEBKIT_VERSION} 0 16 WEBKIT_VERSION_PREFIX)
string(SUBSTRING ${WEBKIT_VERSION} 0 8 WEBKIT_VERSION_SHORT)

if(WEBKIT_LOCAL)
set(DEFAULT_WEBKIT_PATH ${VENDOR_PATH}/WebKit/WebKitBuild/${CMAKE_BUILD_TYPE})
@@ -40,22 +36,6 @@ if(WEBKIT_LOCAL)
${WEBKIT_PATH}/JavaScriptCore/PrivateHeaders/JavaScriptCore
${WEBKIT_PATH}/JavaScriptCore/DerivedSources/inspector
)

# On Windows, add ICU include path from vcpkg
if(WIN32)
# Auto-detect vcpkg triplet
set(VCPKG_ARM64_PATH ${VENDOR_PATH}/WebKit/vcpkg_installed/arm64-windows-static)
set(VCPKG_X64_PATH ${VENDOR_PATH}/WebKit/vcpkg_installed/x64-windows-static)
if(EXISTS ${VCPKG_ARM64_PATH})
set(VCPKG_ICU_PATH ${VCPKG_ARM64_PATH})
else()
set(VCPKG_ICU_PATH ${VCPKG_X64_PATH})
endif()
if(EXISTS ${VCPKG_ICU_PATH}/include)
include_directories(${VCPKG_ICU_PATH}/include)
message(STATUS "Using ICU from vcpkg: ${VCPKG_ICU_PATH}/include")
endif()
endif()
endif()

# After this point, only prebuilt WebKit is supported
@@ -72,7 +52,7 @@ else()
message(FATAL_ERROR "Unsupported operating system: ${CMAKE_SYSTEM_NAME}")
endif()

if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
set(WEBKIT_ARCH "arm64")
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
set(WEBKIT_ARCH "amd64")
@@ -101,14 +81,7 @@ endif()

setx(WEBKIT_NAME bun-webkit-${WEBKIT_OS}-${WEBKIT_ARCH}${WEBKIT_SUFFIX})
set(WEBKIT_FILENAME ${WEBKIT_NAME}.tar.gz)

if(WEBKIT_VERSION MATCHES "^autobuild-")
set(WEBKIT_TAG ${WEBKIT_VERSION})
else()
set(WEBKIT_TAG autobuild-${WEBKIT_VERSION})
endif()

setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/${WEBKIT_TAG}/${WEBKIT_FILENAME})
setx(WEBKIT_DOWNLOAD_URL https://github.com/oven-sh/WebKit/releases/download/autobuild-${WEBKIT_VERSION}/${WEBKIT_FILENAME})

if(EXISTS ${WEBKIT_PATH}/package.json)
file(READ ${WEBKIT_PATH}/package.json WEBKIT_PACKAGE_JSON)
@@ -1,4 +1,4 @@
|
||||
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|ARM64|aarch64|AARCH64")
|
||||
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm64|aarch64")
|
||||
set(DEFAULT_ZIG_ARCH "aarch64")
|
||||
elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64|x64|AMD64")
|
||||
set(DEFAULT_ZIG_ARCH "x86_64")
|
||||
|
||||
@@ -365,23 +365,6 @@ The `--bytecode` argument enables bytecode compilation. Every time you run JavaS
|
||||
console.log(process.execArgv); // ["--smol", "--user-agent=MyBot"]
|
||||
```
|
||||
|
||||
### Runtime arguments via `BUN_OPTIONS`
|
||||
|
||||
The `BUN_OPTIONS` environment variable is applied to standalone executables, allowing you to pass runtime flags without recompiling:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
# Enable CPU profiling on a compiled executable
|
||||
BUN_OPTIONS="--cpu-prof" ./myapp
|
||||
|
||||
# Enable heap profiling with markdown output
|
||||
BUN_OPTIONS="--heap-prof-md" ./myapp
|
||||
|
||||
# Combine multiple flags
|
||||
BUN_OPTIONS="--smol --cpu-prof-md" ./myapp
|
||||
```
|
||||
|
||||
This is useful for debugging or profiling production executables without rebuilding them.
|
||||
|
||||
---
|
||||
|
||||
## Automatic config loading
|
||||
|
||||
@@ -1333,50 +1333,6 @@ Generate metadata about the build in a structured format. The metafile contains
|
||||
</Tab>
|
||||
</Tabs>
|
||||
|
||||
#### Markdown metafile
|
||||
|
||||
Use `--metafile-md` to generate a markdown metafile, which is LLM-friendly and easy to read in the terminal:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
bun build ./src/index.ts --outdir ./dist --metafile-md ./dist/meta.md
|
||||
```
|
||||
|
||||
Both `--metafile` and `--metafile-md` can be used together:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
bun build ./src/index.ts --outdir ./dist --metafile ./dist/meta.json --metafile-md ./dist/meta.md
|
||||
```
|
||||
|
||||
#### `metafile` option formats
|
||||
|
||||
In the JavaScript API, `metafile` accepts several forms:
|
||||
|
||||
```ts title="build.ts" icon="/icons/typescript.svg"
|
||||
// Boolean — include metafile in the result object
|
||||
await Bun.build({
|
||||
entrypoints: ["./src/index.ts"],
|
||||
outdir: "./dist",
|
||||
metafile: true,
|
||||
});
|
||||
|
||||
// String — write JSON metafile to a specific path
|
||||
await Bun.build({
|
||||
entrypoints: ["./src/index.ts"],
|
||||
outdir: "./dist",
|
||||
metafile: "./dist/meta.json",
|
||||
});
|
||||
|
||||
// Object — specify separate paths for JSON and markdown output
|
||||
await Bun.build({
|
||||
entrypoints: ["./src/index.ts"],
|
||||
outdir: "./dist",
|
||||
metafile: {
|
||||
json: "./dist/meta.json",
|
||||
markdown: "./dist/meta.md",
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
The metafile structure contains:
|
||||
|
||||
```ts
|
||||
|
||||
@@ -150,9 +150,6 @@
|
||||
"/runtime/secrets",
|
||||
"/runtime/console",
|
||||
"/runtime/yaml",
|
||||
"/runtime/markdown",
|
||||
"/runtime/json5",
|
||||
"/runtime/jsonl",
|
||||
"/runtime/html-rewriter",
|
||||
"/runtime/hashing",
|
||||
"/runtime/glob",
|
||||
@@ -500,7 +497,6 @@
|
||||
"/guides/runtime/import-json",
|
||||
"/guides/runtime/import-toml",
|
||||
"/guides/runtime/import-yaml",
|
||||
"/guides/runtime/import-json5",
|
||||
"/guides/runtime/import-html",
|
||||
"/guides/util/import-meta-dir",
|
||||
"/guides/util/import-meta-file",
|
||||
|
||||
@@ -1,74 +0,0 @@
|
||||
---
|
||||
title: Import a JSON5 file
|
||||
sidebarTitle: Import JSON5
|
||||
mode: center
|
||||
---
|
||||
|
||||
Bun natively supports `.json5` imports.
|
||||
|
||||
```json5 config.json5 icon="file-code"
|
||||
{
|
||||
// Comments are allowed
|
||||
database: {
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
name: "myapp",
|
||||
},
|
||||
|
||||
server: {
|
||||
port: 3000,
|
||||
timeout: 30,
|
||||
},
|
||||
|
||||
features: {
|
||||
auth: true,
|
||||
rateLimit: true,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Import the file like any other source file.
|
||||
|
||||
```ts config.ts icon="/icons/typescript.svg"
|
||||
import config from "./config.json5";
|
||||
|
||||
config.database.host; // => "localhost"
|
||||
config.server.port; // => 3000
|
||||
config.features.auth; // => true
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
You can also use named imports to destructure top-level properties:
|
||||
|
||||
```ts config.ts icon="/icons/typescript.svg"
|
||||
import { database, server, features } from "./config.json5";
|
||||
|
||||
console.log(database.name); // => "myapp"
|
||||
console.log(server.timeout); // => 30
|
||||
console.log(features.rateLimit); // => true
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
For parsing JSON5 strings at runtime, use `Bun.JSON5.parse()`:
|
||||
|
||||
```ts config.ts icon="/icons/typescript.svg"
|
||||
const data = Bun.JSON5.parse(`{
|
||||
name: 'John Doe',
|
||||
age: 30,
|
||||
hobbies: [
|
||||
'reading',
|
||||
'coding',
|
||||
],
|
||||
}`);
|
||||
|
||||
console.log(data.name); // => "John Doe"
|
||||
console.log(data.hobbies); // => ["reading", "coding"]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
See [Docs > API > JSON5](/runtime/json5) for complete documentation on JSON5 support in Bun.
|
||||
@@ -227,26 +227,6 @@ bun --cpu-prof script.js
|
||||
|
||||
This generates a `.cpuprofile` file you can open in Chrome DevTools (Performance tab → Load profile) or VS Code's CPU profiler.
|
||||
|
||||
### Markdown output
|
||||
|
||||
Use `--cpu-prof-md` to generate a markdown CPU profile, which is grep-friendly and designed for LLM analysis:
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun --cpu-prof-md script.js
|
||||
```
|
||||
|
||||
Both `--cpu-prof` and `--cpu-prof-md` can be used together to generate both formats at once:
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun --cpu-prof --cpu-prof-md script.js
|
||||
```
|
||||
|
||||
You can also trigger profiling via the `BUN_OPTIONS` environment variable:
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
BUN_OPTIONS="--cpu-prof-md" bun script.js
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
@@ -254,43 +234,8 @@ bun --cpu-prof --cpu-prof-name my-profile.cpuprofile script.js
|
||||
bun --cpu-prof --cpu-prof-dir ./profiles script.js
|
||||
```
|
||||
|
||||
| Flag | Description |
|
||||
| ---------------------------- | ----------------------------------------------------------- |
|
||||
| `--cpu-prof` | Generate a `.cpuprofile` JSON file (Chrome DevTools format) |
|
||||
| `--cpu-prof-md` | Generate a markdown CPU profile (grep/LLM-friendly) |
|
||||
| `--cpu-prof-name <filename>` | Set output filename |
|
||||
| `--cpu-prof-dir <dir>` | Set output directory |
|
||||
|
||||
## Heap profiling
|
||||
|
||||
Generate heap snapshots on exit to analyze memory usage and find memory leaks.
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun --heap-prof script.js
|
||||
```
|
||||
|
||||
This generates a V8 `.heapsnapshot` file that can be loaded in Chrome DevTools (Memory tab → Load).
|
||||
|
||||
### Markdown output
|
||||
|
||||
Use `--heap-prof-md` to generate a markdown heap profile for CLI analysis:
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun --heap-prof-md script.js
|
||||
```
|
||||
|
||||
<Note>If both `--heap-prof` and `--heap-prof-md` are specified, the markdown format is used.</Note>
|
||||
|
||||
### Options
|
||||
|
||||
```sh terminal icon="terminal"
|
||||
bun --heap-prof --heap-prof-name my-snapshot.heapsnapshot script.js
|
||||
bun --heap-prof --heap-prof-dir ./profiles script.js
|
||||
```
|
||||
|
||||
| Flag | Description |
|
||||
| ----------------------------- | ------------------------------------------ |
|
||||
| `--heap-prof` | Generate a V8 `.heapsnapshot` file on exit |
|
||||
| `--heap-prof-md` | Generate a markdown heap profile on exit |
|
||||
| `--heap-prof-name <filename>` | Set output filename |
|
||||
| `--heap-prof-dir <dir>` | Set output directory |
|
||||
| Flag | Description |
|
||||
| ---------------------------- | -------------------- |
|
||||
| `--cpu-prof` | Enable profiling |
|
||||
| `--cpu-prof-name <filename>` | Set output filename |
|
||||
| `--cpu-prof-dir <dir>` | Set output directory |
|
||||
|
||||
@@ -35,7 +35,7 @@ winget install "Visual Studio Community 2022" --override "--add Microsoft.Visual
|
||||
|
||||
After Visual Studio, you need the following:
|
||||
|
||||
- LLVM (19.1.7 for x64, 21.1.8 for ARM64)
|
||||
- LLVM 19.1.7
|
||||
- Go
|
||||
- Rust
|
||||
- NASM
|
||||
@@ -47,35 +47,25 @@ After Visual Studio, you need the following:
|
||||
|
||||
[Scoop](https://scoop.sh) can be used to install these remaining tools easily.
|
||||
|
||||
```ps1 Scoop (x64)
|
||||
```ps1 Scoop
|
||||
irm https://get.scoop.sh | iex
|
||||
scoop install nodejs-lts go rust nasm ruby perl ccache
|
||||
# scoop seems to be buggy if you install llvm and the rest at the same time
|
||||
scoop install llvm@19.1.7
|
||||
```
|
||||
|
||||
For Windows ARM64, download LLVM 21.1.8 directly from GitHub releases (first version with ARM64 Windows builds):
|
||||
|
||||
```ps1 ARM64
|
||||
# Download and install LLVM for ARM64
|
||||
Invoke-WebRequest -Uri "https://github.com/llvm/llvm-project/releases/download/llvmorg-21.1.8/LLVM-21.1.8-woa64.exe" -OutFile "$env:TEMP\LLVM-21.1.8-woa64.exe"
|
||||
Start-Process -FilePath "$env:TEMP\LLVM-21.1.8-woa64.exe" -ArgumentList "/S" -Wait
|
||||
```
|
||||
|
||||
<Note>
|
||||
Please do not use WinGet/other package manager for these, as you will likely install Strawberry Perl instead of a more
|
||||
minimal installation of Perl. Strawberry Perl includes many other utilities that get installed into `$Env:PATH` that
|
||||
will conflict with MSVC and break the build.
|
||||
</Note>
|
||||
|
||||
If you intend on building WebKit locally (optional, x64 only), you should install these packages:
|
||||
If you intend on building WebKit locally (optional), you should install these packages:
|
||||
|
||||
```ps1 Scoop
|
||||
scoop install make cygwin python
|
||||
```
|
||||
|
||||
<Note>Cygwin is not required for ARM64 builds as WebKit is provided as a pre-built binary.</Note>
|
||||
|
||||
From here on out, it is **expected you use a PowerShell Terminal with `.\scripts\vs-shell.ps1` sourced**. This script is available in the Bun repository and can be loaded by executing it:
|
||||
|
||||
```ps1
|
||||
|
||||
@@ -55,5 +55,5 @@ Click the link in the right column to jump to the associated documentation.
|
||||
| Stream Processing | [`Bun.readableStreamTo*()`](/runtime/utils#bun-readablestreamto), `Bun.readableStreamToBytes()`, `Bun.readableStreamToBlob()`, `Bun.readableStreamToFormData()`, `Bun.readableStreamToJSON()`, `Bun.readableStreamToArray()` |
|
||||
| Memory & Buffer Management | `Bun.ArrayBufferSink`, `Bun.allocUnsafe`, `Bun.concatArrayBuffers` |
|
||||
| Module Resolution | [`Bun.resolveSync()`](/runtime/utils#bun-resolvesync) |
|
||||
| Parsing & Formatting | [`Bun.semver`](/runtime/semver), `Bun.TOML.parse`, [`Bun.markdown`](/runtime/markdown), [`Bun.color`](/runtime/color) |
|
||||
| Parsing & Formatting | [`Bun.semver`](/runtime/semver), `Bun.TOML.parse`, [`Bun.color`](/runtime/color) |
|
||||
| Low-level / Internals | `Bun.mmap`, `Bun.gc`, `Bun.generateHeapSnapshot`, [`bun:jsc`](https://bun.com/reference/bun/jsc) |
|
||||
|
||||
@@ -5,7 +5,7 @@ description: "File types and loaders supported by Bun's bundler and runtime"
|
||||
|
||||
The Bun bundler implements a set of default loaders out of the box. As a rule of thumb, the bundler and the runtime both support the same set of file types out of the box.
|
||||
|
||||
`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.css` `.json` `.jsonc` `.json5` `.toml` `.yaml` `.yml` `.txt` `.wasm` `.node` `.html` `.sh`
|
||||
`.js` `.cjs` `.mjs` `.mts` `.cts` `.ts` `.tsx` `.jsx` `.css` `.json` `.jsonc` `.toml` `.yaml` `.yml` `.txt` `.wasm` `.node` `.html` `.sh`
|
||||
|
||||
Bun uses the file extension to determine which built-in _loader_ should be used to parse the file. Every loader has a name, such as `js`, `tsx`, or `json`. These names are used when building [plugins](/bundler/plugins) that extend Bun with custom loaders.
|
||||
|
||||
@@ -197,53 +197,6 @@ export default {
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
### `json5`
|
||||
|
||||
**JSON5 loader**. Default for `.json5`.
|
||||
|
||||
JSON5 files can be directly imported. Bun will parse them with its fast native JSON5 parser. JSON5 is a superset of JSON that supports comments, trailing commas, unquoted keys, single-quoted strings, and more.
|
||||
|
||||
```ts
|
||||
import config from "./config.json5";
|
||||
console.log(config);
|
||||
|
||||
// via import attribute:
|
||||
import data from "./data.txt" with { type: "json5" };
|
||||
```
|
||||
|
||||
During bundling, the parsed JSON5 is inlined into the bundle as a JavaScript object.
|
||||
|
||||
```ts
|
||||
var config = {
|
||||
name: "my-app",
|
||||
version: "1.0.0",
|
||||
// ...other fields
|
||||
};
|
||||
```
|
||||
|
||||
If a `.json5` file is passed as an entrypoint, it will be converted to a `.js` module that `export default`s the parsed object.
|
||||
|
||||
<CodeGroup>
|
||||
|
||||
```json5 Input
|
||||
{
|
||||
// Configuration
|
||||
name: "John Doe",
|
||||
age: 35,
|
||||
email: "johndoe@example.com",
|
||||
}
|
||||
```
|
||||
|
||||
```ts Output
|
||||
export default {
|
||||
name: "John Doe",
|
||||
age: 35,
|
||||
email: "johndoe@example.com",
|
||||
};
|
||||
```
|
||||
|
||||
</CodeGroup>
|
||||
|
||||
### `text`
|
||||
|
||||
**Text loader**. Default for `.txt`.
|
||||
|
||||
@@ -1,271 +0,0 @@
|
||||
---
|
||||
title: JSON5
|
||||
description: Use Bun's built-in support for JSON5 files through both runtime APIs and bundler integration
|
||||
---
|
||||
|
||||
In Bun, JSON5 is a first-class citizen alongside JSON, TOML, and YAML. You can:
|
||||
|
||||
- Parse and stringify JSON5 with `Bun.JSON5.parse` and `Bun.JSON5.stringify`
|
||||
- `import` & `require` JSON5 files as modules at runtime (including hot reloading & watch mode support)
|
||||
- `import` & `require` JSON5 files in frontend apps via Bun's bundler
|
||||
|
||||
---
|
||||
|
||||
## Conformance
|
||||
|
||||
Bun's JSON5 parser passes 100% of the [official JSON5 test suite](https://github.com/json5/json5-tests). The parser is written in Zig for optimal performance. You can view our [translated test suite](https://github.com/oven-sh/bun/blob/main/test/js/bun/json5/json5-test-suite.test.ts) to see every test case.
|
||||
|
||||
---
|
||||
|
||||
## Runtime API
|
||||
|
||||
### `Bun.JSON5.parse()`
|
||||
|
||||
Parse a JSON5 string into a JavaScript value.
|
||||
|
||||
```ts
|
||||
import { JSON5 } from "bun";
|
||||
|
||||
const data = JSON5.parse(`{
|
||||
// JSON5 supports comments
|
||||
name: 'my-app',
|
||||
version: '1.0.0',
|
||||
debug: true,
|
||||
|
||||
// trailing commas are allowed
|
||||
tags: ['web', 'api',],
|
||||
}`);
|
||||
|
||||
console.log(data);
|
||||
// {
|
||||
// name: "my-app",
|
||||
// version: "1.0.0",
|
||||
// debug: true,
|
||||
// tags: ["web", "api"]
|
||||
// }
|
||||
```
|
||||
|
||||
#### Supported JSON5 Features
|
||||
|
||||
JSON5 is a superset of JSON based on ECMAScript 5.1 syntax. It supports:
|
||||
|
||||
- **Comments**: single-line (`//`) and multi-line (`/* */`)
|
||||
- **Trailing commas**: in objects and arrays
|
||||
- **Unquoted keys**: valid ECMAScript 5.1 identifiers can be used as keys
|
||||
- **Single-quoted strings**: in addition to double-quoted strings
|
||||
- **Multi-line strings**: using backslash line continuations
|
||||
- **Hex numbers**: `0xFF`
|
||||
- **Leading & trailing decimal points**: `.5` and `5.`
|
||||
- **Infinity and NaN**: positive and negative
|
||||
- **Explicit plus sign**: `+42`
|
||||
|
||||
```ts
|
||||
const data = JSON5.parse(`{
|
||||
// Unquoted keys
|
||||
unquoted: 'keys work',
|
||||
|
||||
// Single and double quotes
|
||||
single: 'single-quoted',
|
||||
double: "double-quoted",
|
||||
|
||||
// Trailing commas
|
||||
trailing: 'comma',
|
||||
|
||||
// Special numbers
|
||||
hex: 0xDEADbeef,
|
||||
half: .5,
|
||||
to: Infinity,
|
||||
nan: NaN,
|
||||
|
||||
// Multi-line strings
|
||||
multiline: 'line 1 \
|
||||
line 2',
|
||||
}`);
|
||||
```
|
||||
|
||||
#### Error Handling
|
||||
|
||||
`Bun.JSON5.parse()` throws a `SyntaxError` if the input is invalid JSON5:
|
||||
|
||||
```ts
|
||||
try {
|
||||
JSON5.parse("{invalid}");
|
||||
} catch (error) {
|
||||
console.error("Failed to parse JSON5:", error.message);
|
||||
}
|
||||
```
|
||||
|
||||
### `Bun.JSON5.stringify()`
|
||||
|
||||
Stringify a JavaScript value to a JSON5 string.
|
||||
|
||||
```ts
|
||||
import { JSON5 } from "bun";
|
||||
|
||||
const str = JSON5.stringify({ name: "my-app", version: "1.0.0" });
|
||||
console.log(str);
|
||||
// {name:'my-app',version:'1.0.0'}
|
||||
```
|
||||
|
||||
#### Pretty Printing
|
||||
|
||||
Pass a `space` argument to format the output with indentation:
|
||||
|
||||
```ts
|
||||
const pretty = JSON5.stringify(
|
||||
{
|
||||
name: "my-app",
|
||||
debug: true,
|
||||
tags: ["web", "api"],
|
||||
},
|
||||
null,
|
||||
2,
|
||||
);
|
||||
|
||||
console.log(pretty);
|
||||
// {
|
||||
// name: 'my-app',
|
||||
// debug: true,
|
||||
// tags: [
|
||||
// 'web',
|
||||
// 'api',
|
||||
// ],
|
||||
// }
|
||||
```
|
||||
|
||||
The `space` argument can be a number (number of spaces) or a string (used as the indent character):
|
||||
|
||||
```ts
|
||||
// Tab indentation
|
||||
JSON5.stringify(data, null, "\t");
|
||||
```
|
||||
|
||||
#### Special Values
|
||||
|
||||
Unlike `JSON.stringify`, `JSON5.stringify` preserves special numeric values:
|
||||
|
||||
```ts
|
||||
JSON5.stringify({ inf: Infinity, ninf: -Infinity, nan: NaN });
|
||||
// {inf:Infinity,ninf:-Infinity,nan:NaN}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Module Import
|
||||
|
||||
### ES Modules
|
||||
|
||||
You can import JSON5 files directly as ES modules:
|
||||
|
||||
```json5 config.json5
|
||||
{
|
||||
// Database configuration
|
||||
database: {
|
||||
host: "localhost",
|
||||
port: 5432,
|
||||
name: "myapp",
|
||||
},
|
||||
|
||||
features: {
|
||||
auth: true,
|
||||
rateLimit: true,
|
||||
analytics: false,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
#### Default Import
|
||||
|
||||
```ts app.ts icon="/icons/typescript.svg"
|
||||
import config from "./config.json5";
|
||||
|
||||
console.log(config.database.host); // "localhost"
|
||||
console.log(config.features.auth); // true
|
||||
```
|
||||
|
||||
#### Named Imports
|
||||
|
||||
You can destructure top-level properties as named imports:
|
||||
|
||||
```ts app.ts icon="/icons/typescript.svg"
|
||||
import { database, features } from "./config.json5";
|
||||
|
||||
console.log(database.host); // "localhost"
|
||||
console.log(features.rateLimit); // true
|
||||
```
|
||||
|
||||
### CommonJS
|
||||
|
||||
JSON5 files can also be required in CommonJS:
|
||||
|
||||
```ts app.ts icon="/icons/typescript.svg"
|
||||
const config = require("./config.json5");
|
||||
console.log(config.database.name); // "myapp"
|
||||
|
||||
// Destructuring also works
|
||||
const { database, features } = require("./config.json5");
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Hot Reloading with JSON5
|
||||
|
||||
When you run your application with `bun --hot`, changes to JSON5 files are automatically detected and reloaded:
|
||||
|
||||
```json5 config.json5
|
||||
{
|
||||
server: {
|
||||
port: 3000,
|
||||
host: "localhost",
|
||||
},
|
||||
features: {
|
||||
debug: true,
|
||||
verbose: false,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
```ts server.ts icon="/icons/typescript.svg"
|
||||
import { server, features } from "./config.json5";
|
||||
|
||||
Bun.serve({
|
||||
port: server.port,
|
||||
hostname: server.host,
|
||||
fetch(req) {
|
||||
if (features.verbose) {
|
||||
console.log(`${req.method} ${req.url}`);
|
||||
}
|
||||
return new Response("Hello World");
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Run with hot reloading:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
bun --hot server.ts
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Bundler Integration
|
||||
|
||||
When you import JSON5 files and bundle with Bun, the JSON5 is parsed at build time and included as a JavaScript module:
|
||||
|
||||
```bash terminal icon="terminal"
|
||||
bun build app.ts --outdir=dist
|
||||
```
|
||||
|
||||
This means:
|
||||
|
||||
- Zero runtime JSON5 parsing overhead in production
|
||||
- Smaller bundle sizes
|
||||
- Tree-shaking support for unused properties (named imports)
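
To make the build-time parsing concrete, here is a rough sketch of what the bundled module ends up containing (the shape is illustrative, based on the `config.json5` shown above, not byte-exact bundler output):

```ts
// dist/app.js (excerpt): the .json5 source is gone from the bundle;
// only the already-parsed object remains as plain JavaScript.
var config = {
  database: { host: "localhost", port: 5432, name: "myapp" },
  features: { auth: true, rateLimit: true, analytics: false },
};
```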
|
||||
|
||||
### Dynamic Imports
|
||||
|
||||
JSON5 files can be dynamically imported:
|
||||
|
||||
```ts
|
||||
const config = await import("./config.json5");
|
||||
```
|
||||
@@ -1,188 +0,0 @@
|
||||
---
|
||||
title: JSONL
|
||||
description: Parse newline-delimited JSON (JSONL) with Bun's built-in streaming parser
|
||||
---
|
||||
|
||||
Bun has built-in support for parsing [JSONL](https://jsonlines.org/) (newline-delimited JSON), where each line is a separate JSON value. The parser is implemented in C++ using JavaScriptCore's optimized JSON parser and supports streaming use cases.
|
||||
|
||||
```ts
|
||||
const results = Bun.JSONL.parse('{"name":"Alice"}\n{"name":"Bob"}\n');
|
||||
// [{ name: "Alice" }, { name: "Bob" }]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## `Bun.JSONL.parse()`
|
||||
|
||||
Parse a complete JSONL input and return an array of all parsed values.
|
||||
|
||||
```ts
|
||||
import { JSONL } from "bun";
|
||||
|
||||
const input = '{"id":1,"name":"Alice"}\n{"id":2,"name":"Bob"}\n{"id":3,"name":"Charlie"}\n';
|
||||
const records = JSONL.parse(input);
|
||||
console.log(records);
|
||||
// [
|
||||
// { id: 1, name: "Alice" },
|
||||
// { id: 2, name: "Bob" },
|
||||
// { id: 3, name: "Charlie" }
|
||||
// ]
|
||||
```
|
||||
|
||||
Input can be a string or a `Uint8Array`:
|
||||
|
||||
```ts
|
||||
const buffer = new TextEncoder().encode('{"a":1}\n{"b":2}\n');
|
||||
const results = Bun.JSONL.parse(buffer);
|
||||
// [{ a: 1 }, { b: 2 }]
|
||||
```
|
||||
|
||||
When passed a `Uint8Array`, a UTF-8 BOM at the start of the buffer is automatically skipped.
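
As a quick sketch (the BOM bytes here are the standard UTF-8 byte order mark; everything else mirrors the earlier examples):

```ts
// A UTF-8 BOM (0xEF 0xBB 0xBF) prepended to the payload is skipped automatically.
const payload = new TextEncoder().encode('{"a":1}\n{"b":2}\n');
const withBom = new Uint8Array(3 + payload.length);
withBom.set([0xef, 0xbb, 0xbf]);
withBom.set(payload, 3);

const values = Bun.JSONL.parse(withBom);
// [{ a: 1 }, { b: 2 }]
```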
|
||||
|
||||
### Error handling
|
||||
|
||||
If the input contains invalid JSON, `Bun.JSONL.parse()` throws a `SyntaxError`:
|
||||
|
||||
```ts
|
||||
try {
|
||||
Bun.JSONL.parse('{"valid":true}\n{invalid}\n');
|
||||
} catch (error) {
|
||||
console.error(error); // SyntaxError: Failed to parse JSONL
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## `Bun.JSONL.parseChunk()`
|
||||
|
||||
For streaming scenarios, `parseChunk` parses as many complete values as possible from the input and reports how far it got. This is useful when receiving data incrementally (e.g., from a network stream) and you need to know where to resume parsing.
|
||||
|
||||
```ts
|
||||
const chunk = '{"id":1}\n{"id":2}\n{"id":3';
|
||||
|
||||
const result = Bun.JSONL.parseChunk(chunk);
|
||||
console.log(result.values); // [{ id: 1 }, { id: 2 }]
|
||||
console.log(result.read); // 17 — characters consumed
|
||||
console.log(result.done); // false — incomplete value remains
|
||||
console.log(result.error); // null — no parse error
|
||||
```
|
||||
|
||||
### Return value
|
||||
|
||||
`parseChunk` returns an object with four properties:
|
||||
|
||||
| Property | Type | Description |
|
||||
| -------- | --------------------- | ----------------------------------------------------------------------- |
|
||||
| `values` | `any[]` | Array of successfully parsed JSON values |
|
||||
| `read` | `number` | Number of bytes (for `Uint8Array`) or characters (for strings) consumed |
|
||||
| `done` | `boolean` | `true` if the entire input was consumed with no remaining data |
|
||||
| `error` | `SyntaxError \| null` | Parse error, or `null` if no error occurred |
|
||||
|
||||
### Streaming example
|
||||
|
||||
Use `read` to slice off consumed input and carry forward the remainder:
|
||||
|
||||
```ts
|
||||
let buffer = "";
|
||||
|
||||
async function processStream(stream: ReadableStream<string>) {
|
||||
for await (const chunk of stream) {
|
||||
buffer += chunk;
|
||||
const result = Bun.JSONL.parseChunk(buffer);
|
||||
|
||||
for (const value of result.values) {
|
||||
handleRecord(value);
|
||||
}
|
||||
|
||||
// Keep only the unconsumed portion
|
||||
buffer = buffer.slice(result.read);
|
||||
}
|
||||
|
||||
// Handle any remaining data
|
||||
if (buffer.length > 0) {
|
||||
const final = Bun.JSONL.parseChunk(buffer);
|
||||
for (const value of final.values) {
|
||||
handleRecord(value);
|
||||
}
|
||||
if (final.error) {
|
||||
console.error("Parse error in final chunk:", final.error.message);
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Byte offsets with `Uint8Array`
|
||||
|
||||
When the input is a `Uint8Array`, you can pass optional `start` and `end` byte offsets:
|
||||
|
||||
```ts
|
||||
const buf = new TextEncoder().encode('{"a":1}\n{"b":2}\n{"c":3}\n');
|
||||
|
||||
// Parse starting from byte 8
|
||||
const result = Bun.JSONL.parseChunk(buf, 8);
|
||||
console.log(result.values); // [{ b: 2 }, { c: 3 }]
|
||||
console.log(result.read); // 24
|
||||
|
||||
// Parse a specific range
|
||||
const partial = Bun.JSONL.parseChunk(buf, 0, 8);
|
||||
console.log(partial.values); // [{ a: 1 }]
|
||||
```
|
||||
|
||||
The `read` value is always a byte offset into the original buffer, making it easy to use with `TypedArray.subarray()` for zero-copy streaming:
|
||||
|
||||
```ts
|
||||
let buf = new Uint8Array(0);
|
||||
|
||||
async function processBinaryStream(stream: ReadableStream<Uint8Array>) {
|
||||
for await (const chunk of stream) {
|
||||
// Append chunk to buffer
|
||||
const newBuf = new Uint8Array(buf.length + chunk.length);
|
||||
newBuf.set(buf);
|
||||
newBuf.set(chunk, buf.length);
|
||||
buf = newBuf;
|
||||
|
||||
const result = Bun.JSONL.parseChunk(buf);
|
||||
|
||||
for (const value of result.values) {
|
||||
handleRecord(value);
|
||||
}
|
||||
|
||||
// Keep unconsumed bytes
|
||||
buf = buf.subarray(result.read);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Error recovery
|
||||
|
||||
Unlike `parse()`, `parseChunk()` does not throw on invalid JSON. Instead, it returns the error in the `error` property, along with any values that were successfully parsed before the error:
|
||||
|
||||
```ts
|
||||
const input = '{"a":1}\n{invalid}\n{"b":2}\n';
|
||||
const result = Bun.JSONL.parseChunk(input);
|
||||
|
||||
console.log(result.values); // [{ a: 1 }] — values parsed before the error
|
||||
console.log(result.error); // SyntaxError
|
||||
console.log(result.read); // 7 — position up to last successful parse
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Supported value types
|
||||
|
||||
Each line can be any valid JSON value, not just objects:
|
||||
|
||||
```ts
|
||||
const input = '42\n"hello"\ntrue\nnull\n[1,2,3]\n{"key":"value"}\n';
|
||||
const values = Bun.JSONL.parse(input);
|
||||
// [42, "hello", true, null, [1, 2, 3], { key: "value" }]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Performance notes
|
||||
|
||||
- **ASCII fast path**: Pure ASCII input is parsed directly without copying, using a zero-allocation `StringView`.
|
||||
- **UTF-8 support**: Non-ASCII `Uint8Array` input is decoded to UTF-16 using SIMD-accelerated conversion.
|
||||
- **BOM handling**: UTF-8 BOM (`0xEF 0xBB 0xBF`) at the start of a `Uint8Array` is automatically skipped.
|
||||
- **Pre-built object shape**: The result object from `parseChunk` uses a cached structure for fast property access.
|
||||
@@ -1,344 +0,0 @@
|
||||
---
|
||||
title: Markdown
|
||||
description: Parse and render Markdown with Bun's built-in Markdown API, supporting GFM extensions and custom rendering callbacks
|
||||
---
|
||||
|
||||
{% callout type="note" %}
|
||||
**Unstable API** — This API is under active development and may change in future versions of Bun.
|
||||
{% /callout %}
|
||||
|
||||
Bun includes a fast, built-in Markdown parser written in Zig. It supports GitHub Flavored Markdown (GFM) extensions and provides three APIs:
|
||||
|
||||
- `Bun.markdown.html()` — render Markdown to an HTML string
|
||||
- `Bun.markdown.render()` — render Markdown with custom callbacks for each element
|
||||
- `Bun.markdown.react()` — render Markdown to React JSX elements
|
||||
|
||||
---
|
||||
|
||||
## `Bun.markdown.html()`
|
||||
|
||||
Convert a Markdown string to HTML.
|
||||
|
||||
```ts
|
||||
const html = Bun.markdown.html("# Hello **world**");
|
||||
// "<h1>Hello <strong>world</strong></h1>\n"
|
||||
```
|
||||
|
||||
GFM extensions like tables, strikethrough, and task lists are enabled by default:
|
||||
|
||||
```ts
|
||||
const html = Bun.markdown.html(`
|
||||
| Feature | Status |
|
||||
|-------------|--------|
|
||||
| Tables | ~~done~~ |
|
||||
| Strikethrough| ~~done~~ |
|
||||
| Task lists | done |
|
||||
`);
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
Pass an options object as the second argument to configure the parser:
|
||||
|
||||
```ts
|
||||
const html = Bun.markdown.html("some markdown", {
|
||||
tables: true, // GFM tables (default: true)
|
||||
strikethrough: true, // GFM strikethrough (default: true)
|
||||
tasklists: true, // GFM task lists (default: true)
|
||||
tagFilter: true, // GFM tag filter for disallowed HTML tags
|
||||
autolinks: true, // Autolink URLs, emails, and www. links
|
||||
});
|
||||
```
|
||||
|
||||
All available options:
|
||||
|
||||
| Option | Default | Description |
|
||||
| ---------------------- | ------- | ----------------------------------------------------------- |
|
||||
| `tables`               | `true`  | GFM tables                                                  |
| `strikethrough`        | `true`  | GFM strikethrough (`~~text~~`)                              |
| `tasklists`            | `true`  | GFM task lists (`- [x] item`)                               |
|
||||
| `autolinks` | `false` | Enable autolinks — see [Autolinks](#autolinks) |
|
||||
| `headings` | `false` | Heading IDs and autolinks — see [Heading IDs](#heading-ids) |
|
||||
| `hardSoftBreaks` | `false` | Treat soft line breaks as hard breaks |
|
||||
| `wikiLinks` | `false` | Enable `[[wiki links]]` |
|
||||
| `underline` | `false` | `__text__` renders as `<u>` instead of `<strong>` |
|
||||
| `latexMath` | `false` | Enable `$inline$` and `$$display$$` math |
|
||||
| `collapseWhitespace` | `false` | Collapse whitespace in text |
|
||||
| `permissiveAtxHeaders` | `false` | ATX headers without space after `#` |
|
||||
| `noIndentedCodeBlocks` | `false` | Disable indented code blocks |
|
||||
| `noHtmlBlocks` | `false` | Disable HTML blocks |
|
||||
| `noHtmlSpans` | `false` | Disable inline HTML |
|
||||
| `tagFilter` | `false` | GFM tag filter for disallowed HTML tags |
|
||||
|
||||
#### Autolinks
|
||||
|
||||
Pass `true` to enable all autolink types, or an object for granular control:
|
||||
|
||||
```ts
|
||||
// Enable all autolinks (URL, WWW, email)
|
||||
Bun.markdown.html("Visit www.example.com", { autolinks: true });
|
||||
|
||||
// Enable only specific types
|
||||
Bun.markdown.html("Visit www.example.com", {
|
||||
autolinks: { url: true, www: true },
|
||||
});
|
||||
```
|
||||
|
||||
#### Heading IDs
|
||||
|
||||
Pass `true` to enable both heading IDs and autolink headings, or an object for granular control:
|
||||
|
||||
```ts
|
||||
// Enable heading IDs and autolink headings
|
||||
Bun.markdown.html("## Hello World", { headings: true });
|
||||
// '<h2 id="hello-world"><a href="#hello-world">Hello World</a></h2>\n'
|
||||
|
||||
// Enable only heading IDs (no autolink)
|
||||
Bun.markdown.html("## Hello World", { headings: { ids: true } });
|
||||
// '<h2 id="hello-world">Hello World</h2>\n'
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## `Bun.markdown.render()`
|
||||
|
||||
Parse Markdown and render it using custom JavaScript callbacks. This gives you full control over the output format — you can generate HTML with custom classes, React elements, ANSI terminal output, or any other string format.
|
||||
|
||||
```ts
|
||||
const result = Bun.markdown.render("# Hello **world**", {
|
||||
heading: (children, { level }) => `<h${level} class="title">${children}</h${level}>`,
|
||||
strong: children => `<b>${children}</b>`,
|
||||
paragraph: children => `<p>${children}</p>`,
|
||||
});
|
||||
// '<h1 class="title">Hello <b>world</b></h1>'
|
||||
```
|
||||
|
||||
### Callback signature
|
||||
|
||||
Each callback receives:
|
||||
|
||||
1. **`children`** — the accumulated content of the element as a string
|
||||
2. **`meta`** (optional) — an object with element-specific metadata
|
||||
|
||||
Return a string to replace the element's rendering. Return `null` or `undefined` to omit the element from the output entirely. If no callback is registered for an element, its children pass through unchanged.
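
For example, a minimal sketch of the pass-through behavior (only `strong` is handled; the exact whitespace of the output may differ):

```ts
const out = Bun.markdown.render("# Hi **there**", {
  strong: children => children.toUpperCase(),
});
// Headings and paragraphs have no callback registered, so only their
// children appear in the output, roughly "Hi THERE".
```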
|
||||
|
||||
### Block callbacks
|
||||
|
||||
| Callback | Meta | Description |
|
||||
| ------------ | ------------------------------------------- | ---------------------------------------------------------------------------------------- |
|
||||
| `heading` | `{ level: number, id?: string }` | Heading level 1–6. `id` is set when `headings: { ids: true }` is enabled |
|
||||
| `paragraph` | — | Paragraph block |
|
||||
| `blockquote` | — | Blockquote block |
|
||||
| `code` | `{ language?: string }` | Fenced or indented code block. `language` is the info-string when specified on the fence |
|
||||
| `list` | `{ ordered: boolean, start?: number }` | Ordered or unordered list. `start` is the start number for ordered lists |
|
||||
| `listItem` | `{ checked?: boolean }` | List item. `checked` is set for task list items (`- [x]` / `- [ ]`) |
|
||||
| `hr` | — | Horizontal rule |
|
||||
| `table` | — | Table block |
|
||||
| `thead` | — | Table head |
|
||||
| `tbody` | — | Table body |
|
||||
| `tr` | — | Table row |
|
||||
| `th` | `{ align?: "left" \| "center" \| "right" }` | Table header cell. `align` is set when alignment is specified |
|
||||
| `td` | `{ align?: "left" \| "center" \| "right" }` | Table data cell. `align` is set when alignment is specified |
|
||||
| `html` | — | Raw HTML content |
|
||||
|
||||
### Inline callbacks
|
||||
|
||||
| Callback | Meta | Description |
|
||||
| --------------- | ---------------------------------- | ---------------------------- |
|
||||
| `strong` | — | Strong emphasis (`**text**`) |
|
||||
| `emphasis` | — | Emphasis (`*text*`) |
|
||||
| `link` | `{ href: string, title?: string }` | Link |
|
||||
| `image` | `{ src: string, title?: string }` | Image |
|
||||
| `codespan` | — | Inline code (`` `code` ``) |
|
||||
| `strikethrough` | — | Strikethrough (`~~text~~`) |
|
||||
| `text` | — | Plain text content |
|
||||
|
||||
### Examples
|
||||
|
||||
#### Custom HTML with classes
|
||||
|
||||
```ts
|
||||
const html = Bun.markdown.render("# Title\n\nHello **world**", {
|
||||
heading: (children, { level }) => `<h${level} class="heading heading-${level}">${children}</h${level}>`,
|
||||
paragraph: children => `<p class="body">${children}</p>`,
|
||||
strong: children => `<strong class="bold">${children}</strong>`,
|
||||
});
|
||||
```
|
||||
|
||||
#### Stripping all formatting
|
||||
|
||||
```ts
|
||||
const plaintext = Bun.markdown.render("# Hello **world**", {
|
||||
heading: children => children,
|
||||
paragraph: children => children,
|
||||
strong: children => children,
|
||||
emphasis: children => children,
|
||||
link: children => children,
|
||||
image: () => "",
|
||||
code: children => children,
|
||||
codespan: children => children,
|
||||
});
|
||||
// "Hello world"
|
||||
```
|
||||
|
||||
#### Omitting elements
|
||||
|
||||
Return `null` or `undefined` to remove an element from the output:
|
||||
|
||||
```ts
|
||||
const result = Bun.markdown.render("# Title\n\n\n\nHello", {
|
||||
image: () => null, // Remove all images
|
||||
heading: children => children,
|
||||
paragraph: children => children + "\n",
|
||||
});
|
||||
// "Title\nHello\n"
|
||||
```
|
||||
|
||||
#### ANSI terminal output
|
||||
|
||||
```ts
|
||||
const ansi = Bun.markdown.render("# Hello\n\nThis is **bold** and *italic*", {
|
||||
heading: (children, { level }) => `\x1b[1;4m${children}\x1b[0m\n`,
|
||||
paragraph: children => children + "\n",
|
||||
strong: children => `\x1b[1m${children}\x1b[22m`,
|
||||
emphasis: children => `\x1b[3m${children}\x1b[23m`,
|
||||
});
|
||||
```
|
||||
|
||||
#### Code block syntax highlighting
|
||||
|
||||
````ts
|
||||
const result = Bun.markdown.render("```js\nconsole.log('hi')\n```", {
|
||||
code: (children, meta) => {
|
||||
const lang = meta?.language ?? "";
|
||||
return `<pre><code class="language-${lang}">${children}</code></pre>`;
|
||||
},
|
||||
});
|
||||
````
|
||||
|
||||
### Parser options
|
||||
|
||||
Parser options are passed as a separate third argument:
|
||||
|
||||
```ts
|
||||
const result = Bun.markdown.render(
|
||||
"Visit www.example.com",
|
||||
{
|
||||
link: (children, { href }) => `[${children}](${href})`,
|
||||
paragraph: children => children,
|
||||
},
|
||||
{ autolinks: true },
|
||||
);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## `Bun.markdown.react()`
|
||||
|
||||
Render Markdown directly to React elements. Returns a `<Fragment>` that you can use as a component return value.
|
||||
|
||||
```tsx
|
||||
function Markdown({ text }: { text: string }) {
|
||||
return Bun.markdown.react(text);
|
||||
}
|
||||
```
|
||||
|
||||
### Server-side rendering
|
||||
|
||||
Works with `renderToString()` and React Server Components:
|
||||
|
||||
```tsx
|
||||
import { renderToString } from "react-dom/server";
|
||||
|
||||
const html = renderToString(Bun.markdown.react("# Hello **world**"));
|
||||
// "<h1>Hello <strong>world</strong></h1>"
|
||||
```
|
||||
|
||||
### Component overrides
|
||||
|
||||
Replace any HTML element with a custom React component by passing it in the second argument, keyed by tag name:
|
||||
|
||||
```tsx
|
||||
function Code({ language, children }) {
|
||||
return (
|
||||
<pre data-language={language}>
|
||||
<code>{children}</code>
|
||||
</pre>
|
||||
);
|
||||
}
|
||||
|
||||
function Link({ href, title, children }) {
|
||||
return (
|
||||
<a href={href} title={title} target="_blank" rel="noopener noreferrer">
|
||||
{children}
|
||||
</a>
|
||||
);
|
||||
}
|
||||
|
||||
function Heading({ id, children }) {
|
||||
return (
|
||||
<h2 id={id}>
|
||||
<a href={`#${id}`}>{children}</a>
|
||||
</h2>
|
||||
);
|
||||
}
|
||||
|
||||
const el = Bun.markdown.react(
|
||||
content,
|
||||
{
|
||||
pre: Code,
|
||||
a: Link,
|
||||
h2: Heading,
|
||||
},
|
||||
{ headings: { ids: true } },
|
||||
);
|
||||
```
|
||||
|
||||
#### Available overrides
|
||||
|
||||
Every HTML tag produced by the parser can be overridden:
|
||||
|
||||
| Option | Props | Description |
|
||||
| ------------ | ---------------------------- | --------------------------------------------------------------- |
|
||||
| `h1`–`h6` | `{ id?, children }` | Headings. `id` is set when `headings: { ids: true }` is enabled |
|
||||
| `p` | `{ children }` | Paragraph |
|
||||
| `blockquote` | `{ children }` | Blockquote |
|
||||
| `pre` | `{ language?, children }` | Code block. `language` is the info string (e.g. `"js"`) |
|
||||
| `hr` | `{}` | Horizontal rule (no children) |
|
||||
| `ul` | `{ children }` | Unordered list |
|
||||
| `ol` | `{ start, children }` | Ordered list. `start` is the first item number |
|
||||
| `li` | `{ checked?, children }` | List item. `checked` is set for task list items |
|
||||
| `table` | `{ children }` | Table |
|
||||
| `thead` | `{ children }` | Table head |
|
||||
| `tbody` | `{ children }` | Table body |
|
||||
| `tr` | `{ children }` | Table row |
|
||||
| `th` | `{ align?, children }` | Table header cell |
|
||||
| `td` | `{ align?, children }` | Table data cell |
|
||||
| `em` | `{ children }` | Emphasis (`*text*`) |
|
||||
| `strong` | `{ children }` | Strong (`**text**`) |
|
||||
| `a` | `{ href, title?, children }` | Link |
|
||||
| `img` | `{ src, alt?, title? }` | Image (no children) |
|
||||
| `code` | `{ children }` | Inline code |
|
||||
| `del` | `{ children }` | Strikethrough (`~~text~~`) |
|
||||
| `br` | `{}` | Hard line break (no children) |
|
||||
|
||||
### React 18 and older
|
||||
|
||||
By default, elements use `Symbol.for('react.transitional.element')` as the `$$typeof` symbol. For React 18 and older, pass `reactVersion: 18` in the options (third argument):
|
||||
|
||||
```tsx
|
||||
function Markdown({ text }: { text: string }) {
|
||||
return Bun.markdown.react(text, undefined, { reactVersion: 18 });
|
||||
}
|
||||
```
|
||||
|
||||
### Parser options
|
||||
|
||||
All [parser options](#options) are passed as the third argument:
|
||||
|
||||
```tsx
|
||||
const el = Bun.markdown.react("## Hello World", undefined, {
|
||||
headings: { ids: true },
|
||||
autolinks: true,
|
||||
});
|
||||
```
|
||||
@@ -165,7 +165,7 @@ This page is updated regularly to reflect compatibility status of the latest ver
|
||||
|
||||
### [`node:inspector`](https://nodejs.org/api/inspector.html)
|
||||
|
||||
🟡 Partially implemented. `Profiler` API is supported (`Profiler.enable`, `Profiler.disable`, `Profiler.start`, `Profiler.stop`, `Profiler.setSamplingInterval`). Other inspector APIs are not yet implemented.
|
||||
🔴 Not implemented.
|
||||
|
||||
### [`node:repl`](https://nodejs.org/api/repl.html)
|
||||
|
||||
|
||||
@@ -135,18 +135,6 @@ await s3file.write(JSON.stringify({ name: "John", age: 30 }), {
|
||||
type: "application/json",
|
||||
});
|
||||
|
||||
// Write with content encoding (e.g. for pre-compressed data)
|
||||
await s3file.write(compressedData, {
|
||||
type: "application/json",
|
||||
contentEncoding: "gzip",
|
||||
});
|
||||
|
||||
// Write with content disposition
|
||||
await s3file.write(pdfData, {
|
||||
type: "application/pdf",
|
||||
contentDisposition: 'attachment; filename="report.pdf"',
|
||||
});
|
||||
|
||||
// Write using a writer (streaming)
|
||||
const writer = s3file.writer({ type: "application/json" });
|
||||
writer.write("Hello");
|
||||
@@ -200,13 +188,7 @@ const download = s3.presign("my-file.txt"); // GET, text/plain, expires in 24 ho
|
||||
const upload = s3.presign("my-file", {
|
||||
expiresIn: 3600, // 1 hour
|
||||
method: "PUT",
|
||||
type: "application/json", // Sets response-content-type in the presigned URL
|
||||
});
|
||||
|
||||
// Presign with content disposition (e.g. force download with a specific filename)
|
||||
const downloadUrl = s3.presign("report.pdf", {
|
||||
expiresIn: 3600,
|
||||
contentDisposition: 'attachment; filename="quarterly-report.pdf"',
|
||||
type: "application/json", // No extension for inferring, so we can specify the content type to be JSON
|
||||
});
|
||||
|
||||
// You can call .presign() if on a file reference, but avoid doing so
|
||||
|
||||
@@ -460,7 +460,7 @@ console.log(result); // Blob(13) { size: 13, type: "text/plain" }
|
||||
For cross-platform compatibility, Bun Shell implements a set of builtin commands, in addition to reading commands from the PATH environment variable.
|
||||
|
||||
- `cd`: change the working directory
|
||||
- `ls`: list files in a directory (supports `-l` for long listing format)
|
||||
- `ls`: list files in a directory
|
||||
- `rm`: remove files and directories
|
||||
- `echo`: print text
|
||||
- `pwd`: print the working directory
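
A small sketch of the builtins in use (the paths and messages are illustrative):

```ts
import { $ } from "bun";

// These builtins run the same way on macOS, Linux, and Windows,
// without relying on an external system shell.
await $`cd /tmp && pwd && echo "hello from Bun Shell"`;

const listing = await $`ls`.text();
console.log(listing);
```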
|
||||
|
||||
@@ -880,94 +880,6 @@ npm/strip-ansi 212,992 chars long-ansi 1.36 ms/iter 1.38 ms
|
||||
|
||||
---
|
||||
|
||||
## `Bun.wrapAnsi()`
|
||||
|
||||
<Note>Drop-in replacement for `wrap-ansi` npm package</Note>
|
||||
|
||||
`Bun.wrapAnsi(input: string, columns: number, options?: WrapAnsiOptions): string`
|
||||
|
||||
Wrap text to a specified column width while preserving ANSI escape codes, hyperlinks, and handling Unicode/emoji width correctly. This is a native, high-performance alternative to the popular [`wrap-ansi`](https://www.npmjs.com/package/wrap-ansi) npm package.
|
||||
|
||||
```ts
|
||||
// Basic wrapping at 20 columns
|
||||
Bun.wrapAnsi("The quick brown fox jumps over the lazy dog", 20);
|
||||
// => "The quick brown fox\njumps over the lazy\ndog"
|
||||
|
||||
// Preserves ANSI escape codes
|
||||
Bun.wrapAnsi("\u001b[31mThe quick brown fox jumps over the lazy dog\u001b[0m", 20);
|
||||
// => "\u001b[31mThe quick brown fox\njumps over the lazy\ndog\u001b[0m"
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
```ts
|
||||
Bun.wrapAnsi("Hello World", 5, {
|
||||
hard: true, // Break words that exceed column width (default: false)
|
||||
wordWrap: true, // Wrap at word boundaries (default: true)
|
||||
trim: true, // Trim leading/trailing whitespace per line (default: true)
|
||||
ambiguousIsNarrow: true, // Treat ambiguous-width characters as narrow (default: true)
|
||||
});
|
||||
```
|
||||
|
||||
| Option | Default | Description |
|
||||
| ------------------- | ------- | --------------------------------------------------------------------------------------------------------------- |
|
||||
| `hard` | `false` | If `true`, break words in the middle if they exceed the column width. |
|
||||
| `wordWrap` | `true` | If `true`, wrap at word boundaries. If `false`, only break at explicit newlines. |
|
||||
| `trim` | `true` | If `true`, trim leading and trailing whitespace from each line. |
|
||||
| `ambiguousIsNarrow` | `true` | If `true`, treat ambiguous-width Unicode characters as 1 column wide. If `false`, treat them as 2 columns wide. |
|
||||
|
||||
TypeScript definition:
|
||||
|
||||
```ts expandable
|
||||
namespace Bun {
|
||||
export function wrapAnsi(
|
||||
/**
|
||||
* The string to wrap
|
||||
*/
|
||||
input: string,
|
||||
/**
|
||||
* The maximum column width
|
||||
*/
|
||||
columns: number,
|
||||
/**
|
||||
* Wrapping options
|
||||
*/
|
||||
options?: {
|
||||
/**
|
||||
* If `true`, break words in the middle if they don't fit on a line.
|
||||
* If `false`, only break at word boundaries.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
hard?: boolean;
|
||||
/**
|
||||
* If `true`, wrap at word boundaries when possible.
|
||||
* If `false`, don't perform word wrapping (only wrap at explicit newlines).
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
wordWrap?: boolean;
|
||||
/**
|
||||
* If `true`, trim leading and trailing whitespace from each line.
|
||||
* If `false`, preserve whitespace.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
trim?: boolean;
|
||||
/**
|
||||
* When it's ambiguous and `true`, count ambiguous width characters as 1 character wide.
|
||||
* If `false`, count them as 2 characters wide.
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
ambiguousIsNarrow?: boolean;
|
||||
},
|
||||
): string;
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## `serialize` & `deserialize` in `bun:jsc`
|
||||
|
||||
To save a JavaScript value into an ArrayBuffer & back, use `serialize` and `deserialize` from the `"bun:jsc"` module.
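
A minimal sketch (the object being round-tripped is illustrative):

```ts
import { serialize, deserialize } from "bun:jsc";

// Structured-clone the value into a binary buffer...
const buffer = serialize({ hello: "world", nested: [1, 2, 3] });

// ...and reconstruct an equivalent value from it later.
const value = deserialize(buffer);
console.log(value); // { hello: "world", nested: [ 1, 2, 3 ] }
```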
|
||||
|
||||
@@ -131,7 +131,6 @@
|
||||
stdenv = pkgs.clangStdenv;
|
||||
}) {
|
||||
inherit packages;
|
||||
hardeningDisable = [ "fortify" ];
|
||||
|
||||
shellHook = ''
|
||||
# Set up build environment
|
||||
|
||||
meta.json
@@ -1,24 +0,0 @@
|
||||
{
|
||||
"inputs": {
|
||||
"../../tmp/test-entry.js": {
|
||||
"bytes": 21,
|
||||
"imports": [
|
||||
],
|
||||
"format": "esm"
|
||||
}
|
||||
},
|
||||
"outputs": {
|
||||
"./test-entry.js": {
|
||||
"bytes": 49,
|
||||
"inputs": {
|
||||
"../../tmp/test-entry.js": {
|
||||
"bytesInOutput": 22
|
||||
}
|
||||
},
|
||||
"imports": [
|
||||
],
|
||||
"exports": [],
|
||||
"entryPoint": "../../tmp/test-entry.js"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -35,7 +35,7 @@ JSC_DEFINE_HOST_FUNCTION(jsFunctionWrite, (JSC::JSGlobalObject * globalObject,
|
||||
JSValue arg1 = callframe->argument(0);
|
||||
JSValue toWriteArg = callframe->argument(1);
|
||||
auto &vm = globalObject->vm();
|
||||
auto scope = DECLARE_TOP_EXCEPTION_SCOPE(vm);
|
||||
auto scope = DECLARE_CATCH_SCOPE(vm);
|
||||
|
||||
int32_t fd = STDOUT_FILENO;
|
||||
if (callframe->argumentCount() > 1) {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "bun",
|
||||
"version": "1.3.8",
|
||||
"version": "1.3.7",
|
||||
"workspaces": [
|
||||
"./packages/bun-types",
|
||||
"./packages/@types/bun"
|
||||
|
||||
@@ -71,25 +71,8 @@ async function buildRootModule(dryRun?: boolean) {
|
||||
js: "// Source code: https://github.com/oven-sh/bun/blob/main/packages/bun-release/scripts/npm-postinstall.ts",
|
||||
},
|
||||
});
|
||||
// Create placeholder scripts that print an error message if postinstall hasn't run.
|
||||
// On Unix, these are executed as shell scripts despite the .exe extension.
|
||||
// Do NOT add a shebang (#!/bin/sh) here — npm's cmd-shim reads shebangs to generate
|
||||
// .ps1/.cmd wrappers BEFORE postinstall runs, and bakes the interpreter path in.
|
||||
// A #!/bin/sh shebang breaks Windows because the wrappers reference /bin/sh which
|
||||
// doesn't exist, even after postinstall replaces the placeholder with the real binary.
|
||||
const placeholderScript = `echo "Error: Bun's postinstall script was not run." >&2
|
||||
echo "" >&2
|
||||
echo "This occurs when using --ignore-scripts during installation, or when using a" >&2
|
||||
echo "package manager like pnpm that does not run postinstall scripts by default." >&2
|
||||
echo "" >&2
|
||||
echo "To fix this, run the postinstall script manually:" >&2
|
||||
echo " cd node_modules/bun && node install.js" >&2
|
||||
echo "" >&2
|
||||
echo "Or reinstall bun without the --ignore-scripts flag." >&2
|
||||
exit 1
|
||||
`;
|
||||
write(join(cwd, "bin", "bun.exe"), placeholderScript);
|
||||
write(join(cwd, "bin", "bunx.exe"), placeholderScript);
|
||||
write(join(cwd, "bin", "bun.exe"), "");
|
||||
write(join(cwd, "bin", "bunx.exe"), "");
|
||||
write(
|
||||
join(cwd, "bin", "README.txt"),
|
||||
`The 'bun.exe' file is a placeholder for the binary file, which
|
||||
|
||||
packages/bun-types/bun.d.ts (vendored)
@@ -743,101 +743,6 @@ declare module "bun" {
|
||||
export function parse(input: string): unknown;
|
||||
}
|
||||
|
||||
/**
|
||||
* JSONL (JSON Lines) related APIs.
|
||||
*
|
||||
* Each line in the input is expected to be a valid JSON value separated by newlines.
|
||||
*/
|
||||
namespace JSONL {
|
||||
/**
|
||||
* The result of `Bun.JSONL.parseChunk`.
|
||||
*/
|
||||
interface ParseChunkResult {
|
||||
/** The successfully parsed JSON values. */
|
||||
values: unknown[];
|
||||
/** How far into the input was consumed. When the input is a string, this is a character offset. When the input is a `TypedArray`, this is a byte offset. Use `input.slice(read)` or `input.subarray(read)` to get the unconsumed remainder. */
|
||||
read: number;
|
||||
/** `true` if all input was consumed successfully. `false` if the input ends with an incomplete value or a parse error occurred. */
|
||||
done: boolean;
|
||||
/** A `SyntaxError` if a parse error occurred, otherwise `null`. Values parsed before the error are still available in `values`. */
|
||||
error: SyntaxError | null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a JSONL (JSON Lines) string into an array of JavaScript values.
|
||||
*
|
||||
* If a parse error occurs and no values were successfully parsed, throws
|
||||
* a `SyntaxError`. If values were parsed before the error, returns the
|
||||
* successfully parsed values without throwing.
|
||||
*
|
||||
* Incomplete trailing values (e.g. from a partial chunk) are silently
|
||||
* ignored and not included in the result.
|
||||
*
|
||||
* When a `TypedArray` is passed, the bytes are parsed directly without
|
||||
* copying if the content is ASCII.
|
||||
*
|
||||
* @param input The JSONL string or typed array to parse
|
||||
* @returns An array of parsed values
|
||||
* @throws {SyntaxError} If the input starts with invalid JSON and no values could be parsed
|
||||
*
|
||||
* @example
|
||||
* ```js
|
||||
* const items = Bun.JSONL.parse('{"a":1}\n{"b":2}\n');
|
||||
* // [{ a: 1 }, { b: 2 }]
|
||||
*
|
||||
* // From a Uint8Array (zero-copy for ASCII):
|
||||
* const buf = new TextEncoder().encode('{"a":1}\n{"b":2}\n');
|
||||
* const items = Bun.JSONL.parse(buf);
|
||||
* // [{ a: 1 }, { b: 2 }]
|
||||
*
|
||||
* // Partial results on error after valid values:
|
||||
* const partial = Bun.JSONL.parse('{"a":1}\n{bad}\n');
|
||||
* // [{ a: 1 }]
|
||||
*
|
||||
* // Throws when no valid values precede the error:
|
||||
* Bun.JSONL.parse('{bad}\n'); // throws SyntaxError
|
||||
* ```
|
||||
*/
|
||||
export function parse(input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike): unknown[];
|
||||
|
||||
/**
|
||||
* Parse a JSONL chunk, designed for streaming use.
|
||||
*
|
||||
* Never throws on parse errors. Instead, returns whatever values were
|
||||
* successfully parsed along with an `error` property containing the
|
||||
* `SyntaxError` (or `null` on success). Use `read` to determine how
|
||||
* much input was consumed and `done` to check if all input was parsed.
|
||||
*
|
||||
* When a `TypedArray` is passed, the bytes are parsed directly without
|
||||
* copying if the content is ASCII. Optional `start` and `end` parameters
|
||||
* allow slicing without copying, and `read` will be a byte offset into
|
||||
* the original typed array.
|
||||
*
|
||||
* @param input The JSONL string or typed array to parse
|
||||
* @param start Byte offset to start parsing from (typed array only, default: 0)
|
||||
* @param end Byte offset to stop parsing at (typed array only, default: input.byteLength)
|
||||
* @returns An object with `values`, `read`, `done`, and `error` properties
|
||||
*
|
||||
* @example
|
||||
* ```js
|
||||
* let buffer = new Uint8Array(0);
|
||||
* for await (const chunk of stream) {
|
||||
* buffer = Buffer.concat([buffer, chunk]);
|
||||
* const { values, read, error } = Bun.JSONL.parseChunk(buffer);
|
||||
* if (error) throw error;
|
||||
* for (const value of values) handle(value);
|
||||
* buffer = buffer.subarray(read);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function parseChunk(input: string): ParseChunkResult;
|
||||
export function parseChunk(
|
||||
input: NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
|
||||
start?: number,
|
||||
end?: number,
|
||||
): ParseChunkResult;
|
||||
}
|
||||
|
||||
/**
|
||||
* YAML related APIs
|
||||
*/
|
||||
@@ -905,480 +810,6 @@ declare module "bun" {
|
||||
export function stringify(input: unknown, replacer?: undefined | null, space?: string | number): string;
|
||||
}
|
||||
|
||||
/**
* Markdown related APIs.
*
* Provides fast markdown parsing and rendering with three output modes:
* - `html()` — render to an HTML string
* - `render()` — render with custom callbacks for each element
* - `react()` — parse to React-compatible JSX elements
*
* Supports GFM extensions (tables, strikethrough, task lists, autolinks) and
* component overrides to replace default HTML tags with custom components.
*
* @example
* ```tsx
* // Render markdown to HTML
* const html = Bun.markdown.html("# Hello **world**");
* // "<h1>Hello <strong>world</strong></h1>\n"
*
* // Render with custom callbacks
* const ansi = Bun.markdown.render("# Hello **world**", {
*   heading: (children, { level }) => `\x1b[1m${children}\x1b[0m\n`,
*   strong: (children) => `\x1b[1m${children}\x1b[22m`,
*   paragraph: (children) => children + "\n",
* });
*
* // Render as a React component
* function Markdown({ text }: { text: string }) {
*   return Bun.markdown.react(text);
* }
*
* // With component overrides
* const element = Bun.markdown.react("# Hello", { h1: MyHeadingComponent });
* ```
*/
namespace markdown {
/**
* Options for configuring the markdown parser.
*
* By default, GFM extensions (tables, strikethrough, task lists) are enabled.
*/
interface Options {
/** Enable GFM tables. Default: `true`. */
tables?: boolean;
/** Enable GFM strikethrough (`~~text~~`). Default: `true`. */
strikethrough?: boolean;
/** Enable GFM task lists (`- [x] item`). Default: `true`. */
tasklists?: boolean;
/** Treat soft line breaks as hard line breaks. Default: `false`. */
hardSoftBreaks?: boolean;
/** Enable wiki-style links (`[[target]]` or `[[target|label]]`). Default: `false`. */
wikiLinks?: boolean;
/** Enable underline syntax (`__text__` renders as `<u>` instead of `<strong>`). Default: `false`. */
underline?: boolean;
/** Enable LaTeX math (`$inline$` and `$$display$$`). Default: `false`. */
latexMath?: boolean;
/** Collapse whitespace in text content. Default: `false`. */
collapseWhitespace?: boolean;
/** Allow ATX headers without a space after `#`. Default: `false`. */
permissiveAtxHeaders?: boolean;
/** Disable indented code blocks. Default: `false`. */
noIndentedCodeBlocks?: boolean;
/** Disable HTML blocks. Default: `false`. */
noHtmlBlocks?: boolean;
/** Disable inline HTML spans. Default: `false`. */
noHtmlSpans?: boolean;
/**
* Enable the GFM tag filter, which replaces `<` with `&lt;` for disallowed
* HTML tags (e.g. `<script>`, `<style>`, `<iframe>`). Default: `false`.
*/
tagFilter?: boolean;
/**
* Enable autolinks. Pass `true` to enable all autolink types (URL, WWW, email),
* or an object to enable individually.
*
* @example
* ```ts
* // Enable all autolinks
* { autolinks: true }
* // Enable only URL and email autolinks
* { autolinks: { url: true, email: true } }
* ```
*/
autolinks?: boolean | { url?: boolean; www?: boolean; email?: boolean };
/**
* Configure heading IDs and autolink headings. Pass `true` to enable both
* heading IDs and autolink headings, or an object to configure individually.
*
* @example
* ```ts
* // Enable both heading IDs and autolink headings
* { headings: true }
* // Enable only heading IDs
* { headings: { ids: true } }
* ```
*/
headings?: boolean | { ids?: boolean; autolink?: boolean };
}
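Since the defaults above matter in combination, here is a small, hedged sketch of how these options compose when calling `Bun.markdown.html`; the exact HTML output is not shown because it depends on the renderer, and the behavior is taken only from the option descriptions above.

```ts
const source = "# Title\n\nVisit www.example.com and <script>alert(1)</script>";

const html = Bun.markdown.html(source, {
  // Tables, strikethrough, and task lists are already enabled by default.
  autolinks: { www: true },   // linkify bare www.* references only
  tagFilter: true,            // escape disallowed raw HTML such as <script>
  headings: { ids: true },    // emit id slugs like id="title" on headings
});
```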
|
||||
|
||||
/** A component that accepts props `P`: a function, class, or HTML tag name. */
|
||||
type Component<P = {}> = string | ((props: P) => any) | (new (props: P) => any);
|
||||
|
||||
interface ChildrenProps {
|
||||
children: import("./jsx.d.ts").JSX.Element[];
|
||||
}
|
||||
interface HeadingProps extends ChildrenProps {
|
||||
/** Heading ID slug. Set when `headings: { ids: true }` is enabled. */
|
||||
id?: string;
|
||||
}
|
||||
interface OrderedListProps extends ChildrenProps {
|
||||
/** The start number. */
|
||||
start: number;
|
||||
}
|
||||
interface ListItemProps extends ChildrenProps {
|
||||
/** Task list checked state. Set for `- [x]` / `- [ ]` items. */
|
||||
checked?: boolean;
|
||||
}
|
||||
interface CodeBlockProps extends ChildrenProps {
|
||||
/** The info-string language (e.g. `"js"`). */
|
||||
language?: string;
|
||||
}
|
||||
interface CellProps extends ChildrenProps {
|
||||
/** Column alignment. */
|
||||
align?: "left" | "center" | "right";
|
||||
}
|
||||
interface LinkProps extends ChildrenProps {
|
||||
/** Link URL. */
|
||||
href: string;
|
||||
/** Link title attribute. */
|
||||
title?: string;
|
||||
}
|
||||
interface ImageProps {
|
||||
/** Image URL. */
|
||||
src: string;
|
||||
/** Alt text. */
|
||||
alt?: string;
|
||||
/** Image title attribute. */
|
||||
title?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Component overrides for `react()`.
|
||||
*
|
||||
* Replace default HTML tags with custom React components. Each override
|
||||
* receives the same props the default element would get.
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* function Code({ language, children }: { language?: string; children: React.ReactNode }) {
|
||||
* return <pre data-language={language}><code>{children}</code></pre>;
|
||||
* }
|
||||
* Bun.markdown.react(text, { pre: Code });
|
||||
* ```
|
||||
*/
|
||||
interface ComponentOverrides {
|
||||
h1?: Component<HeadingProps>;
|
||||
h2?: Component<HeadingProps>;
|
||||
h3?: Component<HeadingProps>;
|
||||
h4?: Component<HeadingProps>;
|
||||
h5?: Component<HeadingProps>;
|
||||
h6?: Component<HeadingProps>;
|
||||
p?: Component<ChildrenProps>;
|
||||
blockquote?: Component<ChildrenProps>;
|
||||
ul?: Component<ChildrenProps>;
|
||||
ol?: Component<OrderedListProps>;
|
||||
li?: Component<ListItemProps>;
|
||||
pre?: Component<CodeBlockProps>;
|
||||
hr?: Component<{}>;
|
||||
html?: Component<ChildrenProps>;
|
||||
table?: Component<ChildrenProps>;
|
||||
thead?: Component<ChildrenProps>;
|
||||
tbody?: Component<ChildrenProps>;
|
||||
tr?: Component<ChildrenProps>;
|
||||
th?: Component<CellProps>;
|
||||
td?: Component<CellProps>;
|
||||
em?: Component<ChildrenProps>;
|
||||
strong?: Component<ChildrenProps>;
|
||||
a?: Component<LinkProps>;
|
||||
img?: Component<ImageProps>;
|
||||
code?: Component<ChildrenProps>;
|
||||
del?: Component<ChildrenProps>;
|
||||
math?: Component<ChildrenProps>;
|
||||
u?: Component<ChildrenProps>;
|
||||
br?: Component<{}>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Callbacks for `render()`. Each callback receives the accumulated children
|
||||
* as a string and optional metadata, and returns a string.
|
||||
*
|
||||
* Return `null` or `undefined` to omit the element from the output.
|
||||
* If no callback is registered for an element, its children pass through unchanged.
|
||||
*/
|
||||
/** Meta passed to the `heading` callback. */
|
||||
interface HeadingMeta {
|
||||
/** Heading level (1–6). */
|
||||
level: number;
|
||||
/** Heading ID slug. Set when `headings: { ids: true }` is enabled. */
|
||||
id?: string;
|
||||
}
|
||||
|
||||
/** Meta passed to the `code` callback. */
|
||||
interface CodeBlockMeta {
|
||||
/** The info-string language (e.g. `"js"`). */
|
||||
language?: string;
|
||||
}
|
||||
|
||||
/** Meta passed to the `list` callback. */
|
||||
interface ListMeta {
|
||||
/** Whether this is an ordered list. */
|
||||
ordered: boolean;
|
||||
/** The start number for ordered lists. */
|
||||
start?: number;
|
||||
}
|
||||
|
||||
/** Meta passed to the `listItem` callback. */
|
||||
interface ListItemMeta {
|
||||
/** Task list checked state. Set for `- [x]` / `- [ ]` items. */
|
||||
checked?: boolean;
|
||||
}
|
||||
|
||||
/** Meta passed to `th` and `td` callbacks. */
|
||||
interface CellMeta {
|
||||
/** Column alignment. */
|
||||
align?: "left" | "center" | "right";
|
||||
}
|
||||
|
||||
/** Meta passed to the `link` callback. */
|
||||
interface LinkMeta {
|
||||
/** Link URL. */
|
||||
href: string;
|
||||
/** Link title attribute. */
|
||||
title?: string;
|
||||
}
|
||||
|
||||
/** Meta passed to the `image` callback. */
|
||||
interface ImageMeta {
|
||||
/** Image URL. */
|
||||
src: string;
|
||||
/** Image title attribute. */
|
||||
title?: string;
|
||||
}
|
||||
|
||||
interface RenderCallbacks {
/** Heading (level 1–6). `id` is set when `headings: { ids: true }` is enabled. */
heading?: (children: string, meta: HeadingMeta) => string | null | undefined;
/** Paragraph. */
paragraph?: (children: string) => string | null | undefined;
/** Blockquote. */
blockquote?: (children: string) => string | null | undefined;
/** Code block. `meta.language` is the info-string (e.g. `"js"`). Only passed for fenced code blocks with a language. */
code?: (children: string, meta?: CodeBlockMeta) => string | null | undefined;
/** Ordered or unordered list. `start` is the first item number for ordered lists. */
list?: (children: string, meta: ListMeta) => string | null | undefined;
/** List item. `meta.checked` is set for task list items (`- [x]` / `- [ ]`). Only passed for task list items. */
listItem?: (children: string, meta?: ListItemMeta) => string | null | undefined;
/** Horizontal rule. */
hr?: (children: string) => string | null | undefined;
/** Table. */
table?: (children: string) => string | null | undefined;
/** Table head. */
thead?: (children: string) => string | null | undefined;
/** Table body. */
tbody?: (children: string) => string | null | undefined;
/** Table row. */
tr?: (children: string) => string | null | undefined;
/** Table header cell. `meta.align` is set when column alignment is specified. */
th?: (children: string, meta?: CellMeta) => string | null | undefined;
/** Table data cell. `meta.align` is set when column alignment is specified. */
td?: (children: string, meta?: CellMeta) => string | null | undefined;
/** Raw HTML content. */
html?: (children: string) => string | null | undefined;
/** Strong emphasis (`**text**`). */
strong?: (children: string) => string | null | undefined;
/** Emphasis (`*text*`). */
emphasis?: (children: string) => string | null | undefined;
/** Link. `href` is the URL, `title` is the optional title attribute. */
link?: (children: string, meta: LinkMeta) => string | null | undefined;
/** Image. `src` is the URL, `title` is the optional title attribute. */
image?: (children: string, meta: ImageMeta) => string | null | undefined;
/** Inline code (`` `code` ``). */
codespan?: (children: string) => string | null | undefined;
/** Strikethrough (`~~text~~`). */
strikethrough?: (children: string) => string | null | undefined;
/** Plain text content. */
text?: (text: string) => string | null | undefined;
}
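To make the callback contract above concrete (children arrive as already-rendered strings, and returning `null` or `undefined` omits the element), here is a sketch of a plain-text renderer; it is illustrative only, not verified output.

```ts
const plain = Bun.markdown.render("# Notes\n\nSee [docs](https://bun.sh) ~~draft~~", {
  heading: (children, { level }) => `${"#".repeat(level)} ${children}\n\n`,
  paragraph: children => `${children}\n`,
  link: (children, { href }) => `${children} <${href}>`,
  strikethrough: () => null, // drop struck-through text entirely
  image: () => undefined,    // likewise omit images
});
```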
|
||||
|
||||
/** Options for `react()` — parser options and element symbol configuration. */
|
||||
interface ReactOptions extends Options {
|
||||
/**
|
||||
* Which `$$typeof` symbol to use on the generated elements.
|
||||
* - `19` (default): `Symbol.for('react.transitional.element')`
|
||||
* - `18`: `Symbol.for('react.element')` — use this for React 18 and older
|
||||
*/
|
||||
reactVersion?: 18 | 19;
|
||||
}
|
||||
|
||||
/**
|
||||
* Render markdown to an HTML string.
|
||||
*
|
||||
* @param input The markdown string or buffer to render
|
||||
* @param options Parser options
|
||||
* @returns An HTML string
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const html = Bun.markdown.html("# Hello **world**");
|
||||
* // "<h1>Hello <strong>world</strong></h1>\n"
|
||||
*
|
||||
* // With options
|
||||
* const html = Bun.markdown.html("## Hello", { headings: { ids: true } });
|
||||
* // '<h2 id="hello">Hello</h2>\n'
|
||||
* ```
|
||||
*/
|
||||
export function html(
|
||||
input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
|
||||
options?: Options,
|
||||
): string;
|
||||
|
||||
/**
|
||||
* Render markdown with custom JavaScript callbacks for each element.
|
||||
*
|
||||
* Each callback receives the accumulated children as a string and optional
|
||||
* metadata, and returns a string. Return `null` or `undefined` to omit
|
||||
* an element. If no callback is registered, children pass through unchanged.
|
||||
*
|
||||
* Parser options are passed as a separate third argument.
|
||||
*
|
||||
* @param input The markdown string to render
|
||||
* @param callbacks Callbacks for each element type
|
||||
* @param options Parser options
|
||||
* @returns The accumulated string output
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // Custom HTML with classes
|
||||
* const html = Bun.markdown.render("# Title\n\nHello **world**", {
|
||||
* heading: (children, { level }) => `<h${level} class="title">${children}</h${level}>`,
|
||||
* paragraph: (children) => `<p>${children}</p>`,
|
||||
* strong: (children) => `<b>${children}</b>`,
|
||||
* });
|
||||
*
|
||||
* // ANSI terminal output
|
||||
* const ansi = Bun.markdown.render("# Hello\n\n**bold**", {
|
||||
* heading: (children) => `\x1b[1;4m${children}\x1b[0m\n`,
|
||||
* paragraph: (children) => children + "\n",
|
||||
* strong: (children) => `\x1b[1m${children}\x1b[22m`,
|
||||
* });
|
||||
*
|
||||
* // With parser options as third argument
|
||||
* const text = Bun.markdown.render("Visit www.example.com", {
|
||||
* link: (children, { href }) => `[${children}](${href})`,
|
||||
* paragraph: (children) => children,
|
||||
* }, { autolinks: true });
|
||||
* ```
|
||||
*/
|
||||
export function render(
|
||||
input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
|
||||
callbacks?: RenderCallbacks,
|
||||
options?: Options,
|
||||
): string;
|
||||
|
||||
/**
|
||||
* Render markdown to React JSX elements.
|
||||
*
|
||||
* Returns a React Fragment containing the parsed markdown as children.
|
||||
* Can be returned directly from a component or passed to `renderToString()`.
|
||||
*
|
||||
* Override any HTML element with a custom component by passing it in the
|
||||
* second argument, keyed by tag name. Custom components receive the same props
|
||||
* the default elements would (e.g. `href` for links, `language` for code blocks).
|
||||
*
|
||||
* Parser options (including `reactVersion`) are passed as a separate third argument.
|
||||
* Uses `Symbol.for('react.transitional.element')` by default (React 19).
|
||||
* Pass `reactVersion: 18` for React 18 and older.
|
||||
*
|
||||
* @param input The markdown string or buffer to parse
|
||||
* @param components Component overrides keyed by HTML tag name
|
||||
* @param options Parser options and element symbol configuration
|
||||
* @returns A React Fragment element containing the parsed markdown
|
||||
*
|
||||
* @example
|
||||
* ```tsx
|
||||
* // Use directly as a component return value
|
||||
* function Markdown({ text }: { text: string }) {
|
||||
* return Bun.markdown.react(text);
|
||||
* }
|
||||
*
|
||||
* // Server-side rendering
|
||||
* import { renderToString } from "react-dom/server";
|
||||
* const html = renderToString(Bun.markdown.react("# Hello **world**"));
|
||||
*
|
||||
* // Custom components receive element props
|
||||
* function Code({ language, children }: { language?: string; children: React.ReactNode }) {
|
||||
* return <pre data-language={language}><code>{children}</code></pre>;
|
||||
* }
|
||||
* function Link({ href, children }: { href: string; children: React.ReactNode }) {
|
||||
* return <a href={href} target="_blank">{children}</a>;
|
||||
* }
|
||||
* const el = Bun.markdown.react(text, { pre: Code, a: Link });
|
||||
*
|
||||
* // For React 18 and older
|
||||
* const el18 = Bun.markdown.react(text, undefined, { reactVersion: 18 });
|
||||
* ```
|
||||
*/
|
||||
export function react(
|
||||
input: string | NodeJS.TypedArray | DataView<ArrayBuffer> | ArrayBufferLike,
|
||||
components?: ComponentOverrides,
|
||||
options?: ReactOptions,
|
||||
): import("./jsx.d.ts").JSX.Element;
|
||||
}
|
||||
|
||||
/**
* JSON5 related APIs
*/
namespace JSON5 {
/**
* Parse a JSON5 string into a JavaScript value.
*
* JSON5 is a superset of JSON based on ECMAScript 5.1 that supports
* comments, trailing commas, unquoted keys, single-quoted strings,
* hex numbers, Infinity, NaN, and more.
*
* @category Utilities
*
* @param input The JSON5 string to parse
* @returns A JavaScript value
*
* @example
* ```ts
* import { JSON5 } from "bun";
*
* const result = JSON5.parse(`{
*   // This is a comment
*   name: 'my-app',
*   version: '1.0.0', // trailing comma is allowed
*   hex: 0xDEADbeef,
*   half: .5,
*   infinity: Infinity,
* }`);
* ```
*/
export function parse(input: string): unknown;

/**
* Convert a JavaScript value into a JSON5 string. Object keys that are
* valid identifiers are unquoted, strings use double quotes, `Infinity`
* and `NaN` are represented as literals, and indented output includes
* trailing commas.
*
* @category Utilities
*
* @param input The JavaScript value to stringify.
* @param replacer Currently not supported.
* @param space A number for how many spaces each level of indentation gets, or a string used as indentation.
* The number is clamped between 0 and 10, and the first 10 characters of the string are used.
* @returns A JSON5 string, or `undefined` if the input is `undefined`, a function, or a symbol.
*
* @example
* ```ts
* import { JSON5 } from "bun";
*
* console.log(JSON5.stringify({ a: 1, b: "two" }));
* // {a:1,b:"two"}
*
* console.log(JSON5.stringify({ a: 1, b: 2 }, null, 2));
* // {
* //   a: 1,
* //   b: 2,
* // }
* ```
*/
export function stringify(input: unknown, replacer?: undefined | null, space?: string | number): string | undefined;
}
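A short usage sketch tying the two declarations together; the output shapes follow the examples above.

```ts
import { JSON5 } from "bun";

const config = JSON5.parse(`{
  // comments and unquoted keys are allowed
  retries: 3,
  timeout: Infinity,
}`) as { retries: number; timeout: number };

// Indented output uses unquoted identifier keys and trailing commas.
const text = JSON5.stringify(config, null, 2);
```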

/**
* Synchronously resolve a `moduleId` as though it were imported from `parent`
*
@@ -2314,17 +1745,6 @@ declare module "bun" {
* @default "warn"
*/
logLevel?: "verbose" | "debug" | "info" | "warn" | "error";

/**
* Enable REPL mode transforms:
* - Wraps top-level inputs that appear to be object literals (inputs starting with '{' without trailing ';') in parentheses
* - Hoists all declarations as var for REPL persistence across vm.runInContext calls
* - Wraps last expression in { __proto__: null, value: expr } for result capture
* - Wraps code in sync/async IIFE to avoid parentheses around object literals
*
* @default false
*/
replMode?: boolean;
}
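The bullet list above is easier to follow with a rough before/after sketch. The generated code below is an assumption about the general shape, not the transpiler's exact output, and it assumes the `replMode` flag lives on the transpiler options declared here.

```ts
// Input typed at a REPL prompt:
//   { answer: await Promise.resolve(42) }
//
// With replMode enabled, the output is conceptually something like:
//   (async () => {
//     return { __proto__: null, value: ({ answer: await Promise.resolve(42) }) };
//   })();
// i.e. the lone object literal is parenthesized, declarations are hoisted as
// `var` so they persist across vm.runInContext calls, and the final expression
// is wrapped in { __proto__: null, value } for result capture.
const transpiler = new Bun.Transpiler({ loader: "js", replMode: true });
const output = transpiler.transformSync("{ answer: await Promise.resolve(42) }");
```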
|
||||
|
||||
/**
|
||||
@@ -2431,7 +1851,7 @@ declare module "bun" {
|
||||
type Architecture = "x64" | "arm64";
|
||||
type Libc = "glibc" | "musl";
|
||||
type SIMD = "baseline" | "modern";
|
||||
type CompileTarget =
|
||||
type Target =
|
||||
| `bun-darwin-${Architecture}`
|
||||
| `bun-darwin-x64-${SIMD}`
|
||||
| `bun-linux-${Architecture}`
|
||||
@@ -2773,7 +2193,7 @@ declare module "bun" {
|
||||
}
|
||||
|
||||
interface CompileBuildOptions {
|
||||
target?: Bun.Build.CompileTarget;
|
||||
target?: Bun.Build.Target;
|
||||
execArgv?: string[];
|
||||
executablePath?: string;
|
||||
outfile?: string;
|
||||
@@ -2855,7 +2275,7 @@ declare module "bun" {
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
compile: boolean | Bun.Build.CompileTarget | CompileBuildOptions;
|
||||
compile: boolean | Bun.Build.Target | CompileBuildOptions;
|
||||
|
||||
/**
|
||||
* Splitting is not currently supported with `.compile`
|
||||
|
||||
packages/bun-types/extensions.d.ts (vendored)
@@ -23,11 +23,6 @@ declare module "*.jsonc" {
|
||||
export = contents;
|
||||
}
|
||||
|
||||
declare module "*.json5" {
|
||||
var contents: any;
|
||||
export = contents;
|
||||
}
|
||||
|
||||
declare module "*/bun.lock" {
|
||||
var contents: import("bun").BunLockFile;
|
||||
export = contents;
|
||||
|
||||
packages/bun-types/jsx.d.ts (vendored)
@@ -1,11 +0,0 @@
|
||||
export {};
|
||||
|
||||
type ReactElement = typeof globalThis extends { React: infer React }
|
||||
? React extends { createElement(...args: any): infer R }
|
||||
? R
|
||||
: never
|
||||
: unknown;
|
||||
|
||||
export namespace JSX {
|
||||
export type Element = ReactElement;
|
||||
}
|
||||
@@ -39,7 +39,6 @@ add_compile_definitions(
|
||||
CONFIG_TCC_PREDEFS
|
||||
ONE_SOURCE=0
|
||||
TCC_LIBTCC1="\\0"
|
||||
CONFIG_TCC_BACKTRACE=0
|
||||
)
|
||||
|
||||
if(APPLE)
|
||||
|
||||
@@ -23,10 +23,7 @@ const OS_NAME = platform().toLowerCase();
|
||||
const ARCH_NAME_RAW = arch();
|
||||
const IS_MAC = OS_NAME === "darwin";
|
||||
const IS_LINUX = OS_NAME === "linux";
|
||||
const IS_WINDOWS = OS_NAME === "win32";
|
||||
// On Windows, use PROCESSOR_ARCHITECTURE env var to get native arch (Bun may run under x64 emulation)
|
||||
const NATIVE_ARCH = IS_WINDOWS ? (process.env.PROCESSOR_ARCHITECTURE || ARCH_NAME_RAW).toUpperCase() : ARCH_NAME_RAW;
|
||||
const IS_ARM64 = NATIVE_ARCH === "ARM64" || NATIVE_ARCH === "AARCH64" || ARCH_NAME_RAW === "arm64";
|
||||
const IS_ARM64 = ARCH_NAME_RAW === "arm64" || ARCH_NAME_RAW === "aarch64";
|
||||
|
||||
// Paths
|
||||
const ROOT_DIR = resolve(import.meta.dir, "..");
|
||||
@@ -36,54 +33,22 @@ const WEBKIT_RELEASE_DIR = join(WEBKIT_BUILD_DIR, "Release");
|
||||
const WEBKIT_DEBUG_DIR = join(WEBKIT_BUILD_DIR, "Debug");
|
||||
const WEBKIT_RELEASE_DIR_LTO = join(WEBKIT_BUILD_DIR, "ReleaseLTO");
|
||||
|
||||
// Windows ICU paths - use vcpkg static build
|
||||
// Auto-detect triplet: prefer arm64 if it exists, otherwise x64
|
||||
const VCPKG_ARM64_PATH = join(WEBKIT_DIR, "vcpkg_installed", "arm64-windows-static");
|
||||
const VCPKG_X64_PATH = join(WEBKIT_DIR, "vcpkg_installed", "x64-windows-static");
|
||||
const VCPKG_ROOT = existsSync(VCPKG_ARM64_PATH) ? VCPKG_ARM64_PATH : VCPKG_X64_PATH;
|
||||
const ICU_INCLUDE_DIR = join(VCPKG_ROOT, "include");
|
||||
|
||||
// Get ICU library paths based on build config (debug uses 'd' suffix libraries)
|
||||
function getICULibraryPaths(config: BuildConfig) {
|
||||
const isDebug = config === "debug";
|
||||
// vcpkg static ICU libraries: release in lib/, debug in debug/lib/ with 'd' suffix
|
||||
const libDir = isDebug ? join(VCPKG_ROOT, "debug", "lib") : join(VCPKG_ROOT, "lib");
|
||||
const suffix = isDebug ? "d" : "";
|
||||
return {
|
||||
ICU_LIBRARY: libDir,
|
||||
ICU_DATA_LIBRARY: join(libDir, `sicudt${suffix}.lib`),
|
||||
ICU_I18N_LIBRARY: join(libDir, `sicuin${suffix}.lib`),
|
||||
ICU_UC_LIBRARY: join(libDir, `sicuuc${suffix}.lib`),
|
||||
};
|
||||
}
|
||||
|
||||
// Homebrew prefix detection
|
||||
const HOMEBREW_PREFIX = IS_ARM64 ? "/opt/homebrew/" : "/usr/local/";
|
||||
|
||||
// Compiler detection
|
||||
function findExecutable(names: string[]): string | null {
|
||||
for (const name of names) {
|
||||
const path = Bun.which(name);
|
||||
if (path) return path;
|
||||
const result = spawnSync("which", [name], { encoding: "utf8" });
|
||||
if (result.status === 0) {
|
||||
return result.stdout.trim();
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Detect ccache
|
||||
const CCACHE = findExecutable(["ccache"]);
|
||||
const HAS_CCACHE = CCACHE !== null;
|
||||
|
||||
// Configure compilers with ccache if available
|
||||
// On Windows, use clang-cl for MSVC compatibility
|
||||
const CC_BASE = IS_WINDOWS
|
||||
? findExecutable(["clang-cl.exe", "clang-cl"]) || "clang-cl"
|
||||
: findExecutable(["clang-19", "clang"]) || "clang";
|
||||
const CXX_BASE = IS_WINDOWS
|
||||
? findExecutable(["clang-cl.exe", "clang-cl"]) || "clang-cl"
|
||||
: findExecutable(["clang++-19", "clang++"]) || "clang++";
|
||||
|
||||
const CC = HAS_CCACHE ? CCACHE : CC_BASE;
|
||||
const CXX = HAS_CCACHE ? CCACHE : CXX_BASE;
|
||||
const CC = findExecutable(["clang-19", "clang"]) || "clang";
|
||||
const CXX = findExecutable(["clang++-19", "clang++"]) || "clang++";
|
||||
|
||||
// Build directory based on config
|
||||
const getBuildDir = (config: BuildConfig) => {
|
||||
@@ -98,7 +63,7 @@ const getBuildDir = (config: BuildConfig) => {
|
||||
};
|
||||
|
||||
// Common CMake flags
|
||||
const getCommonFlags = (config: BuildConfig) => {
|
||||
const getCommonFlags = () => {
|
||||
const flags = [
|
||||
"-DPORT=JSCOnly",
|
||||
"-DENABLE_STATIC_JSC=ON",
|
||||
@@ -109,27 +74,16 @@ const getCommonFlags = (config: BuildConfig) => {
|
||||
"-DENABLE_FTL_JIT=ON",
|
||||
"-G",
|
||||
"Ninja",
|
||||
`-DCMAKE_C_COMPILER=${CC}`,
|
||||
`-DCMAKE_CXX_COMPILER=${CXX}`,
|
||||
];
|
||||
|
||||
// Configure compiler with ccache if available
|
||||
if (HAS_CCACHE) {
|
||||
flags.push(
|
||||
`-DCMAKE_C_COMPILER_LAUNCHER=${CCACHE}`,
|
||||
`-DCMAKE_CXX_COMPILER_LAUNCHER=${CCACHE}`,
|
||||
`-DCMAKE_C_COMPILER=${CC_BASE}`,
|
||||
`-DCMAKE_CXX_COMPILER=${CXX_BASE}`,
|
||||
);
|
||||
} else {
|
||||
flags.push(`-DCMAKE_C_COMPILER=${CC}`, `-DCMAKE_CXX_COMPILER=${CXX}`);
|
||||
}
|
||||
|
||||
if (IS_MAC) {
|
||||
flags.push(
|
||||
"-DENABLE_SINGLE_THREADED_VM_ENTRY_SCOPE=ON",
|
||||
"-DBUN_FAST_TLS=ON",
|
||||
"-DPTHREAD_JIT_PERMISSIONS_API=1",
|
||||
"-DUSE_PTHREAD_JIT_PERMISSIONS_API=ON",
|
||||
"-DENABLE_REMOTE_INSPECTOR=ON",
|
||||
);
|
||||
} else if (IS_LINUX) {
|
||||
flags.push(
|
||||
@@ -137,27 +91,6 @@ const getCommonFlags = (config: BuildConfig) => {
|
||||
"-DUSE_VISIBILITY_ATTRIBUTE=1",
|
||||
"-DENABLE_REMOTE_INSPECTOR=ON",
|
||||
);
|
||||
} else if (IS_WINDOWS) {
|
||||
// Find lld-link for Windows builds
|
||||
const lldLink = findExecutable(["lld-link.exe", "lld-link"]) || "lld-link";
|
||||
// Get ICU library paths for this build config (debug uses 'd' suffix libraries)
|
||||
const icuPaths = getICULibraryPaths(config);
|
||||
|
||||
flags.push(
|
||||
"-DENABLE_REMOTE_INSPECTOR=ON",
|
||||
"-DUSE_VISIBILITY_ATTRIBUTE=1",
|
||||
"-DUSE_SYSTEM_MALLOC=ON",
|
||||
`-DCMAKE_LINKER=${lldLink}`,
|
||||
`-DICU_ROOT=${VCPKG_ROOT}`,
|
||||
`-DICU_LIBRARY=${icuPaths.ICU_LIBRARY}`,
|
||||
`-DICU_INCLUDE_DIR=${ICU_INCLUDE_DIR}`,
|
||||
// Explicitly set ICU library paths to use vcpkg static libs (debug has 'd' suffix)
|
||||
`-DICU_DATA_LIBRARY_RELEASE=${icuPaths.ICU_DATA_LIBRARY}`,
|
||||
`-DICU_I18N_LIBRARY_RELEASE=${icuPaths.ICU_I18N_LIBRARY}`,
|
||||
`-DICU_UC_LIBRARY_RELEASE=${icuPaths.ICU_UC_LIBRARY}`,
|
||||
"-DCMAKE_C_FLAGS=/DU_STATIC_IMPLEMENTATION",
|
||||
"-DCMAKE_CXX_FLAGS=/DU_STATIC_IMPLEMENTATION /clang:-fno-c++-static-destructors",
|
||||
);
|
||||
}
|
||||
|
||||
return flags;
|
||||
@@ -165,7 +98,7 @@ const getCommonFlags = (config: BuildConfig) => {
|
||||
|
||||
// Build-specific CMake flags
|
||||
const getBuildFlags = (config: BuildConfig) => {
|
||||
const flags = [...getCommonFlags(config)];
|
||||
const flags = [...getCommonFlags()];
|
||||
|
||||
switch (config) {
|
||||
case "debug":
|
||||
@@ -173,40 +106,24 @@ const getBuildFlags = (config: BuildConfig) => {
|
||||
"-DCMAKE_BUILD_TYPE=Debug",
|
||||
"-DENABLE_BUN_SKIP_FAILING_ASSERTIONS=ON",
|
||||
"-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
|
||||
"-DENABLE_REMOTE_INSPECTOR=ON",
|
||||
"-DUSE_VISIBILITY_ATTRIBUTE=1",
|
||||
);
|
||||
|
||||
if (IS_MAC || IS_LINUX) {
|
||||
// Enable address sanitizer by default on Mac/Linux debug builds
|
||||
if (IS_MAC) {
|
||||
// Enable address sanitizer by default on Mac debug builds
|
||||
flags.push("-DENABLE_SANITIZERS=address");
|
||||
// To disable asan, comment the line above and uncomment:
|
||||
// flags.push("-DENABLE_MALLOC_HEAP_BREAKDOWN=ON");
|
||||
}
|
||||
|
||||
if (IS_WINDOWS) {
|
||||
flags.push("-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreadedDebug");
|
||||
}
|
||||
break;
|
||||
|
||||
case "lto":
|
||||
flags.push("-DCMAKE_BUILD_TYPE=Release");
|
||||
if (IS_WINDOWS) {
|
||||
// On Windows, append LTO flags to existing Windows-specific flags
|
||||
flags.push(
|
||||
"-DCMAKE_C_FLAGS=/DU_STATIC_IMPLEMENTATION -flto=full",
|
||||
"-DCMAKE_CXX_FLAGS=/DU_STATIC_IMPLEMENTATION /clang:-fno-c++-static-destructors -flto=full",
|
||||
"-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded",
|
||||
);
|
||||
} else {
|
||||
flags.push("-DCMAKE_C_FLAGS=-flto=full", "-DCMAKE_CXX_FLAGS=-flto=full");
|
||||
}
|
||||
flags.push("-DCMAKE_BUILD_TYPE=Release", "-DCMAKE_C_FLAGS=-flto=full", "-DCMAKE_CXX_FLAGS=-flto=full");
|
||||
break;
|
||||
|
||||
default: // release
|
||||
flags.push("-DCMAKE_BUILD_TYPE=RelWithDebInfo");
|
||||
if (IS_WINDOWS) {
|
||||
flags.push("-DCMAKE_MSVC_RUNTIME_LIBRARY=MultiThreaded");
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -217,6 +134,17 @@ const getBuildFlags = (config: BuildConfig) => {
|
||||
const getBuildEnv = () => {
|
||||
const env = { ...process.env };
|
||||
|
||||
const cflags = ["-ffat-lto-objects"];
|
||||
const cxxflags = ["-ffat-lto-objects"];
|
||||
|
||||
if (IS_LINUX && buildConfig !== "lto") {
|
||||
cflags.push("-Wl,--whole-archive");
|
||||
cxxflags.push("-Wl,--whole-archive", "-DUSE_BUN_JSC_ADDITIONS=ON", "-DUSE_BUN_EVENT_LOOP=ON");
|
||||
}
|
||||
|
||||
env.CFLAGS = (env.CFLAGS || "") + " " + cflags.join(" ");
|
||||
env.CXXFLAGS = (env.CXXFLAGS || "") + " " + cxxflags.join(" ");
|
||||
|
||||
if (IS_MAC) {
|
||||
env.ICU_INCLUDE_DIRS = `${HOMEBREW_PREFIX}opt/icu4c/include`;
|
||||
}
|
||||
@@ -251,9 +179,6 @@ function buildJSC() {
|
||||
|
||||
console.log(`Building JSC with configuration: ${buildConfig}`);
|
||||
console.log(`Build directory: ${buildDir}`);
|
||||
if (HAS_CCACHE) {
|
||||
console.log(`Using ccache for faster builds: ${CCACHE}`);
|
||||
}
|
||||
|
||||
// Create build directories
|
||||
if (!existsSync(buildDir)) {
|
||||
|
||||
@@ -14,15 +14,6 @@ import {
|
||||
startGroup,
|
||||
} from "./utils.mjs";
|
||||
|
||||
// Detect Windows ARM64 - bun may run under x64 emulation (WoW64), so check multiple indicators
|
||||
const isWindowsARM64 =
|
||||
isWindows &&
|
||||
(process.env.PROCESSOR_ARCHITECTURE === "ARM64" ||
|
||||
process.env.VSCMD_ARG_HOST_ARCH === "arm64" ||
|
||||
process.env.MSYSTEM_CARCH === "aarch64" ||
|
||||
(process.env.PROCESSOR_IDENTIFIER || "").includes("ARMv8") ||
|
||||
process.arch === "arm64");
|
||||
|
||||
if (globalThis.Bun) {
|
||||
await import("./glob-sources.mjs");
|
||||
}
|
||||
@@ -92,23 +83,6 @@ async function build(args) {
|
||||
generateOptions["--toolchain"] = toolchainPath;
|
||||
}
|
||||
|
||||
// Windows ARM64: automatically set required options
|
||||
if (isWindowsARM64) {
|
||||
// Use clang-cl instead of MSVC cl.exe for proper ARM64 flag support
|
||||
if (!generateOptions["-DCMAKE_C_COMPILER"]) {
|
||||
generateOptions["-DCMAKE_C_COMPILER"] = "clang-cl";
|
||||
}
|
||||
if (!generateOptions["-DCMAKE_CXX_COMPILER"]) {
|
||||
generateOptions["-DCMAKE_CXX_COMPILER"] = "clang-cl";
|
||||
}
|
||||
// Skip codegen by default since x64 bun crashes under WoW64 emulation
|
||||
// Can be overridden with -DSKIP_CODEGEN=OFF once ARM64 bun is available
|
||||
if (!generateOptions["-DSKIP_CODEGEN"]) {
|
||||
generateOptions["-DSKIP_CODEGEN"] = "ON";
|
||||
}
|
||||
console.log("Windows ARM64 detected: using clang-cl and SKIP_CODEGEN=ON");
|
||||
}
|
||||
|
||||
const generateArgs = Object.entries(generateOptions).flatMap(([flag, value]) =>
|
||||
flag.startsWith("-D") ? [`${flag}=${value}`] : [flag, value],
|
||||
);
|
||||
|
||||
@@ -215,12 +215,7 @@ while (true) {
|
||||
}
|
||||
|
||||
// Check if build is pending/running/scheduled
|
||||
if (
|
||||
build.state === "scheduled" ||
|
||||
build.state === "running" ||
|
||||
build.state === "creating" ||
|
||||
build.state === "started"
|
||||
) {
|
||||
if (build.state === "scheduled" || build.state === "running" || build.state === "creating") {
|
||||
const runningJobs = build.jobs?.filter((job: any) => job.state === "running") || [];
|
||||
const pendingJobs = build.jobs?.filter((job: any) => job.state === "scheduled" || job.state === "waiting") || [];
|
||||
const passedJobs = build.jobs?.filter((job: any) => job.state === "passed") || [];
|
||||
|
||||
@@ -1,82 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Updates the vendored uucode library and regenerates grapheme tables.
|
||||
#
|
||||
# Usage:
|
||||
# ./scripts/update-uucode.sh # update from default URL
|
||||
# ./scripts/update-uucode.sh /path/to/uucode # update from local directory
|
||||
# ./scripts/update-uucode.sh https://url.tar.gz # update from URL
|
||||
#
|
||||
# After running, verify with:
|
||||
# bun bd test test/js/bun/util/stringWidth.test.ts
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
BUN_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||
UUCODE_DIR="$BUN_ROOT/src/deps/uucode"
|
||||
ZIG="$BUN_ROOT/vendor/zig/zig"
|
||||
|
||||
if [ ! -x "$ZIG" ]; then
|
||||
echo "error: zig not found at $ZIG"
|
||||
echo " run scripts/bootstrap.sh first"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
update_from_dir() {
|
||||
local src="$1"
|
||||
echo "Updating uucode from: $src"
|
||||
rm -rf "$UUCODE_DIR"
|
||||
mkdir -p "$UUCODE_DIR"
|
||||
cp -r "$src"/* "$UUCODE_DIR/"
|
||||
}
|
||||
|
||||
update_from_url() {
|
||||
local url="$1"
|
||||
local tmp
|
||||
tmp=$(mktemp -d)
|
||||
trap "rm -rf $tmp" EXIT
|
||||
|
||||
echo "Downloading uucode from: $url"
|
||||
curl -fsSL "$url" | tar -xz -C "$tmp" --strip-components=1
|
||||
|
||||
update_from_dir "$tmp"
|
||||
}
|
||||
|
||||
# Handle source argument
|
||||
if [ $# -ge 1 ]; then
|
||||
SOURCE="$1"
|
||||
if [ -d "$SOURCE" ]; then
|
||||
update_from_dir "$SOURCE"
|
||||
elif [[ "$SOURCE" == http* ]]; then
|
||||
update_from_url "$SOURCE"
|
||||
else
|
||||
echo "error: argument must be a directory or URL"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
# Default: use the zig global cache if available
|
||||
CACHED=$(find "$HOME/.cache/zig/p" -maxdepth 1 -name "uucode-*" -type d 2>/dev/null | sort -V | tail -1)
|
||||
if [ -n "$CACHED" ]; then
|
||||
update_from_dir "$CACHED"
|
||||
else
|
||||
echo "error: no uucode source specified and none found in zig cache"
|
||||
echo ""
|
||||
echo "usage: $0 <path-to-uucode-dir-or-url>"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "Regenerating grapheme tables..."
|
||||
cd "$BUN_ROOT"
|
||||
"$ZIG" build generate-grapheme-tables
|
||||
|
||||
echo ""
|
||||
echo "Done. Updated files:"
|
||||
echo " src/deps/uucode/ (vendored library)"
|
||||
echo " src/string/immutable/grapheme_tables.zig (regenerated)"
|
||||
echo ""
|
||||
echo "Next steps:"
|
||||
echo " 1. bun bd test test/js/bun/util/stringWidth.test.ts"
|
||||
echo " 2. git add src/deps/uucode src/string/immutable/grapheme_tables.zig"
|
||||
echo " 3. git commit -m 'Update uucode to <version>'"
|
||||
@@ -3,10 +3,6 @@
|
||||
|
||||
$ErrorActionPreference = "Stop"
|
||||
|
||||
# Detect system architecture
|
||||
$script:IsARM64 = [System.Runtime.InteropServices.RuntimeInformation]::OSArchitecture -eq [System.Runtime.InteropServices.Architecture]::Arm64
|
||||
$script:VsArch = if ($script:IsARM64) { "arm64" } else { "amd64" }
|
||||
|
||||
if($env:VSINSTALLDIR -eq $null) {
|
||||
Write-Host "Loading Visual Studio environment, this may take a second..."
|
||||
|
||||
@@ -27,14 +23,14 @@ if($env:VSINSTALLDIR -eq $null) {
|
||||
Push-Location $vsDir
|
||||
try {
|
||||
$vsShell = (Join-Path -Path $vsDir -ChildPath "Common7\Tools\Launch-VsDevShell.ps1")
|
||||
. $vsShell -Arch $script:VsArch -HostArch $script:VsArch
|
||||
. $vsShell -Arch amd64 -HostArch amd64
|
||||
} finally {
|
||||
Pop-Location
|
||||
}
|
||||
}
|
||||
|
||||
if($env:VSCMD_ARG_TGT_ARCH -eq "x86") {
|
||||
throw "Visual Studio environment is targeting 32 bit x86, but only 64-bit architectures (x64/arm64) are supported."
|
||||
throw "Visual Studio environment is targeting 32 bit, but only 64 bit is supported."
|
||||
}
|
||||
|
||||
if ($args.Count -gt 0) {
|
||||
|
||||
@@ -429,49 +429,16 @@ pub const StandaloneModuleGraph = struct {
|
||||
|
||||
const bytecode: StringPointer = brk: {
|
||||
if (output_file.bytecode_index != std.math.maxInt(u32)) {
|
||||
// Bytecode alignment for JSC bytecode cache deserialization.
|
||||
// Not aligning correctly causes a runtime assertion error or segfault.
|
||||
//
|
||||
// PLATFORM-SPECIFIC ALIGNMENT:
|
||||
// - PE (Windows) and Mach-O (macOS): The module graph data is embedded in
|
||||
// a dedicated section with an 8-byte size header. At runtime, the section
|
||||
// is memory-mapped at a page-aligned address (hence 128-byte aligned).
|
||||
// The data buffer starts 8 bytes after the section start.
|
||||
// For bytecode at offset O to be 128-byte aligned:
|
||||
// (section_va + 8 + O) % 128 == 0
|
||||
// => O % 128 == 120
|
||||
//
|
||||
// - ELF (Linux): The module graph data is appended to the executable and
|
||||
// read into a heap-allocated buffer at runtime. The allocator provides
|
||||
// natural alignment, and there's no 8-byte section header offset.
|
||||
// However, using target_mod=120 is still safe because:
|
||||
// - If the buffer is 128-aligned: bytecode at offset 120 is at (128n + 120),
|
||||
// which when loaded at a 128-aligned address gives proper alignment.
|
||||
// - The extra 120 bytes of padding is acceptable overhead.
|
||||
//
|
||||
// This alignment strategy (target_mod=120) works for all platforms because
|
||||
// it's the worst-case offset needed for the 8-byte header scenario.
|
||||
// Use up to 256 byte alignment for bytecode
|
||||
// Not aligning it correctly will cause a runtime assertion error, or a segfault.
|
||||
const bytecode = output_files[output_file.bytecode_index].value.buffer.bytes;
|
||||
const current_offset = string_builder.len;
|
||||
// Calculate padding so that (current_offset + padding) % 128 == 120
|
||||
// This accounts for the 8-byte section header on PE/Mach-O platforms.
|
||||
const target_mod: usize = 128 - @sizeOf(u64); // 120 = accounts for 8-byte header
|
||||
const current_mod = current_offset % 128;
|
||||
const padding = if (current_mod <= target_mod)
|
||||
target_mod - current_mod
|
||||
else
|
||||
128 - current_mod + target_mod;
|
||||
// Zero the padding bytes to ensure deterministic output
|
||||
const writable = string_builder.writable();
|
||||
@memset(writable[0..padding], 0);
|
||||
string_builder.len += padding;
|
||||
const aligned_offset = string_builder.len;
|
||||
const writable_after_padding = string_builder.writable();
|
||||
@memcpy(writable_after_padding[0..bytecode.len], bytecode[0..bytecode.len]);
|
||||
const unaligned_space = writable_after_padding[bytecode.len..];
|
||||
const aligned = std.mem.alignInSlice(string_builder.writable(), 128).?;
|
||||
@memcpy(aligned[0..bytecode.len], bytecode[0..bytecode.len]);
|
||||
const unaligned_space = aligned[bytecode.len..];
|
||||
const offset = @intFromPtr(aligned.ptr) - @intFromPtr(string_builder.ptr.?);
|
||||
const len = bytecode.len + @min(unaligned_space.len, 128);
|
||||
string_builder.len += len;
|
||||
break :brk StringPointer{ .offset = @truncate(aligned_offset), .length = @truncate(len) };
|
||||
break :brk StringPointer{ .offset = @truncate(offset), .length = @truncate(len) };
|
||||
} else {
|
||||
break :brk .{};
|
||||
}
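The alignment rule spelled out in the comments above, `(current_offset + padding) % 128 == 120` with `target_mod = 128 - 8`, is easier to check with a small worked example. This TypeScript mirror of the arithmetic is purely illustrative.

```ts
// Padding so that (offset + padding) % 128 === 120, i.e. the bytecode lands on a
// 128-byte boundary once the 8-byte section header is accounted for.
function bytecodePadding(currentOffset: number): number {
  const targetMod = 128 - 8; // 120
  const currentMod = currentOffset % 128;
  return currentMod <= targetMod
    ? targetMod - currentMod
    : 128 - currentMod + targetMod;
}

bytecodePadding(130); // 118, and (130 + 118) % 128 === 120
bytecodePadding(125); // 123, and (125 + 123) % 128 === 120
```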
|
||||
|
||||
@@ -11,7 +11,6 @@ pub const AllocationScopeIn = allocation_scope.AllocationScopeIn;
|
||||
pub const NullableAllocator = @import("./allocators/NullableAllocator.zig");
|
||||
pub const MaxHeapAllocator = @import("./allocators/MaxHeapAllocator.zig");
|
||||
pub const LinuxMemFdAllocator = @import("./allocators/LinuxMemFdAllocator.zig");
|
||||
pub const BufferFallbackAllocator = @import("./allocators/BufferFallbackAllocator.zig");
|
||||
pub const MaybeOwned = @import("./allocators/maybe_owned.zig").MaybeOwned;
|
||||
|
||||
pub fn isSliceInBufferT(comptime T: type, slice: []const T, buffer: []const T) bool {
|
||||
|
||||
@@ -1,85 +0,0 @@
|
||||
/// An allocator that attempts to allocate from a provided buffer first,
|
||||
/// falling back to another allocator when the buffer is exhausted.
|
||||
/// Unlike `std.heap.StackFallbackAllocator`, this does not own the buffer.
|
||||
const BufferFallbackAllocator = @This();
|
||||
|
||||
#fallback_allocator: Allocator,
|
||||
#fixed_buffer_allocator: FixedBufferAllocator,
|
||||
|
||||
pub fn init(buffer: []u8, fallback_allocator: Allocator) BufferFallbackAllocator {
|
||||
return .{
|
||||
.#fallback_allocator = fallback_allocator,
|
||||
.#fixed_buffer_allocator = FixedBufferAllocator.init(buffer),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn allocator(self: *BufferFallbackAllocator) Allocator {
|
||||
return .{
|
||||
.ptr = self,
|
||||
.vtable = &.{
|
||||
.alloc = alloc,
|
||||
.resize = resize,
|
||||
.remap = remap,
|
||||
.free = free,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
fn alloc(ctx: *anyopaque, len: usize, alignment: std.mem.Alignment, ra: usize) ?[*]u8 {
|
||||
const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
|
||||
return FixedBufferAllocator.alloc(
|
||||
&self.#fixed_buffer_allocator,
|
||||
len,
|
||||
alignment,
|
||||
ra,
|
||||
) orelse self.#fallback_allocator.rawAlloc(len, alignment, ra);
|
||||
}
|
||||
|
||||
fn resize(ctx: *anyopaque, buf: []u8, alignment: std.mem.Alignment, new_len: usize, ra: usize) bool {
|
||||
const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
|
||||
if (self.#fixed_buffer_allocator.ownsPtr(buf.ptr)) {
|
||||
return FixedBufferAllocator.resize(
|
||||
&self.#fixed_buffer_allocator,
|
||||
buf,
|
||||
alignment,
|
||||
new_len,
|
||||
ra,
|
||||
);
|
||||
}
|
||||
return self.#fallback_allocator.rawResize(buf, alignment, new_len, ra);
|
||||
}
|
||||
|
||||
fn remap(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, new_len: usize, ra: usize) ?[*]u8 {
|
||||
const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
|
||||
if (self.#fixed_buffer_allocator.ownsPtr(memory.ptr)) {
|
||||
return FixedBufferAllocator.remap(
|
||||
&self.#fixed_buffer_allocator,
|
||||
memory,
|
||||
alignment,
|
||||
new_len,
|
||||
ra,
|
||||
);
|
||||
}
|
||||
return self.#fallback_allocator.rawRemap(memory, alignment, new_len, ra);
|
||||
}
|
||||
|
||||
fn free(ctx: *anyopaque, buf: []u8, alignment: std.mem.Alignment, ra: usize) void {
|
||||
const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
|
||||
if (self.#fixed_buffer_allocator.ownsPtr(buf.ptr)) {
|
||||
return FixedBufferAllocator.free(
|
||||
&self.#fixed_buffer_allocator,
|
||||
buf,
|
||||
alignment,
|
||||
ra,
|
||||
);
|
||||
}
|
||||
return self.#fallback_allocator.rawFree(buf, alignment, ra);
|
||||
}
|
||||
|
||||
pub fn reset(self: *BufferFallbackAllocator) void {
|
||||
self.#fixed_buffer_allocator.reset();
|
||||
}
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const FixedBufferAllocator = std.heap.FixedBufferAllocator;
|
||||
@@ -2,10 +2,7 @@
|
||||
|
||||
const Self = @This();
|
||||
|
||||
const safety_checks = bun.Environment.isDebug or bun.Environment.enable_asan;
|
||||
|
||||
#heap: *mimalloc.Heap,
|
||||
thread_id: if (safety_checks) std.Thread.Id else void,
|
||||
#heap: if (safety_checks) Owned(*DebugHeap) else *mimalloc.Heap,
|
||||
|
||||
/// Uses the default thread-local heap. This type is zero-sized.
|
||||
///
|
||||
@@ -23,18 +20,18 @@ pub const Default = struct {
|
||||
///
|
||||
/// This type is a `GenericAllocator`; see `src/allocators.zig`.
|
||||
pub const Borrowed = struct {
|
||||
#heap: *mimalloc.Heap,
|
||||
#heap: BorrowedHeap,
|
||||
|
||||
pub fn allocator(self: Borrowed) std.mem.Allocator {
|
||||
return .{ .ptr = self.#heap, .vtable = c_allocator_vtable };
|
||||
return .{ .ptr = self.#heap, .vtable = &c_allocator_vtable };
|
||||
}
|
||||
|
||||
pub fn getDefault() Borrowed {
|
||||
return .{ .#heap = mimalloc.mi_heap_main() };
|
||||
return .{ .#heap = getThreadHeap() };
|
||||
}
|
||||
|
||||
pub fn gc(self: Borrowed) void {
|
||||
mimalloc.mi_heap_collect(self.#heap, false);
|
||||
mimalloc.mi_heap_collect(self.getMimallocHeap(), false);
|
||||
}
|
||||
|
||||
pub fn helpCatchMemoryIssues(self: Borrowed) void {
|
||||
@@ -44,17 +41,30 @@ pub const Borrowed = struct {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ownsPtr(self: Borrowed, ptr: *const anyopaque) bool {
|
||||
return mimalloc.mi_heap_check_owned(self.getMimallocHeap(), ptr);
|
||||
}
|
||||
|
||||
fn fromOpaque(ptr: *anyopaque) Borrowed {
|
||||
return .{ .#heap = @ptrCast(@alignCast(ptr)) };
|
||||
}
|
||||
|
||||
fn getMimallocHeap(self: Borrowed) *mimalloc.Heap {
|
||||
return if (comptime safety_checks) self.#heap.inner else self.#heap;
|
||||
}
|
||||
|
||||
fn assertThreadLock(self: Borrowed) void {
|
||||
if (comptime safety_checks) self.#heap.thread_lock.assertLocked();
|
||||
}
|
||||
|
||||
fn alignedAlloc(self: Borrowed, len: usize, alignment: Alignment) ?[*]u8 {
|
||||
log("Malloc: {d}\n", .{len});
|
||||
|
||||
const heap = self.getMimallocHeap();
|
||||
const ptr: ?*anyopaque = if (mimalloc.mustUseAlignedAlloc(alignment))
|
||||
mimalloc.mi_heap_malloc_aligned(self.#heap, len, alignment.toByteUnits())
|
||||
mimalloc.mi_heap_malloc_aligned(heap, len, alignment.toByteUnits())
|
||||
else
|
||||
mimalloc.mi_heap_malloc(self.#heap, len);
|
||||
mimalloc.mi_heap_malloc(heap, len);
|
||||
|
||||
if (comptime bun.Environment.isDebug) {
|
||||
const usable = mimalloc.mi_malloc_usable_size(ptr);
|
||||
@@ -79,17 +89,42 @@ pub const Borrowed = struct {
|
||||
}
|
||||
};
|
||||
|
||||
const BorrowedHeap = if (safety_checks) *DebugHeap else *mimalloc.Heap;
|
||||
|
||||
const DebugHeap = struct {
|
||||
inner: *mimalloc.Heap,
|
||||
thread_lock: bun.safety.ThreadLock,
|
||||
|
||||
pub const deinit = void;
|
||||
};
|
||||
|
||||
threadlocal var thread_heap: if (safety_checks) ?DebugHeap else void = if (safety_checks) null;
|
||||
|
||||
fn getThreadHeap() BorrowedHeap {
|
||||
if (comptime !safety_checks) return mimalloc.mi_heap_get_default();
|
||||
if (thread_heap == null) {
|
||||
thread_heap = .{
|
||||
.inner = mimalloc.mi_heap_get_default(),
|
||||
.thread_lock = .initLocked(),
|
||||
};
|
||||
}
|
||||
return &thread_heap.?;
|
||||
}
|
||||
|
||||
const log = bun.Output.scoped(.mimalloc, .hidden);
|
||||
|
||||
pub fn allocator(self: Self) std.mem.Allocator {
|
||||
self.assertThreadOwnership();
|
||||
return self.borrow().allocator();
|
||||
}
|
||||
|
||||
pub fn borrow(self: Self) Borrowed {
|
||||
return .{ .#heap = self.#heap };
|
||||
return .{ .#heap = if (comptime safety_checks) self.#heap.get() else self.#heap };
|
||||
}
|
||||
|
||||
/// Internally, mimalloc calls mi_heap_get_default()
|
||||
/// to get the default heap.
|
||||
/// It uses pthread_getspecific to do that.
|
||||
/// We can save those extra calls if we just do it once in here
|
||||
pub fn getThreadLocalDefault() std.mem.Allocator {
|
||||
if (bun.Environment.enable_asan) return bun.default_allocator;
|
||||
return Borrowed.getDefault().allocator();
|
||||
@@ -122,15 +157,22 @@ pub fn dumpStats(_: Self) void {
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Self) void {
|
||||
mimalloc.mi_heap_destroy(self.#heap);
|
||||
const mimalloc_heap = self.borrow().getMimallocHeap();
|
||||
if (comptime safety_checks) {
|
||||
self.#heap.deinit();
|
||||
}
|
||||
mimalloc.mi_heap_destroy(mimalloc_heap);
|
||||
self.* = undefined;
|
||||
}
|
||||
|
||||
pub fn init() Self {
|
||||
return .{
|
||||
.#heap = mimalloc.mi_heap_new() orelse bun.outOfMemory(),
|
||||
.thread_id = if (safety_checks) std.Thread.getCurrentId() else {},
|
||||
};
|
||||
const mimalloc_heap = mimalloc.mi_heap_new() orelse bun.outOfMemory();
|
||||
if (comptime !safety_checks) return .{ .#heap = mimalloc_heap };
|
||||
const heap: Owned(*DebugHeap) = .new(.{
|
||||
.inner = mimalloc_heap,
|
||||
.thread_lock = .initLocked(),
|
||||
});
|
||||
return .{ .#heap = heap };
|
||||
}
|
||||
|
||||
pub fn gc(self: Self) void {
|
||||
@@ -141,16 +183,8 @@ pub fn helpCatchMemoryIssues(self: Self) void {
|
||||
self.borrow().helpCatchMemoryIssues();
|
||||
}
|
||||
|
||||
fn assertThreadOwnership(self: Self) void {
|
||||
if (comptime safety_checks) {
|
||||
const current_thread = std.Thread.getCurrentId();
|
||||
if (current_thread != self.thread_id) {
|
||||
std.debug.panic(
|
||||
"MimallocArena used from wrong thread: arena belongs to thread {d}, but current thread is {d}",
|
||||
.{ self.thread_id, current_thread },
|
||||
);
|
||||
}
|
||||
}
|
||||
pub fn ownsPtr(self: Self, ptr: *const anyopaque) bool {
|
||||
return self.borrow().ownsPtr(ptr);
|
||||
}
|
||||
|
||||
fn alignedAllocSize(ptr: [*]u8) usize {
|
||||
@@ -159,10 +193,13 @@ fn alignedAllocSize(ptr: [*]u8) usize {
|
||||
|
||||
fn vtable_alloc(ptr: *anyopaque, len: usize, alignment: Alignment, _: usize) ?[*]u8 {
|
||||
const self: Borrowed = .fromOpaque(ptr);
|
||||
self.assertThreadLock();
|
||||
return self.alignedAlloc(len, alignment);
|
||||
}
|
||||
|
||||
fn vtable_resize(_: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
|
||||
fn vtable_resize(ptr: *anyopaque, buf: []u8, _: Alignment, new_len: usize, _: usize) bool {
|
||||
const self: Borrowed = .fromOpaque(ptr);
|
||||
self.assertThreadLock();
|
||||
return mimalloc.mi_expand(buf.ptr, new_len) != null;
|
||||
}
|
||||
|
||||
@@ -186,17 +223,39 @@ fn vtable_free(
|
||||
}
|
||||
}
|
||||
|
||||
/// Attempt to expand or shrink memory, allowing relocation.
|
||||
///
|
||||
/// `memory.len` must equal the length requested from the most recent
|
||||
/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
|
||||
/// equal the same value that was passed as the `alignment` parameter to
|
||||
/// the original `alloc` call.
|
||||
///
|
||||
/// A non-`null` return value indicates the resize was successful. The
|
||||
/// allocation may have same address, or may have been relocated. In either
|
||||
/// case, the allocation now has size of `new_len`. A `null` return value
|
||||
/// indicates that the resize would be equivalent to allocating new memory,
|
||||
/// copying the bytes from the old memory, and then freeing the old memory.
|
||||
/// In such case, it is more efficient for the caller to perform the copy.
|
||||
///
|
||||
/// `new_len` must be greater than zero.
|
||||
///
|
||||
/// `ret_addr` is optionally provided as the first return address of the
|
||||
/// allocation call stack. If the value is `0` it means no return address
|
||||
/// has been provided.
|
||||
fn vtable_remap(ptr: *anyopaque, buf: []u8, alignment: Alignment, new_len: usize, _: usize) ?[*]u8 {
|
||||
const self: Borrowed = .fromOpaque(ptr);
|
||||
const value = mimalloc.mi_heap_realloc_aligned(self.#heap, buf.ptr, new_len, alignment.toByteUnits());
|
||||
self.assertThreadLock();
|
||||
const heap = self.getMimallocHeap();
|
||||
const aligned_size = alignment.toByteUnits();
|
||||
const value = mimalloc.mi_heap_realloc_aligned(heap, buf.ptr, new_len, aligned_size);
|
||||
return @ptrCast(value);
|
||||
}
|
||||
|
||||
pub fn isInstance(alloc: std.mem.Allocator) bool {
|
||||
return alloc.vtable == c_allocator_vtable;
|
||||
return alloc.vtable == &c_allocator_vtable;
|
||||
}
|
||||
|
||||
const c_allocator_vtable = &std.mem.Allocator.VTable{
|
||||
const c_allocator_vtable = std.mem.Allocator.VTable{
|
||||
.alloc = vtable_alloc,
|
||||
.resize = vtable_resize,
|
||||
.remap = vtable_remap,
|
||||
@@ -209,3 +268,5 @@ const Alignment = std.mem.Alignment;
|
||||
const bun = @import("bun");
|
||||
const assert = bun.assert;
|
||||
const mimalloc = bun.mimalloc;
|
||||
const Owned = bun.ptr.Owned;
|
||||
const safety_checks = bun.Environment.ci_assert;
|
||||
|
||||
@@ -60,29 +60,17 @@ pub const Heap = opaque {
|
||||
return mi_heap_realloc(self, p, newsize);
|
||||
}
|
||||
|
||||
pub fn isOwned(self: *Heap, p: ?*const anyopaque) bool {
|
||||
return mi_heap_contains(self, p);
|
||||
pub fn isOwned(self: *Heap, p: ?*anyopaque) bool {
|
||||
return mi_heap_check_owned(self, p);
|
||||
}
|
||||
};
|
||||
pub extern fn mi_heap_new() ?*Heap;
|
||||
pub extern fn mi_heap_delete(heap: *Heap) void;
|
||||
pub extern fn mi_heap_destroy(heap: *Heap) void;
|
||||
pub extern fn mi_heap_set_default(heap: *Heap) *Heap;
|
||||
pub extern fn mi_heap_get_default() *Heap;
|
||||
pub extern fn mi_heap_get_backing() *Heap;
|
||||
pub extern fn mi_heap_collect(heap: *Heap, force: bool) void;
|
||||
pub extern fn mi_heap_main() *Heap;
|
||||
|
||||
// Thread-local heap (theap) API - new in mimalloc v3
|
||||
pub const THeap = opaque {};
|
||||
pub extern fn mi_theap_get_default() *THeap;
|
||||
pub extern fn mi_theap_set_default(theap: *THeap) *THeap;
|
||||
pub extern fn mi_theap_collect(theap: *THeap, force: bool) void;
|
||||
pub extern fn mi_theap_malloc(theap: *THeap, size: usize) ?*anyopaque;
|
||||
pub extern fn mi_theap_zalloc(theap: *THeap, size: usize) ?*anyopaque;
|
||||
pub extern fn mi_theap_calloc(theap: *THeap, count: usize, size: usize) ?*anyopaque;
|
||||
pub extern fn mi_theap_malloc_small(theap: *THeap, size: usize) ?*anyopaque;
|
||||
pub extern fn mi_theap_malloc_aligned(theap: *THeap, size: usize, alignment: usize) ?*anyopaque;
|
||||
pub extern fn mi_theap_realloc(theap: *THeap, p: ?*anyopaque, newsize: usize) ?*anyopaque;
|
||||
pub extern fn mi_theap_destroy(theap: *THeap) void;
|
||||
pub extern fn mi_heap_theap(heap: *Heap) *THeap;
|
||||
pub extern fn mi_heap_malloc(heap: *Heap, size: usize) ?*anyopaque;
|
||||
pub extern fn mi_heap_zalloc(heap: *Heap, size: usize) ?*anyopaque;
|
||||
pub extern fn mi_heap_calloc(heap: *Heap, count: usize, size: usize) ?*anyopaque;
|
||||
@@ -114,7 +102,8 @@ pub extern fn mi_heap_rezalloc_aligned(heap: *Heap, p: ?*anyopaque, newsize: usi
|
||||
pub extern fn mi_heap_rezalloc_aligned_at(heap: *Heap, p: ?*anyopaque, newsize: usize, alignment: usize, offset: usize) ?*anyopaque;
|
||||
pub extern fn mi_heap_recalloc_aligned(heap: *Heap, p: ?*anyopaque, newcount: usize, size: usize, alignment: usize) ?*anyopaque;
|
||||
pub extern fn mi_heap_recalloc_aligned_at(heap: *Heap, p: ?*anyopaque, newcount: usize, size: usize, alignment: usize, offset: usize) ?*anyopaque;
|
||||
pub extern fn mi_heap_contains(heap: *const Heap, p: ?*const anyopaque) bool;
|
||||
pub extern fn mi_heap_contains_block(heap: *Heap, p: *const anyopaque) bool;
|
||||
pub extern fn mi_heap_check_owned(heap: *Heap, p: *const anyopaque) bool;
|
||||
pub extern fn mi_check_owned(p: ?*const anyopaque) bool;
|
||||
pub const struct_mi_heap_area_s = extern struct {
|
||||
blocks: ?*anyopaque,
|
||||
|
||||
@@ -343,8 +343,6 @@ pub const api = struct {
|
||||
sqlite_embedded = 17,
|
||||
html = 18,
|
||||
yaml = 19,
|
||||
json5 = 20,
|
||||
md = 21,
|
||||
_,
|
||||
|
||||
pub fn jsonStringify(self: @This(), writer: anytype) !void {
|
||||
|
||||
@@ -320,8 +320,9 @@ pub const Runner = struct {
|
||||
.Null => return Expr.init(E.Null, E.Null{}, this.caller.loc),
|
||||
.Private => {
|
||||
this.is_top_level = false;
|
||||
if (this.visited.get(value)) |cached| {
|
||||
return cached;
|
||||
const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
|
||||
if (_entry.found_existing) {
|
||||
return _entry.value_ptr.*;
|
||||
}
|
||||
|
||||
var blob_: ?*const jsc.WebCore.Blob = null;
|
||||
@@ -469,8 +470,9 @@ pub const Runner = struct {
|
||||
return Expr.init(E.String, E.String.init(out_slice), this.caller.loc);
|
||||
},
|
||||
.Promise => {
|
||||
if (this.visited.get(value)) |cached| {
|
||||
return cached;
|
||||
const _entry = this.visited.getOrPut(this.allocator, value) catch unreachable;
|
||||
if (_entry.found_existing) {
|
||||
return _entry.value_ptr.*;
|
||||
}
|
||||
|
||||
const promise = value.asAnyPromise() orelse @panic("Unexpected promise type");
|
||||
@@ -492,7 +494,7 @@ pub const Runner = struct {
|
||||
this.is_top_level = false;
|
||||
const result = try this.run(promise_result);
|
||||
|
||||
this.visited.put(this.allocator, value, result) catch unreachable;
|
||||
_entry.value_ptr.* = result;
|
||||
return result;
|
||||
},
|
||||
else => {},
|
||||
|
||||
@@ -6467,11 +6467,6 @@ pub fn NewParser_(
|
||||
parts.items[0].stmts = top_level_stmts;
|
||||
}
|
||||
|
||||
// REPL mode transforms
|
||||
if (p.options.repl_mode) {
|
||||
try repl_transforms.ReplTransforms(P).apply(p, parts, allocator);
|
||||
}
|
||||
|
||||
var top_level_symbols_to_parts = js_ast.Ast.TopLevelSymbolToParts{};
|
||||
var top_level = &top_level_symbols_to_parts;
|
||||
|
||||
@@ -6532,9 +6527,7 @@ pub fn NewParser_(
|
||||
break :brk p.hmr_api_ref;
|
||||
}
|
||||
|
||||
// When code splitting is enabled, always create wrapper_ref to match esbuild behavior.
|
||||
// Otherwise, use needsWrapperRef() to optimize away unnecessary wrappers.
|
||||
if (p.options.bundle and (p.options.code_splitting or p.needsWrapperRef(parts.items))) {
|
||||
if (p.options.bundle and p.needsWrapperRef(parts.items)) {
|
||||
break :brk p.newSymbol(
|
||||
.other,
|
||||
std.fmt.allocPrint(
|
||||
@@ -6767,8 +6760,6 @@ var falseValueExpr = Expr.Data{ .e_boolean = E.Boolean{ .value = false } };
|
||||
|
||||
const string = []const u8;
|
||||
|
||||
const repl_transforms = @import("./repl_transforms.zig");
|
||||
|
||||
const Define = @import("../defines.zig").Define;
|
||||
const DefineData = @import("../defines.zig").DefineData;
|
||||
|
||||
|
||||
@@ -19,7 +19,6 @@ pub const Parser = struct {
|
||||
|
||||
tree_shaking: bool = false,
|
||||
bundle: bool = false,
|
||||
code_splitting: bool = false,
|
||||
package_version: string = "",
|
||||
|
||||
macro_context: *MacroContextType() = undefined,
|
||||
@@ -39,13 +38,6 @@ pub const Parser = struct {
|
||||
/// able to customize what import sources are used.
|
||||
framework: ?*bun.bake.Framework = null,
|
||||
|
||||
/// REPL mode: transforms code for interactive evaluation
|
||||
/// - Wraps lone object literals `{...}` in parentheses
|
||||
/// - Hoists variable declarations for REPL persistence
|
||||
/// - Wraps last expression in { value: expr } for result capture
|
||||
/// - Wraps code with await in async IIFE
|
||||
repl_mode: bool = false,
|
||||
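For context on the option being removed here: the REPL transform rewrote each evaluated chunk so that declarations persist across evaluations and the value of the final expression can be captured. A hand-written sketch of the shape it produced (illustrative only, not actual transpiler output):

// REPL input
const x = 1 + 2;
x * 10

// roughly what the transform emitted
var x;                                        // hoisted so it becomes a property of the vm context
(() => {
  x = 1 + 2;                                  // initializer rewritten as an assignment
  return { __proto__: null, value: x * 10 };  // last expression captured as the result
})();

When the input contained top-level await, the arrow was emitted as `async`, so the evaluated result was a promise for the same wrapper object.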
|
||||
pub fn hashForRuntimeTranspiler(this: *const Options, hasher: *std.hash.Wyhash, did_use_jsx: bool) void {
|
||||
bun.assert(!this.bundle);
|
||||
|
||||
|
||||
@@ -1,365 +0,0 @@
|
||||
/// REPL Transform module - transforms code for interactive REPL evaluation
|
||||
///
|
||||
/// This module provides transformations for REPL mode:
|
||||
/// - Wraps the last expression in { value: expr } for result capture
|
||||
/// - Wraps code with await in async IIFE with variable hoisting
|
||||
/// - Hoists declarations for variable persistence across REPL lines
|
||||
pub fn ReplTransforms(comptime P: type) type {
|
||||
return struct {
|
||||
const Self = @This();
|
||||
|
||||
/// Apply REPL-mode transforms to the AST.
|
||||
/// This transforms code for interactive evaluation:
|
||||
/// - Wraps the last expression in { value: expr } for result capture
|
||||
/// - Wraps code with await in async IIFE with variable hoisting
|
||||
pub fn apply(p: *P, parts: *ListManaged(js_ast.Part), allocator: Allocator) !void {
|
||||
// Skip transform if there's a top-level return (indicates module pattern)
|
||||
if (p.has_top_level_return) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Collect all statements
|
||||
var total_stmts_count: usize = 0;
|
||||
for (parts.items) |part| {
|
||||
total_stmts_count += part.stmts.len;
|
||||
}
|
||||
|
||||
if (total_stmts_count == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if there's top-level await
|
||||
const has_top_level_await = p.top_level_await_keyword.len > 0;
|
||||
|
||||
// Collect all statements into a single array
|
||||
var all_stmts = bun.handleOom(allocator.alloc(Stmt, total_stmts_count));
|
||||
var stmt_idx: usize = 0;
|
||||
for (parts.items) |part| {
|
||||
for (part.stmts) |stmt| {
|
||||
all_stmts[stmt_idx] = stmt;
|
||||
stmt_idx += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Apply transform with is_async based on presence of top-level await
|
||||
try transformWithHoisting(p, parts, all_stmts, allocator, has_top_level_await);
|
||||
}
|
||||
|
||||
/// Transform code with hoisting and IIFE wrapper
|
||||
/// @param is_async: true for async IIFE (when top-level await present), false for sync IIFE
|
||||
fn transformWithHoisting(
|
||||
p: *P,
|
||||
parts: *ListManaged(js_ast.Part),
|
||||
all_stmts: []Stmt,
|
||||
allocator: Allocator,
|
||||
is_async: bool,
|
||||
) !void {
|
||||
if (all_stmts.len == 0) return;
|
||||
|
||||
// Lists for hoisted declarations and inner statements
|
||||
var hoisted_stmts = ListManaged(Stmt).init(allocator);
|
||||
var inner_stmts = ListManaged(Stmt).init(allocator);
|
||||
try hoisted_stmts.ensureTotalCapacity(all_stmts.len);
|
||||
try inner_stmts.ensureTotalCapacity(all_stmts.len);
|
||||
|
||||
// Process each statement - hoist all declarations for REPL persistence
|
||||
for (all_stmts) |stmt| {
|
||||
switch (stmt.data) {
|
||||
.s_local => |local| {
|
||||
// Hoist all declarations as var so they become context properties
|
||||
// In sloppy mode, var at top level becomes a property of the global/context object
|
||||
// This is essential for REPL variable persistence across vm.runInContext calls
|
||||
const kind: S.Local.Kind = .k_var;
|
||||
|
||||
// Extract individual identifiers from binding patterns for hoisting
|
||||
var hoisted_decl_list = ListManaged(G.Decl).init(allocator);
|
||||
for (local.decls.slice()) |decl| {
|
||||
try extractIdentifiersFromBinding(p, decl.binding, &hoisted_decl_list);
|
||||
}
|
||||
|
||||
if (hoisted_decl_list.items.len > 0) {
|
||||
try hoisted_stmts.append(p.s(S.Local{
|
||||
.kind = kind,
|
||||
.decls = Decl.List.fromOwnedSlice(hoisted_decl_list.items),
|
||||
}, stmt.loc));
|
||||
}
|
||||
|
||||
// Create assignment expressions for the inner statements
|
||||
for (local.decls.slice()) |decl| {
|
||||
if (decl.value) |value| {
|
||||
// Create assignment expression: binding = value
|
||||
const assign_expr = createBindingAssignment(p, decl.binding, value, allocator);
|
||||
try inner_stmts.append(p.s(S.SExpr{ .value = assign_expr }, stmt.loc));
|
||||
}
|
||||
}
|
||||
},
|
||||
.s_function => |func| {
|
||||
// For function declarations:
|
||||
// Hoist as: var funcName;
|
||||
// Inner: this.funcName = funcName; function funcName() {}
|
||||
if (func.func.name) |name_loc| {
|
||||
try hoisted_stmts.append(p.s(S.Local{
|
||||
.kind = .k_var,
|
||||
.decls = Decl.List.fromOwnedSlice(bun.handleOom(allocator.dupe(G.Decl, &.{
|
||||
G.Decl{
|
||||
.binding = p.b(B.Identifier{ .ref = name_loc.ref.? }, name_loc.loc),
|
||||
.value = null,
|
||||
},
|
||||
}))),
|
||||
}, stmt.loc));
|
||||
|
||||
// Add this.funcName = funcName assignment
|
||||
const this_expr = p.newExpr(E.This{}, stmt.loc);
|
||||
const this_dot = p.newExpr(E.Dot{
|
||||
.target = this_expr,
|
||||
.name = p.symbols.items[name_loc.ref.?.innerIndex()].original_name,
|
||||
.name_loc = name_loc.loc,
|
||||
}, stmt.loc);
|
||||
const func_id = p.newExpr(E.Identifier{ .ref = name_loc.ref.? }, name_loc.loc);
|
||||
const assign = p.newExpr(E.Binary{
|
||||
.op = .bin_assign,
|
||||
.left = this_dot,
|
||||
.right = func_id,
|
||||
}, stmt.loc);
|
||||
try inner_stmts.append(p.s(S.SExpr{ .value = assign }, stmt.loc));
|
||||
}
|
||||
// Add the function declaration itself
|
||||
try inner_stmts.append(stmt);
|
||||
},
|
||||
.s_class => |class| {
|
||||
// For class declarations:
|
||||
// Hoist as: var ClassName; (use var so it persists to vm context)
|
||||
// Inner: ClassName = class ClassName {}
|
||||
if (class.class.class_name) |name_loc| {
|
||||
try hoisted_stmts.append(p.s(S.Local{
|
||||
.kind = .k_var,
|
||||
.decls = Decl.List.fromOwnedSlice(bun.handleOom(allocator.dupe(G.Decl, &.{
|
||||
G.Decl{
|
||||
.binding = p.b(B.Identifier{ .ref = name_loc.ref.? }, name_loc.loc),
|
||||
.value = null,
|
||||
},
|
||||
}))),
|
||||
}, stmt.loc));
|
||||
|
||||
// Convert class declaration to assignment: ClassName = class ClassName {}
|
||||
const class_expr = p.newExpr(class.class, stmt.loc);
|
||||
const class_id = p.newExpr(E.Identifier{ .ref = name_loc.ref.? }, name_loc.loc);
|
||||
const assign = p.newExpr(E.Binary{
|
||||
.op = .bin_assign,
|
||||
.left = class_id,
|
||||
.right = class_expr,
|
||||
}, stmt.loc);
|
||||
try inner_stmts.append(p.s(S.SExpr{ .value = assign }, stmt.loc));
|
||||
} else {
|
||||
try inner_stmts.append(stmt);
|
||||
}
|
||||
},
|
||||
.s_directive => |directive| {
|
||||
// In REPL mode, treat directives (string literals) as expressions
|
||||
const str_expr = p.newExpr(E.String{ .data = directive.value }, stmt.loc);
|
||||
try inner_stmts.append(p.s(S.SExpr{ .value = str_expr }, stmt.loc));
|
||||
},
|
||||
else => {
|
||||
try inner_stmts.append(stmt);
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
// Wrap the last expression in return { value: expr }
|
||||
wrapLastExpressionWithReturn(p, &inner_stmts, allocator);
|
||||
|
||||
// Create the IIFE: (() => { ...inner_stmts... })() or (async () => { ... })()
|
||||
const arrow = p.newExpr(E.Arrow{
|
||||
.args = &.{},
|
||||
.body = .{ .loc = logger.Loc.Empty, .stmts = inner_stmts.items },
|
||||
.is_async = is_async,
|
||||
}, logger.Loc.Empty);
|
||||
|
||||
const iife = p.newExpr(E.Call{
|
||||
.target = arrow,
|
||||
.args = ExprNodeList{},
|
||||
}, logger.Loc.Empty);
|
||||
|
||||
// Final output: hoisted declarations + IIFE call
|
||||
const final_stmts_count = hoisted_stmts.items.len + 1;
|
||||
var final_stmts = bun.handleOom(allocator.alloc(Stmt, final_stmts_count));
|
||||
for (hoisted_stmts.items, 0..) |stmt, j| {
|
||||
final_stmts[j] = stmt;
|
||||
}
|
||||
final_stmts[hoisted_stmts.items.len] = p.s(S.SExpr{ .value = iife }, logger.Loc.Empty);
|
||||
|
||||
// Update parts
|
||||
if (parts.items.len > 0) {
|
||||
parts.items[0].stmts = final_stmts;
|
||||
parts.items.len = 1;
|
||||
}
|
||||
}
|
||||
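The function and class branches above (in this removed transform) split each declaration into a hoisted `var` plus assignments inside the IIFE so the binding lands on the vm context object, as the comments describe. A minimal illustration of that shape:

// input
function greet() { return "hi"; }
class Point {}

// hoisted, outside the IIFE
var greet;
var Point;

// inside the IIFE
this.greet = greet;                 // function branch: expose on the context object
function greet() { return "hi"; }   // original declaration kept so it hoists normally
Point = class Point {};             // class branch: declaration converted to an assignment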
|
||||
/// Wrap the last expression in return { value: expr }
|
||||
fn wrapLastExpressionWithReturn(p: *P, inner_stmts: *ListManaged(Stmt), allocator: Allocator) void {
|
||||
if (inner_stmts.items.len > 0) {
|
||||
var last_idx: usize = inner_stmts.items.len;
|
||||
while (last_idx > 0) {
|
||||
last_idx -= 1;
|
||||
const last_stmt = inner_stmts.items[last_idx];
|
||||
switch (last_stmt.data) {
|
||||
.s_empty, .s_comment => continue,
|
||||
.s_expr => |expr_data| {
|
||||
// Wrap in return { value: expr }
|
||||
const wrapped = wrapExprInValueObject(p, expr_data.value, allocator);
|
||||
inner_stmts.items[last_idx] = p.s(S.Return{ .value = wrapped }, last_stmt.loc);
|
||||
break;
|
||||
},
|
||||
else => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract individual identifiers from a binding pattern for hoisting
|
||||
fn extractIdentifiersFromBinding(p: *P, binding: Binding, decls: *ListManaged(G.Decl)) !void {
|
||||
switch (binding.data) {
|
||||
.b_identifier => |ident| {
|
||||
try decls.append(G.Decl{
|
||||
.binding = p.b(B.Identifier{ .ref = ident.ref }, binding.loc),
|
||||
.value = null,
|
||||
});
|
||||
},
|
||||
.b_array => |arr| {
|
||||
for (arr.items) |item| {
|
||||
try extractIdentifiersFromBinding(p, item.binding, decls);
|
||||
}
|
||||
},
|
||||
.b_object => |obj| {
|
||||
for (obj.properties) |prop| {
|
||||
try extractIdentifiersFromBinding(p, prop.value, decls);
|
||||
}
|
||||
},
|
||||
.b_missing => {},
|
||||
}
|
||||
}
|
||||
|
||||
/// Create { __proto__: null, value: expr } wrapper object
|
||||
/// Uses null prototype to create a clean data object
|
||||
fn wrapExprInValueObject(p: *P, expr: Expr, allocator: Allocator) Expr {
|
||||
var properties = bun.handleOom(allocator.alloc(G.Property, 2));
|
||||
// __proto__: null - creates null-prototype object
|
||||
properties[0] = G.Property{
|
||||
.key = p.newExpr(E.String{ .data = "__proto__" }, expr.loc),
|
||||
.value = p.newExpr(E.Null{}, expr.loc),
|
||||
};
|
||||
// value: expr - the actual result value
|
||||
properties[1] = G.Property{
|
||||
.key = p.newExpr(E.String{ .data = "value" }, expr.loc),
|
||||
.value = expr,
|
||||
};
|
||||
return p.newExpr(E.Object{
|
||||
.properties = G.Property.List.fromOwnedSlice(properties),
|
||||
}, expr.loc);
|
||||
}
|
||||
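Equivalent object literal for the wrapper built above, presumably using a null prototype so that reading `.value` cannot be affected by properties added to Object.prototype (`lastExpressionValue` is a placeholder name):

const result = { __proto__: null, value: lastExpressionValue };
// Object.getPrototypeOf(result) === null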
|
||||
/// Create assignment expression from binding pattern
|
||||
fn createBindingAssignment(p: *P, binding: Binding, value: Expr, allocator: Allocator) Expr {
|
||||
switch (binding.data) {
|
||||
.b_identifier => |ident| {
|
||||
return p.newExpr(E.Binary{
|
||||
.op = .bin_assign,
|
||||
.left = p.newExpr(E.Identifier{ .ref = ident.ref }, binding.loc),
|
||||
.right = value,
|
||||
}, binding.loc);
|
||||
},
|
||||
.b_array => {
|
||||
// For array destructuring, create: [a, b] = value
|
||||
return p.newExpr(E.Binary{
|
||||
.op = .bin_assign,
|
||||
.left = convertBindingToExpr(p, binding, allocator),
|
||||
.right = value,
|
||||
}, binding.loc);
|
||||
},
|
||||
.b_object => {
|
||||
// For object destructuring, create: {a, b} = value
|
||||
return p.newExpr(E.Binary{
|
||||
.op = .bin_assign,
|
||||
.left = convertBindingToExpr(p, binding, allocator),
|
||||
.right = value,
|
||||
}, binding.loc);
|
||||
},
|
||||
.b_missing => {
|
||||
// Return Missing expression to match convertBindingToExpr
|
||||
return p.newExpr(E.Missing{}, binding.loc);
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert a binding pattern to an expression (for assignment targets)
|
||||
/// Handles spread/rest patterns in arrays and objects to match Binding.toExpr behavior
|
||||
fn convertBindingToExpr(p: *P, binding: Binding, allocator: Allocator) Expr {
|
||||
switch (binding.data) {
|
||||
.b_identifier => |ident| {
|
||||
return p.newExpr(E.Identifier{ .ref = ident.ref }, binding.loc);
|
||||
},
|
||||
.b_array => |arr| {
|
||||
var items = bun.handleOom(allocator.alloc(Expr, arr.items.len));
|
||||
for (arr.items, 0..) |item, i| {
|
||||
const expr = convertBindingToExpr(p, item.binding, allocator);
|
||||
// Check for spread pattern: if has_spread and this is the last element
|
||||
if (arr.has_spread and i == arr.items.len - 1) {
|
||||
items[i] = p.newExpr(E.Spread{ .value = expr }, expr.loc);
|
||||
} else if (item.default_value) |default_val| {
|
||||
items[i] = p.newExpr(E.Binary{
|
||||
.op = .bin_assign,
|
||||
.left = expr,
|
||||
.right = default_val,
|
||||
}, item.binding.loc);
|
||||
} else {
|
||||
items[i] = expr;
|
||||
}
|
||||
}
|
||||
return p.newExpr(E.Array{
|
||||
.items = ExprNodeList.fromOwnedSlice(items),
|
||||
.is_single_line = arr.is_single_line,
|
||||
}, binding.loc);
|
||||
},
|
||||
.b_object => |obj| {
|
||||
var properties = bun.handleOom(allocator.alloc(G.Property, obj.properties.len));
|
||||
for (obj.properties, 0..) |prop, i| {
|
||||
properties[i] = G.Property{
|
||||
.flags = prop.flags,
|
||||
.key = prop.key,
|
||||
// Set kind to .spread if the property has spread flag
|
||||
.kind = if (prop.flags.contains(.is_spread)) .spread else .normal,
|
||||
.value = convertBindingToExpr(p, prop.value, allocator),
|
||||
.initializer = prop.default_value,
|
||||
};
|
||||
}
|
||||
return p.newExpr(E.Object{
|
||||
.properties = G.Property.List.fromOwnedSlice(properties),
|
||||
.is_single_line = obj.is_single_line,
|
||||
}, binding.loc);
|
||||
},
|
||||
.b_missing => {
|
||||
return p.newExpr(E.Missing{}, binding.loc);
|
||||
},
|
||||
}
|
||||
}
|
||||
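convertBindingToExpr mirrors the original binding pattern as an assignment target, including defaults, holes, and rest elements. An illustrative pairing of input and the produced shape (hand-written; `obj` and `arr` are placeholder inputs):

// input
const { a, b = 1, ...rest } = obj;
const [x, , ...ys] = arr;

// hoisted
var a, b, rest, x, ys;

// inside the IIFE: the same patterns reused as assignment targets
({ a, b = 1, ...rest } = obj);
[x, , ...ys] = arr;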
};
|
||||
}
|
||||
|
||||
const std = @import("std");
|
||||
const Allocator = std.mem.Allocator;
|
||||
const ListManaged = std.array_list.Managed;
|
||||
|
||||
const bun = @import("bun");
|
||||
const logger = bun.logger;
|
||||
|
||||
const js_ast = bun.ast;
|
||||
const B = js_ast.B;
|
||||
const Binding = js_ast.Binding;
|
||||
const E = js_ast.E;
|
||||
const Expr = js_ast.Expr;
|
||||
const ExprNodeList = js_ast.ExprNodeList;
|
||||
const S = js_ast.S;
|
||||
const Stmt = js_ast.Stmt;
|
||||
|
||||
const G = js_ast.G;
|
||||
const Decl = G.Decl;
|
||||
@@ -1355,7 +1355,7 @@ fn computeArgumentsForFrameworkRequest(
|
||||
const relative_path_buf = bun.path_buffer_pool.get();
|
||||
defer bun.path_buffer_pool.put(relative_path_buf);
|
||||
var route_name = bun.String.cloneUTF8(dev.relativePath(relative_path_buf, keys[fromOpaqueFileId(.server, route.file_page.unwrap().?).get()]));
|
||||
try arr.putIndex(global, 0, try route_name.transferToJS(global));
|
||||
try arr.putIndex(global, 0, route_name.transferToJS(global));
|
||||
}
|
||||
n = 1;
|
||||
while (true) {
|
||||
@@ -1366,7 +1366,7 @@ fn computeArgumentsForFrameworkRequest(
|
||||
relative_path_buf,
|
||||
keys[fromOpaqueFileId(.server, layout).get()],
|
||||
));
|
||||
try arr.putIndex(global, @intCast(n), try layout_name.transferToJS(global));
|
||||
try arr.putIndex(global, @intCast(n), layout_name.transferToJS(global));
|
||||
n += 1;
|
||||
}
|
||||
route = dev.router.routePtr(route.parent.unwrap() orelse break);
|
||||
@@ -1383,7 +1383,7 @@ fn computeArgumentsForFrameworkRequest(
|
||||
std.mem.asBytes(&generation),
|
||||
}) catch |err| bun.handleOom(err);
|
||||
defer str.deref();
|
||||
const js = try str.toJS(dev.vm.global);
|
||||
const js = str.toJS(dev.vm.global);
|
||||
framework_bundle.cached_client_bundle_url = .create(js, dev.vm.global);
|
||||
break :str js;
|
||||
},
|
||||
@@ -2091,7 +2091,7 @@ fn generateCssJSArray(dev: *DevServer, route_bundle: *RouteBundle) bun.JSError!j
|
||||
}) catch unreachable;
|
||||
const str = bun.String.cloneUTF8(path);
|
||||
defer str.deref();
|
||||
try arr.putIndex(dev.vm.global, @intCast(i), try str.toJS(dev.vm.global));
|
||||
try arr.putIndex(dev.vm.global, @intCast(i), str.toJS(dev.vm.global));
|
||||
}
|
||||
return arr;
|
||||
}
|
||||
@@ -2136,7 +2136,7 @@ fn makeArrayForServerComponentsPatch(dev: *DevServer, global: *jsc.JSGlobalObjec
|
||||
defer bun.path_buffer_pool.put(relative_path_buf);
|
||||
const str = bun.String.cloneUTF8(dev.relativePath(relative_path_buf, names[item.get()]));
|
||||
defer str.deref();
|
||||
try arr.putIndex(global, @intCast(i), try str.toJS(global));
|
||||
try arr.putIndex(global, @intCast(i), str.toJS(global));
|
||||
}
|
||||
return arr;
|
||||
}
|
||||
@@ -2845,7 +2845,7 @@ pub fn finalizeBundle(
|
||||
|
||||
if (dev.bundling_failures.count() == 0) {
|
||||
if (current_bundle.had_reload_event) {
|
||||
const clear_terminal = !debug.isVisible() and !dev.vm.transpiler.env.hasSetNoClearTerminalOnReload(false);
|
||||
const clear_terminal = !debug.isVisible();
|
||||
if (clear_terminal) {
|
||||
Output.disableBuffering();
|
||||
Output.resetTerminalAll();
|
||||
|
||||
@@ -48,7 +48,6 @@ pub fn trackResolutionFailure(store: *DirectoryWatchStore, import_source: []cons
.jsonc,
.toml,
.yaml,
.json5,
.wasm,
.napi,
.base64,
@@ -57,7 +56,6 @@ pub fn trackResolutionFailure(store: *DirectoryWatchStore, import_source: []cons
.bunsh,
.sqlite,
.sqlite_embedded,
.md,
=> bun.debugAssert(false),
}


@@ -839,7 +839,7 @@ pub const MatchedParams = struct {
|
||||
const value_str = bun.String.cloneUTF8(param.value);
|
||||
defer value_str.deref();
|
||||
|
||||
_ = obj.putBunStringOneOrArray(global, &key_str, value_str.toJS(global) catch unreachable) catch unreachable;
|
||||
_ = obj.putBunStringOneOrArray(global, &key_str, value_str.toJS(global)) catch unreachable;
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
@@ -1247,7 +1247,7 @@ pub const JSFrameworkRouter = struct {
|
||||
for (params_out.params.slice()) |param| {
|
||||
const value = bun.String.cloneUTF8(param.value);
|
||||
defer value.deref();
|
||||
obj.put(global, param.key, try value.toJS(global));
|
||||
obj.put(global, param.key, value.toJS(global));
|
||||
}
|
||||
break :params obj;
|
||||
} else .null,
|
||||
@@ -1271,8 +1271,8 @@ pub const JSFrameworkRouter = struct {
|
||||
const route = jsfr.router.routePtr(route_index);
|
||||
return (try jsc.JSObject.create(.{
|
||||
.part = try partToJS(global, route.part, allocator),
|
||||
.page = try jsfr.fileIdToJS(global, route.file_page),
|
||||
.layout = try jsfr.fileIdToJS(global, route.file_layout),
|
||||
.page = jsfr.fileIdToJS(global, route.file_page),
|
||||
.layout = jsfr.fileIdToJS(global, route.file_layout),
|
||||
// .notFound = jsfr.fileIdToJS(global, route.file_not_found),
|
||||
.children = brk: {
|
||||
var len: usize = 0;
|
||||
@@ -1295,8 +1295,8 @@ pub const JSFrameworkRouter = struct {
|
||||
const route = jsfr.router.routePtr(route_index);
|
||||
return (try jsc.JSObject.create(.{
|
||||
.part = try partToJS(global, route.part, allocator),
|
||||
.page = try jsfr.fileIdToJS(global, route.file_page),
|
||||
.layout = try jsfr.fileIdToJS(global, route.file_layout),
|
||||
.page = jsfr.fileIdToJS(global, route.file_page),
|
||||
.layout = jsfr.fileIdToJS(global, route.file_layout),
|
||||
// .notFound = jsfr.fileIdToJS(global, route.file_not_found),
|
||||
.parent = if (route.parent.unwrap()) |parent|
|
||||
try routeToJsonInverse(jsfr, global, parent, allocator)
|
||||
@@ -1341,8 +1341,8 @@ pub const JSFrameworkRouter = struct {
|
||||
|
||||
var out = bun.String.init(rendered.items);
|
||||
const obj = JSValue.createEmptyObject(global, 2);
|
||||
obj.put(global, "kind", try bun.String.static(@tagName(parsed.kind)).toJS(global));
|
||||
obj.put(global, "pattern", try out.transferToJS(global));
|
||||
obj.put(global, "kind", bun.String.static(@tagName(parsed.kind)).toJS(global));
|
||||
obj.put(global, "pattern", out.transferToJS(global));
|
||||
return obj;
|
||||
}
|
||||
|
||||
@@ -1352,7 +1352,7 @@ pub const JSFrameworkRouter = struct {
|
||||
var it = pattern.iterate();
|
||||
while (it.next()) |part| try part.toStringForInternalUse(rendered.writer());
|
||||
var str = bun.String.cloneUTF8(rendered.items);
|
||||
return try str.transferToJS(global);
|
||||
return str.transferToJS(global);
|
||||
}
|
||||
|
||||
fn partToJS(global: *JSGlobalObject, part: Part, temp_allocator: Allocator) !JSValue {
|
||||
@@ -1360,7 +1360,7 @@ pub const JSFrameworkRouter = struct {
|
||||
defer rendered.deinit();
|
||||
try part.toStringForInternalUse(rendered.writer());
|
||||
var str = bun.String.cloneUTF8(rendered.items);
|
||||
return try str.transferToJS(global);
|
||||
return str.transferToJS(global);
|
||||
}
|
||||
|
||||
pub fn getFileIdForRouter(jsfr: *JSFrameworkRouter, abs_path: []const u8, _: Route.Index, _: Route.FileKind) !OpaqueFileId {
|
||||
@@ -1377,7 +1377,7 @@ pub const JSFrameworkRouter = struct {
|
||||
});
|
||||
}
|
||||
|
||||
pub fn fileIdToJS(jsfr: *JSFrameworkRouter, global: *JSGlobalObject, id: OpaqueFileId.Optional) bun.JSError!JSValue {
|
||||
pub fn fileIdToJS(jsfr: *JSFrameworkRouter, global: *JSGlobalObject, id: OpaqueFileId.Optional) JSValue {
|
||||
return jsfr.files.items[(id.unwrap() orelse return .null).get()].toJS(global);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -182,7 +182,7 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
|
||||
.pending => unreachable,
|
||||
.fulfilled => |resolved| config: {
|
||||
bun.assert(resolved.isUndefined());
|
||||
const default = BakeGetDefaultExportFromModule(vm.global, try config_entry_point_string.toJS(vm.global));
|
||||
const default = BakeGetDefaultExportFromModule(vm.global, config_entry_point_string.toJS(vm.global));
|
||||
|
||||
if (!default.isObject()) {
|
||||
return global.throwInvalidArguments(
|
||||
@@ -433,7 +433,6 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
|
||||
.asset => {},
|
||||
.bytecode => {},
|
||||
.sourcemap => {},
|
||||
.@"metafile-json", .@"metafile-markdown" => {},
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -486,7 +485,7 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
|
||||
|
||||
for (router.types, 0..) |router_type, i| {
|
||||
if (router_type.client_file.unwrap()) |client_file| {
|
||||
const str = try (try bun.String.createFormat("{s}{s}", .{
|
||||
const str = (try bun.String.createFormat("{s}{s}", .{
|
||||
public_path,
|
||||
pt.outputFile(client_file).dest_path,
|
||||
})).toJS(global);
|
||||
@@ -543,7 +542,7 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
|
||||
bun.assert(output_file.dest_path[0] != '.');
|
||||
// CSS chunks must be in contiguous order!!
|
||||
bun.assert(output_file.loader.isCSS());
|
||||
str.* = try (try bun.String.createFormat("{s}{s}", .{ public_path, output_file.dest_path })).toJS(global);
|
||||
str.* = (try bun.String.createFormat("{s}{s}", .{ public_path, output_file.dest_path })).toJS(global);
|
||||
}
|
||||
|
||||
// Route URL patterns with parameter placeholders.
|
||||
@@ -660,10 +659,10 @@ pub fn buildWithVm(ctx: bun.cli.Command.Context, cwd: []const u8, vm: *VirtualMa
|
||||
// Init the items
|
||||
var pattern_string = bun.String.cloneUTF8(pattern.slice());
|
||||
defer pattern_string.deref();
|
||||
try route_patterns.putIndex(global, @intCast(nav_index), try pattern_string.toJS(global));
|
||||
try route_patterns.putIndex(global, @intCast(nav_index), pattern_string.toJS(global));
|
||||
|
||||
var src_path = bun.String.cloneUTF8(bun.path.relative(cwd, pt.inputFile(main_file_route_index).absPath()));
|
||||
try route_source_files.putIndex(global, @intCast(nav_index), try src_path.transferToJS(global));
|
||||
try route_source_files.putIndex(global, @intCast(nav_index), src_path.transferToJS(global));
|
||||
|
||||
try route_nested_files.putIndex(global, @intCast(nav_index), file_list);
|
||||
try route_type_and_flags.putIndex(global, @intCast(nav_index), JSValue.jsNumberFromInt32(@bitCast(TypeAndFlags{
|
||||
@@ -994,7 +993,7 @@ pub const PerThread = struct {
|
||||
return try loadModule(
|
||||
pt.vm,
|
||||
pt.vm.global,
|
||||
try pt.module_keys[id.get()].toJS(pt.vm.global),
|
||||
pt.module_keys[id.get()].toJS(pt.vm.global),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1011,7 +1010,7 @@ pub const PerThread = struct {
|
||||
try pt.all_server_files.putIndex(
|
||||
pt.vm.global,
|
||||
@intCast(id.get()),
|
||||
try pt.module_keys[id.get()].toJS(pt.vm.global),
|
||||
pt.module_keys[id.get()].toJS(pt.vm.global),
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -283,25 +283,11 @@ pub const Run = struct {
|
||||
vm.cpu_profiler_config = CPUProfiler.CPUProfilerConfig{
|
||||
.name = cpu_prof_opts.name,
|
||||
.dir = cpu_prof_opts.dir,
|
||||
.md_format = cpu_prof_opts.md_format,
|
||||
.json_format = cpu_prof_opts.json_format,
|
||||
};
|
||||
CPUProfiler.startCPUProfiler(vm.jsc_vm);
|
||||
bun.analytics.Features.cpu_profile += 1;
|
||||
}
|
||||
|
||||
// Set up heap profiler config if enabled (actual profiling happens on exit)
|
||||
if (this.ctx.runtime_options.heap_prof.enabled) {
|
||||
const heap_prof_opts = this.ctx.runtime_options.heap_prof;
|
||||
|
||||
vm.heap_profiler_config = HeapProfiler.HeapProfilerConfig{
|
||||
.name = heap_prof_opts.name,
|
||||
.dir = heap_prof_opts.dir,
|
||||
.text_format = heap_prof_opts.text_format,
|
||||
};
|
||||
bun.analytics.Features.heap_snapshot += 1;
|
||||
}
|
||||
|
||||
this.addConditionalGlobals();
|
||||
do_redis_preconnect: {
|
||||
// This must happen within the API lock, which is why it's not in the "doPreconnect" function
|
||||
@@ -563,7 +549,6 @@ const VirtualMachine = jsc.VirtualMachine;
|
||||
const string = []const u8;
|
||||
|
||||
const CPUProfiler = @import("./bun.js/bindings/BunCPUProfiler.zig");
|
||||
const HeapProfiler = @import("./bun.js/bindings/BunHeapProfiler.zig");
|
||||
const options = @import("./options.zig");
|
||||
const std = @import("std");
|
||||
const Command = @import("./cli.zig").Command;
|
||||
|
||||
@@ -417,7 +417,7 @@ pub const AsyncModule = struct {
|
||||
jsc.markBinding(@src());
|
||||
var specifier = specifier_;
|
||||
var referrer = referrer_;
|
||||
var scope: jsc.TopExceptionScope = undefined;
|
||||
var scope: jsc.CatchScope = undefined;
|
||||
scope.init(globalThis, @src());
|
||||
defer {
|
||||
specifier.deref();
|
||||
|
||||
@@ -94,7 +94,7 @@ pub const BuildMessage = struct {
|
||||
_: *jsc.CallFrame,
|
||||
) bun.JSError!jsc.JSValue {
|
||||
var object = jsc.JSValue.createEmptyObject(globalThis, 4);
|
||||
object.put(globalThis, ZigString.static("name"), try bun.String.static("BuildMessage").toJS(globalThis));
|
||||
object.put(globalThis, ZigString.static("name"), bun.String.static("BuildMessage").toJS(globalThis));
|
||||
object.put(globalThis, ZigString.static("position"), this.getPosition(globalThis));
|
||||
object.put(globalThis, ZigString.static("message"), this.getMessage(globalThis));
|
||||
object.put(globalThis, ZigString.static("level"), this.getLevel(globalThis));
|
||||
|
||||
@@ -30,20 +30,45 @@ pub fn resetArena(this: *ModuleLoader, jsc_vm: *VirtualMachine) void {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolveEmbeddedFile(vm: *VirtualMachine, path_buf: *bun.PathBuffer, input_path: []const u8, extname: []const u8) ?[]const u8 {
|
||||
fn resolveEmbeddedNodeFileViaMemfd(file: *bun.StandaloneModuleGraph.File, path_buffer: *bun.PathBuffer, fd: *i32) ![]const u8 {
|
||||
var label_buf: [128]u8 = undefined;
|
||||
const count = struct {
|
||||
pub var counter = std.atomic.Value(u32).init(0);
|
||||
pub fn get() u32 {
|
||||
return counter.fetchAdd(1, .seq_cst);
|
||||
}
|
||||
}.get();
|
||||
const label = std.fmt.bufPrintZ(&label_buf, "node-addon-{d}", .{count}) catch "";
|
||||
const memfd = try bun.sys.memfd_create(label, .executable).unwrap();
|
||||
errdefer memfd.close();
|
||||
|
||||
fd.* = @intCast(memfd.cast());
|
||||
errdefer fd.* = -1;
|
||||
|
||||
try bun.sys.ftruncate(memfd, @intCast(file.contents.len)).unwrap();
|
||||
try bun.sys.File.writeAll(.{ .handle = memfd }, file.contents).unwrap();
|
||||
|
||||
return try std.fmt.bufPrint(path_buffer, "/proc/self/fd/{d}", .{memfd.cast()});
|
||||
}
|
||||
|
||||
pub fn resolveEmbeddedFile(vm: *VirtualMachine, path_buf: *bun.PathBuffer, linux_memfd: *i32, input_path: []const u8, extname: []const u8) ?[]const u8 {
|
||||
if (input_path.len == 0) return null;
|
||||
var graph = vm.standalone_module_graph orelse return null;
|
||||
const file = graph.find(input_path) orelse return null;
|
||||
|
||||
if (comptime Environment.isLinux) {
|
||||
// TODO: use /proc/fd/12346 instead! Avoid the copy!
|
||||
// Best-effort: use memfd to avoid hitting the disk
|
||||
if (resolveEmbeddedNodeFileViaMemfd(file, path_buf, linux_memfd)) |path| {
|
||||
return path;
|
||||
} else |_| {
|
||||
// fall back to temp file
|
||||
}
|
||||
}
|
||||
|
||||
// atomically write to a tmpfile and then move it to the final destination
|
||||
const tmpname_buf = bun.path_buffer_pool.get();
|
||||
defer bun.path_buffer_pool.put(tmpname_buf);
|
||||
const tmpfilename = bun.fs.FileSystem.tmpname(extname, tmpname_buf, bun.hash(file.name)) catch return null;
|
||||
|
||||
const tmpdir: bun.FD = .fromStdDir(bun.fs.FileSystem.instance.tmpdir() catch return null);
|
||||
|
||||
// First we open the tmpfile, to avoid any other work in the event of failure.
|
||||
@@ -100,7 +125,7 @@ pub fn transpileSourceCode(
|
||||
const disable_transpilying = comptime flags.disableTranspiling();
|
||||
|
||||
if (comptime disable_transpilying) {
|
||||
if (!(loader.isJavaScriptLike() or loader == .toml or loader == .yaml or loader == .json5 or loader == .text or loader == .json or loader == .jsonc)) {
|
||||
if (!(loader.isJavaScriptLike() or loader == .toml or loader == .yaml or loader == .text or loader == .json or loader == .jsonc)) {
|
||||
// Don't print "export default <file path>"
|
||||
return ResolvedSource{
|
||||
.allocator = null,
|
||||
@@ -112,7 +137,7 @@ pub fn transpileSourceCode(
|
||||
}
|
||||
|
||||
switch (loader) {
|
||||
.js, .jsx, .ts, .tsx, .json, .jsonc, .toml, .yaml, .json5, .text, .md => {
|
||||
.js, .jsx, .ts, .tsx, .json, .jsonc, .toml, .yaml, .text => {
|
||||
// Ensure that if there was an ASTMemoryAllocator in use, it's not used anymore.
|
||||
var ast_scope = js_ast.ASTMemoryAllocator.Scope{};
|
||||
ast_scope.enter();
|
||||
@@ -361,7 +386,7 @@ pub fn transpileSourceCode(
|
||||
};
|
||||
}
|
||||
|
||||
if (loader == .json or loader == .jsonc or loader == .toml or loader == .yaml or loader == .json5) {
|
||||
if (loader == .json or loader == .jsonc or loader == .toml or loader == .yaml) {
|
||||
if (parse_result.empty) {
|
||||
return ResolvedSource{
|
||||
.allocator = null,
|
||||
@@ -1301,14 +1326,14 @@ pub const FetchFlags = enum {
|
||||
};
|
||||
|
||||
/// Support embedded .node files
|
||||
export fn Bun__resolveEmbeddedNodeFile(vm: *VirtualMachine, in_out_str: *bun.String) bool {
|
||||
export fn Bun__resolveEmbeddedNodeFile(vm: *VirtualMachine, in_out_str: *bun.String, linux_memfd_fd_to_close: *i32) bool {
|
||||
if (vm.standalone_module_graph == null) return false;
|
||||
|
||||
const input_path = in_out_str.toUTF8(bun.default_allocator);
|
||||
defer input_path.deinit();
|
||||
const path_buf = bun.path_buffer_pool.get();
|
||||
defer bun.path_buffer_pool.put(path_buf);
|
||||
const result = ModuleLoader.resolveEmbeddedFile(vm, path_buf, input_path.slice(), "node") orelse return false;
|
||||
const path_buffer = bun.path_buffer_pool.get();
|
||||
defer bun.path_buffer_pool.put(path_buffer);
|
||||
const result = ModuleLoader.resolveEmbeddedFile(vm, path_buffer, linux_memfd_fd_to_close, input_path.slice(), "node") orelse return false;
|
||||
in_out_str.* = bun.String.cloneUTF8(result);
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ pub const ResolveMessage = struct {
|
||||
return globalThis.throw("ResolveMessage is not constructable", .{});
|
||||
}
|
||||
|
||||
pub fn getCode(this: *ResolveMessage, globalObject: *jsc.JSGlobalObject) bun.JSError!jsc.JSValue {
|
||||
pub fn getCode(this: *ResolveMessage, globalObject: *jsc.JSGlobalObject) jsc.JSValue {
|
||||
switch (this.msg.metadata) {
|
||||
.resolve => |resolve| {
|
||||
const code: []const u8 = brk: {
|
||||
@@ -153,7 +153,7 @@ pub const ResolveMessage = struct {
|
||||
_: *jsc.CallFrame,
|
||||
) bun.JSError!jsc.JSValue {
|
||||
var object = jsc.JSValue.createEmptyObject(globalThis, 7);
|
||||
object.put(globalThis, ZigString.static("name"), try bun.String.static("ResolveMessage").toJS(globalThis));
|
||||
object.put(globalThis, ZigString.static("name"), bun.String.static("ResolveMessage").toJS(globalThis));
|
||||
object.put(globalThis, ZigString.static("position"), this.getPosition(globalThis));
|
||||
object.put(globalThis, ZigString.static("message"), this.getMessage(globalThis));
|
||||
object.put(globalThis, ZigString.static("level"), this.getLevel(globalThis));
|
||||
|
||||
@@ -49,7 +49,6 @@ standalone_module_graph: ?*bun.StandaloneModuleGraph = null,
|
||||
smol: bool = false,
|
||||
dns_result_order: DNSResolver.Order = .verbatim,
|
||||
cpu_profiler_config: ?CPUProfilerConfig = null,
|
||||
heap_profiler_config: ?HeapProfilerConfig = null,
|
||||
counters: Counters = .{},
|
||||
|
||||
hot_reload: bun.cli.Command.HotReload = .none,
|
||||
@@ -543,7 +542,7 @@ fn wrapUnhandledRejectionErrorForUncaughtException(globalObject: *JSGlobalObject
|
||||
break :blk false;
|
||||
}) return reason;
|
||||
const reasonStr = blk: {
|
||||
var scope: jsc.TopExceptionScope = undefined;
|
||||
var scope: jsc.CatchScope = undefined;
|
||||
scope.init(globalObject, @src());
|
||||
defer scope.deinit();
|
||||
defer if (scope.exception()) |_| scope.clearException();
|
||||
@@ -844,15 +843,6 @@ pub fn onExit(this: *VirtualMachine) void {
|
||||
};
|
||||
}
|
||||
|
||||
// Write heap profile if profiling was enabled - do this after CPU profile but before shutdown
|
||||
// Grab the config and null it out to make this idempotent
|
||||
if (this.heap_profiler_config) |config| {
|
||||
this.heap_profiler_config = null;
|
||||
HeapProfiler.generateAndWriteProfile(this.jsc_vm, config) catch |err| {
|
||||
Output.err(err, "Failed to write heap profile", .{});
|
||||
};
|
||||
}
|
||||
|
||||
this.exit_handler.dispatchOnExit();
|
||||
this.is_shutting_down = true;
|
||||
|
||||
@@ -3725,9 +3715,6 @@ const Allocator = std.mem.Allocator;
|
||||
const CPUProfiler = @import("./bindings/BunCPUProfiler.zig");
|
||||
const CPUProfilerConfig = CPUProfiler.CPUProfilerConfig;
|
||||
|
||||
const HeapProfiler = @import("./bindings/BunHeapProfiler.zig");
|
||||
const HeapProfilerConfig = HeapProfiler.HeapProfilerConfig;
|
||||
|
||||
const bun = @import("bun");
|
||||
const Async = bun.Async;
|
||||
const DotEnv = bun.DotEnv;
|
||||
|
||||
@@ -27,10 +27,8 @@ pub const Subprocess = @import("./api/bun/subprocess.zig");
|
||||
pub const Terminal = @import("./api/bun/Terminal.zig");
|
||||
pub const HashObject = @import("./api/HashObject.zig");
|
||||
pub const JSONCObject = @import("./api/JSONCObject.zig");
|
||||
pub const MarkdownObject = @import("./api/MarkdownObject.zig");
|
||||
pub const TOMLObject = @import("./api/TOMLObject.zig");
|
||||
pub const UnsafeObject = @import("./api/UnsafeObject.zig");
|
||||
pub const JSON5Object = @import("./api/JSON5Object.zig");
|
||||
pub const YAMLObject = @import("./api/YAMLObject.zig");
|
||||
pub const Timer = @import("./api/Timer.zig");
|
||||
pub const FFIObject = @import("./api/FFIObject.zig");
|
||||
|
||||
@@ -712,7 +712,7 @@ const WriteContext = struct {
|
||||
return switch (this.result) {
|
||||
.success => .{ .resolve = .js_undefined },
|
||||
.err => |e| .{ .reject = globalThis.createErrorInstance("{s}", .{@errorName(e)}) },
|
||||
.sys_err => |sys_err| .{ .reject = try sys_err.toJS(globalThis) },
|
||||
.sys_err => |sys_err| .{ .reject = sys_err.toJS(globalThis) },
|
||||
};
|
||||
}
|
||||
|
||||
@@ -866,7 +866,7 @@ const FilesContext = struct {
|
||||
blob_ptr.name = bun.String.cloneUTF8(entry.path);
|
||||
blob_ptr.last_modified = @floatFromInt(entry.mtime * 1000);
|
||||
|
||||
try map_ptr.set(globalThis, try blob_ptr.name.toJS(globalThis), blob_ptr.toJS(globalThis));
|
||||
try map_ptr.set(globalThis, blob_ptr.name.toJS(globalThis), blob_ptr.toJS(globalThis));
|
||||
}
|
||||
|
||||
return .{ .resolve = map };
|
||||
|
||||
@@ -63,9 +63,7 @@ pub const BunObject = struct {
|
||||
pub const SHA512 = toJSLazyPropertyCallback(Crypto.SHA512.getter);
|
||||
pub const SHA512_256 = toJSLazyPropertyCallback(Crypto.SHA512_256.getter);
|
||||
pub const JSONC = toJSLazyPropertyCallback(Bun.getJSONCObject);
|
||||
pub const markdown = toJSLazyPropertyCallback(Bun.getMarkdownObject);
|
||||
pub const TOML = toJSLazyPropertyCallback(Bun.getTOMLObject);
|
||||
pub const JSON5 = toJSLazyPropertyCallback(Bun.getJSON5Object);
|
||||
pub const YAML = toJSLazyPropertyCallback(Bun.getYAMLObject);
|
||||
pub const Transpiler = toJSLazyPropertyCallback(Bun.getTranspilerConstructor);
|
||||
pub const argv = toJSLazyPropertyCallback(Bun.getArgv);
|
||||
@@ -132,9 +130,7 @@ pub const BunObject = struct {
|
||||
@export(&BunObject.SHA512, .{ .name = lazyPropertyCallbackName("SHA512") });
|
||||
@export(&BunObject.SHA512_256, .{ .name = lazyPropertyCallbackName("SHA512_256") });
|
||||
@export(&BunObject.JSONC, .{ .name = lazyPropertyCallbackName("JSONC") });
|
||||
@export(&BunObject.markdown, .{ .name = lazyPropertyCallbackName("markdown") });
|
||||
@export(&BunObject.TOML, .{ .name = lazyPropertyCallbackName("TOML") });
|
||||
@export(&BunObject.JSON5, .{ .name = lazyPropertyCallbackName("JSON5") });
|
||||
@export(&BunObject.YAML, .{ .name = lazyPropertyCallbackName("YAML") });
|
||||
@export(&BunObject.Glob, .{ .name = lazyPropertyCallbackName("Glob") });
|
||||
@export(&BunObject.Transpiler, .{ .name = lazyPropertyCallbackName("Transpiler") });
|
||||
@@ -610,7 +606,7 @@ fn getMain(globalThis: *jsc.JSGlobalObject) callconv(jsc.conv) jsc.JSValue {
|
||||
}
|
||||
}
|
||||
|
||||
return vm.main_resolved_path.toJS(globalThis) catch .zero;
|
||||
return vm.main_resolved_path.toJS(globalThis);
|
||||
}
|
||||
|
||||
return ZigString.init(vm.main).toJS(globalThis);
|
||||
@@ -1107,7 +1103,7 @@ pub export fn Bun__escapeHTML16(globalObject: *jsc.JSGlobalObject, input_value:
|
||||
assert(len > 0);
|
||||
const input_slice = ptr[0..len];
|
||||
const escaped = strings.escapeHTMLForUTF16Input(globalObject.bunVM().allocator, input_slice) catch {
|
||||
return globalObject.throwValue(ZigString.init("Out of memory").toErrorInstance(globalObject)) catch return .zero;
|
||||
return globalObject.throwValue(bun.String.static("Out of memory").toJS(globalObject)) catch .zero;
|
||||
};
|
||||
|
||||
return switch (escaped) {
|
||||
@@ -1125,7 +1121,7 @@ pub export fn Bun__escapeHTML8(globalObject: *jsc.JSGlobalObject, input_value: J
|
||||
const allocator = if (input_slice.len <= 32) stack_allocator.get() else stack_allocator.fallback_allocator;
|
||||
|
||||
const escaped = strings.escapeHTMLForLatin1Input(allocator, input_slice) catch {
|
||||
return globalObject.throwValue(ZigString.init("Out of memory").toErrorInstance(globalObject)) catch return .zero;
|
||||
return globalObject.throwValue(bun.String.static("Out of memory").toJS(globalObject)) catch .zero;
|
||||
};
|
||||
|
||||
switch (escaped) {
|
||||
@@ -1242,7 +1238,7 @@ pub fn mmapFile(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.
|
||||
.result => |map| map,
|
||||
|
||||
.err => |err| {
|
||||
return globalThis.throwValue(try err.toJS(globalThis));
|
||||
return globalThis.throwValue(err.toJS(globalThis));
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1269,17 +1265,10 @@ pub fn getHashObject(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSVa
|
||||
pub fn getJSONCObject(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSValue {
|
||||
return JSONCObject.create(globalThis);
|
||||
}
|
||||
pub fn getMarkdownObject(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSValue {
|
||||
return MarkdownObject.create(globalThis);
|
||||
}
|
||||
pub fn getTOMLObject(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSValue {
|
||||
return TOMLObject.create(globalThis);
|
||||
}
|
||||
|
||||
pub fn getJSON5Object(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSValue {
|
||||
return JSON5Object.create(globalThis);
|
||||
}
|
||||
|
||||
pub fn getYAMLObject(globalThis: *jsc.JSGlobalObject, _: *jsc.JSObject) jsc.JSValue {
|
||||
return YAMLObject.create(globalThis);
|
||||
}
|
||||
@@ -2077,9 +2066,7 @@ const gen = bun.gen.BunObject;
|
||||
const api = bun.api;
|
||||
const FFIObject = bun.api.FFIObject;
|
||||
const HashObject = bun.api.HashObject;
|
||||
const JSON5Object = bun.api.JSON5Object;
|
||||
const JSONCObject = bun.api.JSONCObject;
|
||||
const MarkdownObject = bun.api.MarkdownObject;
|
||||
const TOMLObject = bun.api.TOMLObject;
|
||||
const UnsafeObject = bun.api.UnsafeObject;
|
||||
const YAMLObject = bun.api.YAMLObject;
|
||||
|
||||
@@ -242,10 +242,6 @@ pub const JSBundler = struct {
|
||||
bytecode: bool = false,
|
||||
banner: OwnedString = OwnedString.initEmpty(bun.default_allocator),
|
||||
footer: OwnedString = OwnedString.initEmpty(bun.default_allocator),
|
||||
/// Path to write JSON metafile (if specified via metafile object) - TEST: moved here
|
||||
metafile_json_path: OwnedString = OwnedString.initEmpty(bun.default_allocator),
|
||||
/// Path to write markdown metafile (if specified via metafile object) - TEST: moved here
|
||||
metafile_markdown_path: OwnedString = OwnedString.initEmpty(bun.default_allocator),
|
||||
css_chunking: bool = false,
|
||||
drop: bun.StringSet = bun.StringSet.init(bun.default_allocator),
|
||||
features: bun.StringSet = bun.StringSet.init(bun.default_allocator),
|
||||
@@ -258,7 +254,6 @@ pub const JSBundler = struct {
|
||||
/// In-memory files that can be used as entrypoints or imported.
|
||||
/// These files do not need to exist on disk.
|
||||
files: FileMap = .{},
|
||||
/// Generate metafile (JSON module graph)
|
||||
metafile: bool = false,
|
||||
|
||||
pub const CompileOptions = struct {
|
||||
@@ -941,30 +936,8 @@ pub const JSBundler = struct {
|
||||
this.throw_on_error = flag;
|
||||
}
|
||||
|
||||
// Parse metafile option: boolean | string | { json?: string, markdown?: string }
|
||||
if (try config.getOwn(globalThis, "metafile")) |metafile_value| {
|
||||
if (metafile_value.isBoolean()) {
|
||||
this.metafile = metafile_value == .true;
|
||||
} else if (metafile_value.isString()) {
|
||||
// metafile: "path/to/meta.json" - shorthand for { json: "..." }
|
||||
this.metafile = true;
|
||||
const slice = try metafile_value.toSlice(globalThis, bun.default_allocator);
|
||||
defer slice.deinit();
|
||||
try this.metafile_json_path.appendSliceExact(slice.slice());
|
||||
} else if (metafile_value.isObject()) {
|
||||
// metafile: { json?: string, markdown?: string }
|
||||
this.metafile = true;
|
||||
if (try metafile_value.getOptional(globalThis, "json", ZigString.Slice)) |slice| {
|
||||
defer slice.deinit();
|
||||
try this.metafile_json_path.appendSliceExact(slice.slice());
|
||||
}
|
||||
if (try metafile_value.getOptional(globalThis, "markdown", ZigString.Slice)) |slice| {
|
||||
defer slice.deinit();
|
||||
try this.metafile_markdown_path.appendSliceExact(slice.slice());
|
||||
}
|
||||
} else if (!metafile_value.isUndefinedOrNull()) {
|
||||
return globalThis.throwInvalidArguments("Expected metafile to be a boolean, string, or object with json/markdown paths", .{});
|
||||
}
|
||||
if (try config.getBooleanLoose(globalThis, "metafile")) |flag| {
|
||||
this.metafile = flag;
|
||||
}
|
||||
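The block removed above accepted `metafile` as a boolean, a string path (shorthand for the JSON output), or an object with `json`/`markdown` paths; the replacement keeps only the boolean form. For reference, the shapes the removed parser handled (illustrative calls based on the parsing code above):

await Bun.build({ entrypoints: ["./index.ts"], metafile: true });                                      // boolean: keep the module graph in memory
await Bun.build({ entrypoints: ["./index.ts"], metafile: "meta.json" });                               // string: shorthand for { json: "meta.json" }
await Bun.build({ entrypoints: ["./index.ts"], metafile: { json: "meta.json", markdown: "meta.md" } });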
|
||||
if (try CompileOptions.fromJS(
|
||||
@@ -1097,8 +1070,6 @@ pub const JSBundler = struct {
|
||||
self.footer.deinit();
|
||||
self.tsconfig_override.deinit();
|
||||
self.files.deinitAndUnprotect();
|
||||
self.metafile_json_path.deinit();
|
||||
self.metafile_markdown_path.deinit();
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1110,28 +1081,6 @@ pub const JSBundler = struct {
|
||||
return globalThis.throwInvalidArguments("Expected a config object to be passed to Bun.build", .{});
|
||||
}
|
||||
|
||||
const vm = globalThis.bunVM();
|
||||
|
||||
// Detect and prevent calling Bun.build from within a macro during bundling.
|
||||
// This would cause a deadlock because:
|
||||
// 1. The bundler thread (singleton) is processing the outer Bun.build
|
||||
// 2. During parsing, it encounters a macro and evaluates it
|
||||
// 3. The macro calls Bun.build, which tries to enqueue to the same singleton thread
|
||||
// 4. The singleton thread is blocked waiting for the macro to complete -> deadlock
|
||||
if (vm.macro_mode) {
|
||||
return globalThis.throw(
|
||||
\\Bun.build cannot be called from within a macro during bundling.
|
||||
\\
|
||||
\\This would cause a deadlock because the bundler is waiting for the macro to complete,
|
||||
\\but the macro's Bun.build call is waiting for the bundler.
|
||||
\\
|
||||
\\To bundle code at compile time in a macro, use Bun.spawnSync to invoke the CLI:
|
||||
\\ const result = Bun.spawnSync(["bun", "build", entrypoint, "--format=esm"]);
|
||||
,
|
||||
.{},
|
||||
);
|
||||
}
|
||||
|
||||
var plugins: ?*Plugin = null;
|
||||
const config = try Config.fromJS(globalThis, arguments[0], &plugins, bun.default_allocator);
|
||||
|
||||
@@ -1139,7 +1088,7 @@ pub const JSBundler = struct {
|
||||
config,
|
||||
plugins,
|
||||
globalThis,
|
||||
vm.eventLoop(),
|
||||
globalThis.bunVM().eventLoop(),
|
||||
bun.default_allocator,
|
||||
);
|
||||
}
|
||||
@@ -1490,7 +1439,7 @@ pub const JSBundler = struct {
|
||||
error.JSTerminated => return error.JSTerminated,
|
||||
};
|
||||
|
||||
var scope: jsc.TopExceptionScope = undefined;
|
||||
var scope: jsc.CatchScope = undefined;
|
||||
scope.init(globalThis, @src());
|
||||
defer scope.deinit();
|
||||
|
||||
@@ -1717,11 +1666,9 @@ pub const BuildArtifact = struct {
|
||||
@"entry-point",
|
||||
sourcemap,
|
||||
bytecode,
|
||||
@"metafile-json",
|
||||
@"metafile-markdown",
|
||||
|
||||
pub fn isFileInStandaloneMode(this: OutputKind) bool {
|
||||
return this != .sourcemap and this != .bytecode and this != .@"metafile-json" and this != .@"metafile-markdown";
|
||||
return this != .sourcemap and this != .bytecode;
|
||||
}
|
||||
};
|
||||
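isFileInStandaloneMode decides which build outputs get embedded into a compiled executable; with the metafile kinds removed above, only sourcemaps and bytecode stay external. For reference, artifact kinds are visible on Bun.build results (a small sketch; kind strings other than those shown in this hunk are assumed from Bun's public API):

const result = await Bun.build({ entrypoints: ["./index.ts"] });
for (const artifact of result.outputs) {
  console.log(artifact.path, artifact.kind); // e.g. "entry-point", "sourcemap", "bytecode"
}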
|
||||
|
||||
@@ -1,433 +0,0 @@
|
||||
pub fn create(globalThis: *jsc.JSGlobalObject) jsc.JSValue {
|
||||
const object = JSValue.createEmptyObject(globalThis, 2);
|
||||
object.put(
|
||||
globalThis,
|
||||
ZigString.static("parse"),
|
||||
jsc.JSFunction.create(globalThis, "parse", parse, 1, .{}),
|
||||
);
|
||||
object.put(
|
||||
globalThis,
|
||||
ZigString.static("stringify"),
|
||||
jsc.JSFunction.create(globalThis, "stringify", stringify, 3, .{}),
|
||||
);
|
||||
return object;
|
||||
}
|
||||
|
||||
pub fn stringify(
|
||||
global: *jsc.JSGlobalObject,
|
||||
callFrame: *jsc.CallFrame,
|
||||
) bun.JSError!jsc.JSValue {
|
||||
const value, const replacer, const space_value = callFrame.argumentsAsArray(3);
|
||||
|
||||
value.ensureStillAlive();
|
||||
|
||||
if (value.isUndefined() or value.isSymbol() or value.isFunction()) {
|
||||
return .js_undefined;
|
||||
}
|
||||
|
||||
if (!replacer.isUndefinedOrNull()) {
|
||||
return global.throw("JSON5.stringify does not support the replacer argument", .{});
|
||||
}
|
||||
|
||||
var stringifier: Stringifier = try .init(global, space_value);
|
||||
defer stringifier.deinit();
|
||||
|
||||
stringifier.stringifyValue(global, value) catch |err| return switch (err) {
|
||||
error.OutOfMemory, error.JSError, error.JSTerminated => |js_err| js_err,
|
||||
error.StackOverflow => global.throwStackOverflow(),
|
||||
};
|
||||
|
||||
return stringifier.builder.toString(global);
|
||||
}
|
||||
|
||||
pub fn parse(
|
||||
global: *jsc.JSGlobalObject,
|
||||
callFrame: *jsc.CallFrame,
|
||||
) bun.JSError!jsc.JSValue {
|
||||
var arena: bun.ArenaAllocator = .init(bun.default_allocator);
|
||||
defer arena.deinit();
|
||||
const allocator = arena.allocator();
|
||||
|
||||
var ast_memory_allocator = bun.handleOom(allocator.create(ast.ASTMemoryAllocator));
|
||||
var ast_scope = ast_memory_allocator.enter(allocator);
|
||||
defer ast_scope.exit();
|
||||
|
||||
const input_value = callFrame.argument(0);
|
||||
|
||||
if (input_value.isEmptyOrUndefinedOrNull()) {
|
||||
return global.throwInvalidArguments("Expected a string to parse", .{});
|
||||
}
|
||||
|
||||
const input: jsc.Node.BlobOrStringOrBuffer =
|
||||
try jsc.Node.BlobOrStringOrBuffer.fromJS(global, allocator, input_value) orelse input: {
|
||||
var str = try input_value.toBunString(global);
|
||||
defer str.deref();
|
||||
break :input .{ .string_or_buffer = .{ .string = str.toSlice(allocator) } };
|
||||
};
|
||||
defer input.deinit();
|
||||
|
||||
var log = logger.Log.init(bun.default_allocator);
|
||||
defer log.deinit();
|
||||
|
||||
const source = &logger.Source.initPathString("input.json5", input.slice());
|
||||
|
||||
const root = json5.JSON5Parser.parse(source, &log, allocator) catch |err| return switch (err) {
|
||||
error.OutOfMemory => |oom| oom,
|
||||
error.StackOverflow => global.throwStackOverflow(),
|
||||
else => {
|
||||
if (log.msgs.items.len > 0) {
|
||||
const first_msg = log.msgs.items[0];
|
||||
return global.throwValue(global.createSyntaxErrorInstance(
|
||||
"JSON5 Parse error: {s}",
|
||||
.{first_msg.data.text},
|
||||
));
|
||||
}
|
||||
return global.throwValue(global.createSyntaxErrorInstance(
|
||||
"JSON5 Parse error: Unable to parse JSON5 string",
|
||||
.{},
|
||||
));
|
||||
},
|
||||
};
|
||||
|
||||
return exprToJS(root, global);
|
||||
}
|
||||
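This is the parse half of the removed Bun.JSON5 object. JSON5 input accepts unquoted keys, single-quoted strings, trailing commas, comments, and Infinity/NaN; an illustrative call, with the API shape taken from the create() function above:

const config = Bun.JSON5.parse(`{
  // comments are allowed
  name: 'demo',        // unquoted key, single-quoted string
  retries: Infinity,
  tags: ['a', 'b',],   // trailing comma
}`);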
|
||||
const Stringifier = struct {
|
||||
stack_check: bun.StackCheck,
|
||||
builder: wtf.StringBuilder,
|
||||
indent: usize,
|
||||
space: Space,
|
||||
visiting: std.AutoHashMapUnmanaged(JSValue, void),
|
||||
allocator: std.mem.Allocator,
|
||||
|
||||
const StringifyError = bun.JSError || bun.StackOverflow;
|
||||
|
||||
const Space = union(enum) {
|
||||
minified,
|
||||
number: u32,
|
||||
str: bun.String,
|
||||
|
||||
pub fn init(global: *jsc.JSGlobalObject, space_value: JSValue) bun.JSError!Space {
|
||||
const space = try space_value.unwrapBoxedPrimitive(global);
|
||||
if (space.isNumber()) {
|
||||
// Clamp on the float to match the spec's min(10, ToIntegerOrInfinity(space)).
|
||||
// toInt32() wraps large values and Infinity to 0, which is wrong.
|
||||
const num_f = space.asNumber();
|
||||
if (!(num_f >= 1)) return .minified; // handles NaN, -Infinity, 0, negatives
|
||||
return .{ .number = if (num_f > 10) 10 else @intFromFloat(num_f) };
|
||||
}
|
||||
if (space.isString()) {
|
||||
const str = try space.toBunString(global);
|
||||
if (str.length() == 0) {
|
||||
str.deref();
|
||||
return .minified;
|
||||
}
|
||||
return .{ .str = str };
|
||||
}
|
||||
return .minified;
|
||||
}
|
||||
|
||||
pub fn deinit(this: *const Space) void {
|
||||
switch (this.*) {
|
||||
.str => |str| str.deref(),
|
||||
.minified, .number => {},
|
||||
}
|
||||
}
|
||||
};
|
||||
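Space.init mirrors JSON.stringify's handling of the space argument: non-positive or non-numeric values mean minified output, numbers are clamped to 10, and a string indent is used as-is (truncated to 10 characters when printing). Illustrative calls, where `obj` is a placeholder value:

Bun.JSON5.stringify(obj, null, 2);     // two-space indent
Bun.JSON5.stringify(obj, null, 99);    // clamped to 10 spaces
Bun.JSON5.stringify(obj, null, "");    // empty string: minified
Bun.JSON5.stringify(obj, null, "\t");  // tab indent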
|
||||
pub fn init(global: *jsc.JSGlobalObject, space_value: JSValue) bun.JSError!Stringifier {
|
||||
return .{
|
||||
.stack_check = .init(),
|
||||
.builder = .init(),
|
||||
.indent = 0,
|
||||
.space = try Space.init(global, space_value),
|
||||
.visiting = .empty,
|
||||
.allocator = bun.default_allocator,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn deinit(this: *Stringifier) void {
|
||||
this.builder.deinit();
|
||||
this.space.deinit();
|
||||
this.visiting.deinit(this.allocator);
|
||||
}
|
||||
|
||||
pub fn stringifyValue(this: *Stringifier, global: *jsc.JSGlobalObject, value: JSValue) StringifyError!void {
|
||||
if (!this.stack_check.isSafeToRecurse()) {
|
||||
return error.StackOverflow;
|
||||
}
|
||||
|
||||
const unwrapped = try value.unwrapBoxedPrimitive(global);
|
||||
|
||||
if (unwrapped.isNull()) {
|
||||
this.builder.append(.latin1, "null");
|
||||
return;
|
||||
}
|
||||
|
||||
if (unwrapped.isNumber()) {
|
||||
if (unwrapped.isInt32()) {
|
||||
this.builder.append(.int, unwrapped.asInt32());
|
||||
return;
|
||||
}
|
||||
const num = unwrapped.asNumber();
|
||||
if (std.math.isNegativeInf(num)) {
|
||||
this.builder.append(.latin1, "-Infinity");
|
||||
} else if (std.math.isInf(num)) {
|
||||
this.builder.append(.latin1, "Infinity");
|
||||
} else if (std.math.isNan(num)) {
|
||||
this.builder.append(.latin1, "NaN");
|
||||
} else {
|
||||
this.builder.append(.double, num);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (unwrapped.isBigInt()) {
|
||||
return global.throw("JSON5.stringify cannot serialize BigInt", .{});
|
||||
}
|
||||
|
||||
if (unwrapped.isBoolean()) {
|
||||
this.builder.append(.latin1, if (unwrapped.asBoolean()) "true" else "false");
|
||||
return;
|
||||
}
|
||||
|
||||
if (unwrapped.isString()) {
|
||||
const str = try unwrapped.toBunString(global);
|
||||
defer str.deref();
|
||||
this.appendQuotedString(str);
|
||||
return;
|
||||
}
|
||||
|
||||
// Object or array — check for circular references
|
||||
const gop = try this.visiting.getOrPut(this.allocator, unwrapped);
|
||||
if (gop.found_existing) {
|
||||
return global.throw("Converting circular structure to JSON5", .{});
|
||||
}
|
||||
defer _ = this.visiting.remove(unwrapped);
|
||||
|
||||
if (unwrapped.isArray()) {
|
||||
try this.stringifyArray(global, unwrapped);
|
||||
} else {
|
||||
try this.stringifyObject(global, unwrapped);
|
||||
}
|
||||
}
|
||||
|
||||
fn stringifyArray(this: *Stringifier, global: *jsc.JSGlobalObject, value: JSValue) StringifyError!void {
|
||||
var iter = try value.arrayIterator(global);
|
||||
|
||||
if (iter.len == 0) {
|
||||
this.builder.append(.latin1, "[]");
|
||||
return;
|
||||
}
|
||||
|
||||
this.builder.append(.lchar, '[');
|
||||
|
||||
switch (this.space) {
|
||||
.minified => {
|
||||
var first = true;
|
||||
while (try iter.next()) |item| {
|
||||
if (!first) this.builder.append(.lchar, ',');
|
||||
first = false;
|
||||
if (item.isUndefined() or item.isSymbol() or item.isFunction()) {
|
||||
this.builder.append(.latin1, "null");
|
||||
} else {
|
||||
try this.stringifyValue(global, item);
|
||||
}
|
||||
}
|
||||
},
|
||||
.number, .str => {
|
||||
this.indent += 1;
|
||||
var first = true;
|
||||
while (try iter.next()) |item| {
|
||||
if (!first) this.builder.append(.lchar, ',');
|
||||
first = false;
|
||||
this.newline();
|
||||
if (item.isUndefined() or item.isSymbol() or item.isFunction()) {
|
||||
this.builder.append(.latin1, "null");
|
||||
} else {
|
||||
try this.stringifyValue(global, item);
|
||||
}
|
||||
}
|
||||
// Trailing comma
|
||||
this.builder.append(.lchar, ',');
|
||||
this.indent -= 1;
|
||||
this.newline();
|
||||
},
|
||||
}
|
||||
|
||||
this.builder.append(.lchar, ']');
|
||||
}
|
||||
|
||||
    fn stringifyObject(this: *Stringifier, global: *jsc.JSGlobalObject, value: JSValue) StringifyError!void {
        var iter: jsc.JSPropertyIterator(.{ .skip_empty_name = false, .include_value = true }) = try .init(
            global,
            try value.toObject(global),
        );
        defer iter.deinit();

        if (iter.len == 0) {
            this.builder.append(.latin1, "{}");
            return;
        }

        this.builder.append(.lchar, '{');

        switch (this.space) {
            .minified => {
                var first = true;
                while (try iter.next()) |prop_name| {
                    if (iter.value.isUndefined() or iter.value.isSymbol() or iter.value.isFunction()) {
                        continue;
                    }
                    if (!first) this.builder.append(.lchar, ',');
                    first = false;
                    this.appendKey(prop_name);
                    this.builder.append(.lchar, ':');
                    try this.stringifyValue(global, iter.value);
                }
            },
            .number, .str => {
                this.indent += 1;
                var first = true;
                while (try iter.next()) |prop_name| {
                    if (iter.value.isUndefined() or iter.value.isSymbol() or iter.value.isFunction()) {
                        continue;
                    }
                    if (!first) this.builder.append(.lchar, ',');
                    first = false;
                    this.newline();
                    this.appendKey(prop_name);
                    this.builder.append(.latin1, ": ");
                    try this.stringifyValue(global, iter.value);
                }
                this.indent -= 1;
                if (!first) {
                    // Trailing comma
                    this.builder.append(.lchar, ',');
                    this.newline();
                }
            },
        }

        this.builder.append(.lchar, '}');
    }

    fn appendKey(this: *Stringifier, name: bun.String) void {
        const is_identifier = is_identifier: {
            if (name.length() == 0) break :is_identifier false;
            if (!bun.js_lexer.isIdentifierStart(@intCast(name.charAt(0)))) break :is_identifier false;
            for (1..name.length()) |i| {
                if (!bun.js_lexer.isIdentifierContinue(@intCast(name.charAt(i)))) break :is_identifier false;
            }
            break :is_identifier true;
        };

        if (is_identifier) {
            this.builder.append(.string, name);
        } else {
            this.appendQuotedString(name);
        }
    }

    fn appendQuotedString(this: *Stringifier, str: bun.String) void {
        this.builder.append(.lchar, '\'');
        for (0..str.length()) |i| {
            const c = str.charAt(i);
            switch (c) {
                0x00 => this.builder.append(.latin1, "\\0"),
                0x08 => this.builder.append(.latin1, "\\b"),
                0x09 => this.builder.append(.latin1, "\\t"),
                0x0a => this.builder.append(.latin1, "\\n"),
                0x0b => this.builder.append(.latin1, "\\v"),
                0x0c => this.builder.append(.latin1, "\\f"),
                0x0d => this.builder.append(.latin1, "\\r"),
                0x27 => this.builder.append(.latin1, "\\'"), // single quote
                0x5c => this.builder.append(.latin1, "\\\\"), // backslash
                0x2028 => this.builder.append(.latin1, "\\u2028"),
                0x2029 => this.builder.append(.latin1, "\\u2029"),
                0x01...0x07, 0x0e...0x1f, 0x7f => {
                    // Other control chars → \xHH
                    this.builder.append(.latin1, "\\x");
                    this.builder.append(.lchar, hexDigit(c >> 4));
                    this.builder.append(.lchar, hexDigit(c & 0x0f));
                },
                else => this.builder.append(.uchar, c),
            }
        }
        this.builder.append(.lchar, '\'');
    }

    fn hexDigit(v: u16) u8 {
        const nibble: u8 = @intCast(v & 0x0f);
        return if (nibble < 10) '0' + nibble else 'a' + nibble - 10;
    }

    fn newline(this: *Stringifier) void {
        switch (this.space) {
            .minified => {},
            .number => |space_num| {
                this.builder.append(.lchar, '\n');
                for (0..this.indent * space_num) |_| {
                    this.builder.append(.lchar, ' ');
                }
            },
            .str => |space_str| {
                this.builder.append(.lchar, '\n');
                const clamped = if (space_str.length() > 10)
                    space_str.substringWithLen(0, 10)
                else
                    space_str;
                for (0..this.indent) |_| {
                    this.builder.append(.string, clamped);
                }
            },
        }
    }
};
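
For orientation, here is a minimal TypeScript sketch of the output shapes the Stringifier above is written to produce. The name JSON5.stringify comes from the error messages in the code; how the binding is actually exposed to JavaScript is not shown in this diff, so the declaration and call sites below are assumptions for illustration only.

// Hypothetical binding; `JSON5` stands in for whatever wraps the Stringifier above.
declare const JSON5: { stringify(value: unknown, replacer?: null, space?: number | string): string };

// Minified (no space argument): no whitespace, identifier keys unquoted, strings single-quoted.
JSON5.stringify({ ok: true, "a b": "it's" });
// expected shape: {ok:true,'a b':'it\'s'}

// With an indent, every element and property gets a trailing comma (valid JSON5):
JSON5.stringify({ nums: [1, 2] }, null, 2);
// {
//   nums: [
//     1,
//     2,
//   ],
// }

// undefined, symbols and functions become null inside arrays and are skipped as object
// properties; BigInt values and circular structures throw, matching the two global.throw
// calls in stringifyValue above.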

fn exprToJS(expr: Expr, global: *jsc.JSGlobalObject) bun.JSError!jsc.JSValue {
    switch (expr.data) {
        .e_null => return .null,
        .e_boolean => |boolean| return .jsBoolean(boolean.value),
        .e_number => |number| return .jsNumber(number.value),
        .e_string => |str| {
            return str.toJS(bun.default_allocator, global);
        },
        .e_array => |arr| {
            var js_arr = try JSValue.createEmptyArray(global, arr.items.len);
            for (arr.slice(), 0..) |item, _i| {
                const i: u32 = @intCast(_i);
                const value = try exprToJS(item, global);
                try js_arr.putIndex(global, i, value);
            }
            return js_arr;
        },
        .e_object => |obj| {
            var js_obj = JSValue.createEmptyObject(global, obj.properties.len);
            for (obj.properties.slice()) |prop| {
                const key_expr = prop.key.?;
                const value = try exprToJS(prop.value.?, global);
                const key_js = try exprToJS(key_expr, global);
                const key_str = try key_js.toBunString(global);
                defer key_str.deref();
                try js_obj.putMayBeIndex(global, &key_str, value);
            }
            return js_obj;
        },
        else => return .js_undefined,
    }
}
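
exprToJS converts the parsed JSON5 AST into ordinary JavaScript values. Assuming it backs a JSON5.parse-style binding (the binding itself is not shown in this diff), the effect would look roughly like this TypeScript sketch:

// Hypothetical binding name; the diff only shows the Expr-to-JSValue conversion step.
declare const JSON5: { parse(text: string): unknown };

// JSON5 accepts unquoted keys, single quotes, trailing commas and comments; exprToJS
// turns the resulting AST into plain objects, arrays, numbers and strings.
const value = JSON5.parse("{ answer: 42, tags: ['a', 'b',], /* note */ }");
// value → { answer: 42, tags: ["a", "b"] }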

const std = @import("std");

const bun = @import("bun");
const logger = bun.logger;
const json5 = bun.interchange.json5;

const ast = bun.ast;
const Expr = ast.Expr;

const jsc = bun.jsc;
const JSValue = jsc.JSValue;
const ZigString = jsc.ZigString;
const wtf = jsc.wtf;

@@ -42,7 +42,6 @@ pub const Config = struct {
    minify_identifiers: bool = false,
    minify_syntax: bool = false,
    no_macros: bool = false,
    repl_mode: bool = false,

    pub fn fromJS(this: *Config, globalThis: *jsc.JSGlobalObject, object: jsc.JSValue, allocator: std.mem.Allocator) bun.JSError!void {
        if (object.isUndefinedOrNull()) {
@@ -246,10 +245,6 @@ pub const Config = struct {
            this.dead_code_elimination = flag;
        }

        if (try object.getBooleanLoose(globalThis, "replMode")) |flag| {
            this.repl_mode = flag;
        }

        if (try object.getTruthy(globalThis, "minify")) |minify| {
            if (minify.isBoolean()) {
                this.minify_whitespace = minify.toBoolean();
@@ -574,10 +569,7 @@ pub const TransformTask = struct {
    }

    fn finish(this: *TransformTask, promise: *jsc.JSPromise) bun.JSTerminated!void {
        const value = this.output_code.transferToJS(this.global) catch |e| {
            return promise.reject(this.global, this.global.takeException(e));
        };
        return promise.resolve(this.global, value);
        return promise.resolve(this.global, this.output_code.transferToJS(this.global));
    }

    pub fn deinit(this: *TransformTask) void {
@@ -706,8 +698,7 @@ pub fn constructor(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) b
        transpiler.options.macro_remap = config.macro_map;
    }

    // REPL mode disables DCE to preserve expressions like `42`
    transpiler.options.dead_code_elimination = config.dead_code_elimination and !config.repl_mode;
    transpiler.options.dead_code_elimination = config.dead_code_elimination;
    transpiler.options.minify_whitespace = config.minify_whitespace;

    // Keep defaults for these
@@ -726,7 +717,6 @@ pub fn constructor(globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) b
    transpiler.options.inlining = config.runtime.inlining;
    transpiler.options.hot_module_reloading = config.runtime.hot_module_reloading;
    transpiler.options.react_fast_refresh = false;
    transpiler.options.repl_mode = config.repl_mode;

    return this;
}
@@ -748,47 +738,9 @@ pub fn deinit(this: *JSTranspiler) void {
    bun.destroy(this);
}

/// Check if code looks like an object literal that would be misinterpreted as a block
/// Returns true if code starts with { (after whitespace) and doesn't end with ;
/// This matches Node.js REPL behavior for object literal disambiguation
fn isLikelyObjectLiteral(code: []const u8) bool {
    // Skip leading whitespace
    var start: usize = 0;
    while (start < code.len and (code[start] == ' ' or code[start] == '\t' or code[start] == '\n' or code[start] == '\r')) {
        start += 1;
    }

    // Check if starts with {
    if (start >= code.len or code[start] != '{') {
        return false;
    }

    // Skip trailing whitespace
    var end: usize = code.len;
    while (end > 0 and (code[end - 1] == ' ' or code[end - 1] == '\t' or code[end - 1] == '\n' or code[end - 1] == '\r')) {
        end -= 1;
    }

    // Check if ends with semicolon - if so, it's likely a block statement
    if (end > 0 and code[end - 1] == ';') {
        return false;
    }

    return true;
}
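
To illustrate the heuristic described in the doc comment above, here is a rough TypeScript paraphrase plus the REPL behavior it guards against. This is an illustrative sketch, not code from the diff; note that String.prototype.trim strips slightly more whitespace than the space/tab/newline/CR set the Zig version skips.

// Rough TypeScript paraphrase of isLikelyObjectLiteral (illustrative only).
function looksLikeObjectLiteral(code: string): boolean {
  const trimmed = code.trim();
  return trimmed.startsWith("{") && !trimmed.endsWith(";");
}

// Why the wrapping matters: evaluated as a statement, a bare { ... } is a block,
// so the property syntax is read as a label instead of an object literal.
eval("{ a: 1 }");   // 1 (block statement; `a:` is a label)
eval("({ a: 1 })"); // { a: 1 } (object literal)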

fn getParseResult(this: *JSTranspiler, allocator: std.mem.Allocator, code: []const u8, loader: ?Loader, macro_js_ctx: Transpiler.MacroJSValueType) ?Transpiler.ParseResult {
    const name = this.config.default_loader.stdinName();

    // In REPL mode, wrap potential object literals in parentheses
    // If code starts with { and doesn't end with ; it might be an object literal
    // that would otherwise be parsed as a block statement
    const processed_code: []const u8 = if (this.config.repl_mode and isLikelyObjectLiteral(code))
        std.fmt.allocPrint(allocator, "({s})", .{code}) catch code
    else
        code;

    const source = &logger.Source.initPathString(name, processed_code);
    const source = &logger.Source.initPathString(name, code);

    const jsx = if (this.config.tsconfig != null)
        this.config.tsconfig.?.mergeJSX(this.transpiler.options.jsx)

(One file's diff is suppressed because it is too large, and some files are not shown because too many files changed in this diff.)