Compare commits

..

56 Commits

Author SHA1 Message Date
snwy
adf8912fe2 the fix 2024-12-03 18:32:37 -08:00
dave caruso
185bdfbbb6 some fixes 2024-12-03 18:30:59 -08:00
dave caruso
56b729d87b hi 2024-12-03 18:30:59 -08:00
dave caruso
8875454c47 restore launch json 2024-12-03 18:30:59 -08:00
paperdave
f2d5dd1d89 bun run clang-format 2024-12-03 18:30:59 -08:00
Leah Lundqvist
d586211ef2 docs: add .env.test to guides/runtime/set-env for consistency with do… (#15542) 2024-12-03 18:30:59 -08:00
Dylan Conway
b5b033452f fix 14540 (#15498) 2024-12-03 18:30:59 -08:00
Meghan Denny
3e82010aa7 zig: make throw use JSError (#15444) 2024-12-03 18:30:59 -08:00
dave caruso
c94814ed9b THE 2024-12-03 18:30:37 -08:00
Michael H
e82a27dab9 simplify vscode extension title (#15519) 2024-12-03 18:30:37 -08:00
github-actions[bot]
104c864268 deps: update libdeflate to v1.22 (#15505)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-12-03 18:30:37 -08:00
github-actions[bot]
10af40ca29 deps: update lshpack to v2.3.3 (#15501)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-12-03 18:30:37 -08:00
github-actions[bot]
64e2fbc298 deps: update c-ares to v1.34.3 (#15502)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-12-03 18:30:37 -08:00
dave caruso
91d34b9b34 bake(dev): plugins in dev server, with other fixes (#15467)
Co-authored-by: paperdave <paperdave@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-03 18:30:37 -08:00
Michael H
51313c0ab1 bun upgrade --help document --stable option (#15472)
Co-authored-by: RiskyMH <RiskyMH@users.noreply.github.com>
2024-12-03 18:28:48 -08:00
imide
b5e08e0fcc Add musl related documentation to installation.md (#15500) 2024-12-03 18:28:48 -08:00
github-actions[bot]
79a20ccf42 deps: update lolhtml to 4f8becea13a0021c8b71abd2dcc5899384973b66 (#15462)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-03 18:28:47 -08:00
Jarred Sumner
d213e6c524 bump 2024-12-03 18:28:47 -08:00
Jarred Sumner
e184469698 Check for unix:// instead of unix: 2024-12-03 18:28:47 -08:00
Jarred Sumner
724092f958 Fix debugger connection issue on Windows 2024-12-03 18:28:47 -08:00
Jarred Sumner
fad6d5d9ad Clean up debugger waiting logic (#15469) 2024-12-03 18:28:47 -08:00
Jarred Sumner
06361f21a7 use using 2024-12-03 18:28:47 -08:00
Jarred Sumner
13aae6933d Fixes #15470 2024-12-03 18:28:47 -08:00
Ciro Spaciari
6cb1f66d43 fix(net/tls) fix reusePort, allowHalfOpen, FIN before reconnect (#15452)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-03 18:28:47 -08:00
Alistair Smith
ab61a9bf13 fix: Connect with 1 socket to new env var but still work with js debug terminal (#15458) 2024-12-03 18:28:47 -08:00
github-actions[bot]
6c7e447626 deps: update sqlite to 3.470.100 (#15465)
Co-authored-by: Jarred-Sumner <Jarred-Sumner@users.noreply.github.com>
2024-12-03 18:28:47 -08:00
dave caruso
e3e61c1b00 some prod edgecase handling 2024-12-03 18:28:47 -08:00
Jarred Sumner
cd1e1e6aef Stagger the updates 2024-12-03 18:28:47 -08:00
Jarred Sumner
c2f6fc1352 Shorter branch names 2024-12-03 18:28:47 -08:00
Jarred Sumner
24589c63bf Add sqlite3 auto updater script 2024-12-03 18:28:47 -08:00
Jarred Sumner
b8fd80dff3 Update actions 2024-12-03 18:28:47 -08:00
Jarred Sumner
1abab08799 Update update-libarchive.yml 2024-12-03 18:28:47 -08:00
Jarred Sumner
efa2fdb013 Update update-libarchive.yml 2024-12-03 18:28:47 -08:00
Jarred Sumner
6116d160ec github actions 2024-12-03 18:28:46 -08:00
Kai Tamkun
f19d90b82a Fix incorrect public TS class field name minification (#15411) 2024-12-03 18:28:46 -08:00
dave caruso
38b412548b hello 2024-12-03 18:28:46 -08:00
paperdave
ab3aa921ad bun run clang-format 2024-12-03 18:28:46 -08:00
dave caruso
82275c5b36 more work 2024-12-03 18:28:46 -08:00
dave caruso
d1cf9802b6 fix aliased exports 2024-12-03 18:28:46 -08:00
dave caruso
d5827db8f6 update 2024-12-03 18:28:46 -08:00
Dennis Dudek
62ab6ea5a4 bustDirCache on FileSystemRouter.reload & fix of dir_cache keys in windows (#15091)
Co-authored-by: dave caruso <me@paperdave.net>
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-03 18:28:46 -08:00
cdfzo
aae90051d1 docs: fix broken windows contributing guide url (#15451) 2024-12-03 18:28:46 -08:00
Jarred Sumner
c774594bfd Auto-update c-ares, libarchive, libdeflate, lolhtml, lshpack weekly (#15442) 2024-12-03 18:28:46 -08:00
Jarred Sumner
71c4140cd4 Clean up .throwError (#15433) 2024-12-03 18:28:46 -08:00
Meghan Denny
5a21eec6a6 musl: fix test/js/bun/http/serve.test.ts (#15271)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-03 18:28:46 -08:00
Jarred Sumner
8cdd9c218d Deflake next-build test (#15436) 2024-12-03 18:28:46 -08:00
Meghan Denny
3362252f74 zig: make throwInvalidArgumentTypeValue use JSError (#15302)
Co-authored-by: Ciro Spaciari <ciro.spaciari@gmail.com>
2024-12-03 18:28:46 -08:00
snwy
296affbdbb bake: params used when doing static site generation (#15430) 2024-12-03 18:28:46 -08:00
Ciro Spaciari
7f1a0b28cd fix(fetch) fix deref + deinit (#15432) 2024-12-03 18:28:46 -08:00
Jarred Sumner
9808c02f61 Update c-ares (#15435) 2024-12-03 18:28:46 -08:00
Michael H
41590a9d98 better printing for console.log types (#15404) 2024-12-03 18:28:46 -08:00
Meghan Denny
3c87910da5 zig: eliminate errorUnionToCPP (#15416) 2024-12-03 18:28:46 -08:00
Kai Tamkun
183a46b9c3 FFI: provide napi_env explicitly (#15431)
Co-authored-by: Jarred Sumner <jarred@jarredsumner.com>
2024-12-03 18:27:55 -08:00
dave caruso
4828bef7bd a bit more, svelte doesnt fully import yet 2024-12-03 18:27:55 -08:00
dave caruso
150d13868e alright guys 2024-12-03 18:27:55 -08:00
dave caruso
c84196c051 first impl, not stable 2024-12-03 18:27:54 -08:00
2598 changed files with 58299 additions and 97897 deletions

View File

@@ -1,170 +0,0 @@
ARG LLVM_VERSION="18"
ARG REPORTED_LLVM_VERSION="18.1.8"
ARG OLD_BUN_VERSION="1.1.38"
ARG DEFAULT_CFLAGS="-mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -ffunction-sections -fdata-sections -faddrsig -fno-unwind-tables -fno-asynchronous-unwind-tables"
ARG DEFAULT_CXXFLAGS="-flto=full -fwhole-program-vtables -fforce-emit-vtables"
ARG BUILDKITE_AGENT_TAGS="queue=linux,os=linux,arch=${TARGETARCH}"
FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-arm64
FROM --platform=$BUILDPLATFORM ubuntu:18.04 as base-amd64
FROM base-$TARGETARCH as base
ARG LLVM_VERSION
ARG OLD_BUN_VERSION
ARG TARGETARCH
ARG DEFAULT_CXXFLAGS
ARG DEFAULT_CFLAGS
ARG REPORTED_LLVM_VERSION
ENV DEBIAN_FRONTEND=noninteractive \
CI=true \
DOCKER=true
RUN echo "Acquire::Queue-Mode \"host\";" > /etc/apt/apt.conf.d/99-apt-queue-mode.conf \
&& echo "Acquire::Timeout \"120\";" >> /etc/apt/apt.conf.d/99-apt-timeout.conf \
&& echo "Acquire::Retries \"3\";" >> /etc/apt/apt.conf.d/99-apt-retries.conf \
&& echo "APT::Install-Recommends \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-recommends.conf \
&& echo "APT::Install-Suggests \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-suggests.conf
RUN apt-get update && apt-get install -y --no-install-recommends \
wget curl git python3 python3-pip ninja-build \
software-properties-common apt-transport-https \
ca-certificates gnupg lsb-release unzip \
libxml2-dev ruby ruby-dev bison gawk perl make golang \
&& add-apt-repository ppa:ubuntu-toolchain-r/test \
&& apt-get update \
&& apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \
libasan6 libubsan1 libatomic1 libtsan0 liblsan0 \
libgfortran5 libc6-dev \
&& wget https://apt.llvm.org/llvm.sh \
&& chmod +x llvm.sh \
&& ./llvm.sh ${LLVM_VERSION} all \
&& rm llvm.sh
RUN --mount=type=tmpfs,target=/tmp \
cmake_version="3.30.5" && \
if [ "$TARGETARCH" = "arm64" ]; then \
cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-aarch64.sh"; \
else \
cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-x86_64.sh"; \
fi && \
wget -O /tmp/cmake.sh "$cmake_url" && \
sh /tmp/cmake.sh --skip-license --prefix=/usr
RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \
--slave /usr/bin/g++ g++ /usr/bin/g++-13 \
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \
--slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-13 \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-13
RUN echo "ARCH_PATH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64-linux-gnu" || echo "x86_64-linux-gnu")" >> /etc/environment \
&& echo "BUN_ARCH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64" || echo "x64")" >> /etc/environment
ENV LD_LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
CPLUS_INCLUDE_PATH=/usr/include/c++/13:/usr/include/${ARCH_PATH}/c++/13 \
C_INCLUDE_PATH=/usr/lib/gcc/${ARCH_PATH}/13/include \
CFLAGS=${DEFAULT_CFLAGS} \
CXXFLAGS="${DEFAULT_CFLAGS} ${DEFAULT_CXXFLAGS}"
RUN if [ "$TARGETARCH" = "arm64" ]; then \
export ARCH_PATH="aarch64-linux-gnu"; \
else \
export ARCH_PATH="x86_64-linux-gnu"; \
fi \
&& mkdir -p /usr/lib/gcc/${ARCH_PATH}/13 \
&& ln -sf /usr/lib/${ARCH_PATH}/libstdc++.so.6 /usr/lib/gcc/${ARCH_PATH}/13/ \
&& echo "/usr/lib/gcc/${ARCH_PATH}/13" > /etc/ld.so.conf.d/gcc-13.conf \
&& echo "/usr/lib/${ARCH_PATH}" >> /etc/ld.so.conf.d/gcc-13.conf \
&& ldconfig
RUN for f in /usr/lib/llvm-${LLVM_VERSION}/bin/*; do ln -sf "$f" /usr/bin; done \
&& ln -sf /usr/bin/clang-${LLVM_VERSION} /usr/bin/clang \
&& ln -sf /usr/bin/clang++-${LLVM_VERSION} /usr/bin/clang++ \
&& ln -sf /usr/bin/lld-${LLVM_VERSION} /usr/bin/lld \
&& ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \
&& ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \
&& ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \
&& ln -sf /usr/bin/ld.lld /usr/bin/ld \
&& ln -sf /usr/bin/clang /usr/bin/cc \
&& ln -sf /usr/bin/clang++ /usr/bin/c++
ENV CC="clang" \
CXX="clang++" \
AR="llvm-ar-${LLVM_VERSION}" \
RANLIB="llvm-ranlib-${LLVM_VERSION}" \
LD="lld-${LLVM_VERSION}"
RUN --mount=type=tmpfs,target=/tmp \
bash -c '\
set -euxo pipefail && \
source /etc/environment && \
echo "Downloading bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip from https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip" && \
curl -fsSL https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip -o /tmp/bun.zip && \
unzip /tmp/bun.zip -d /tmp/bun && \
mv /tmp/bun/*/bun /usr/bin/bun && \
chmod +x /usr/bin/bun'
ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}
WORKDIR /workspace
FROM --platform=$BUILDPLATFORM base as buildkite
ARG BUILDKITE_AGENT_TAGS
# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
&& export PATH=$HOME/.cargo/bin:$PATH \
&& rustup install nightly \
&& rustup default nightly
RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64"; fi) && \
echo "Downloading buildkite" && \
curl -fsSL "https://github.com/buildkite/agent/releases/download/v3.87.0/buildkite-agent-linux-${ARCH}-3.87.0.tar.gz" -o /tmp/buildkite-agent.tar.gz && \
mkdir -p /tmp/buildkite-agent && \
tar -xzf /tmp/buildkite-agent.tar.gz -C /tmp/buildkite-agent && \
mv /tmp/buildkite-agent/buildkite-agent /usr/bin/buildkite-agent
RUN mkdir -p /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent/cache/bun
COPY ../*/agent.mjs /var/bun/scripts/
ENV BUN_INSTALL_CACHE=/var/lib/buildkite-agent/cache/bun
ENV BUILDKITE_AGENT_TAGS=${BUILDKITE_AGENT_TAGS}
WORKDIR /var/bun/scripts
ENV PATH=/root/.cargo/bin:$PATH
CMD ["bun", "/var/bun/scripts/agent.mjs", "start"]
FROM --platform=$BUILDPLATFORM base as bun-build-linux-local
ARG LLVM_VERSION
WORKDIR /workspace/bun
COPY . /workspace/bun
# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
&& export PATH=$HOME/.cargo/bin:$PATH \
&& rustup install nightly \
&& rustup default nightly
ENV PATH=/root/.cargo/bin:$PATH
ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}
RUN --mount=type=tmpfs,target=/workspace/bun/build \
ls -la \
&& bun run build:release \
&& mkdir -p /target \
&& cp -r /workspace/bun/build/release/bun /target/bun

View File

@@ -1,122 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Check if running as root
if [ "$EUID" -ne 0 ]; then
echo "error: must run as root"
exit 1
fi
# Check OS compatibility
if ! command -v dnf &> /dev/null; then
echo "error: this script requires dnf (RHEL/Fedora/CentOS)"
exit 1
fi
# Ensure /tmp/agent.mjs, /tmp/Dockerfile are present
if [ ! -f /tmp/agent.mjs ] || [ ! -f /tmp/Dockerfile ]; then
# Print each missing file
if [ ! -f /tmp/agent.mjs ]; then
echo "error: /tmp/agent.mjs is missing"
fi
if [ ! -f /tmp/Dockerfile ]; then
echo "error: /tmp/Dockerfile is missing"
fi
exit 1
fi
# Install Docker
dnf update -y
dnf install -y docker
systemctl enable docker
systemctl start docker || {
echo "error: failed to start Docker"
exit 1
}
# Create builder
docker buildx create --name builder --driver docker-container --bootstrap --use || {
echo "error: failed to create Docker buildx builder"
exit 1
}
# Set up Docker to start on boot
cat << 'EOF' > /etc/systemd/system/buildkite-agent.service
[Unit]
Description=Buildkite Docker Container
After=docker.service network-online.target
Requires=docker.service network-online.target
[Service]
TimeoutStartSec=0
Restart=always
RestartSec=5
ExecStartPre=-/usr/bin/docker stop buildkite
ExecStartPre=-/usr/bin/docker rm buildkite
ExecStart=/usr/bin/docker run \
--name buildkite \
--restart=unless-stopped \
--network host \
-v /var/run/docker.sock:/var/run/docker.sock \
-v /tmp:/tmp \
buildkite:latest
[Install]
WantedBy=multi-user.target
EOF
echo "Building Buildkite image"
# Clean up any previous build artifacts
rm -rf /tmp/fakebun
mkdir -p /tmp/fakebun/scripts /tmp/fakebun/.buildkite
# Copy required files
cp /tmp/agent.mjs /tmp/fakebun/scripts/ || {
echo "error: failed to copy agent.mjs"
exit 1
}
cp /tmp/Dockerfile /tmp/fakebun/.buildkite/Dockerfile || {
echo "error: failed to copy Dockerfile"
exit 1
}
cd /tmp/fakebun || {
echo "error: failed to change directory"
exit 1
}
# Build the Buildkite image
docker buildx build \
--platform $(uname -m | sed 's/aarch64/linux\/arm64/;s/x86_64/linux\/amd64/') \
--tag buildkite:latest \
--target buildkite \
-f .buildkite/Dockerfile \
--load \
. || {
echo "error: Docker build failed"
exit 1
}
# Create container to ensure image is cached in AMI
docker container create \
--name buildkite \
--restart=unless-stopped \
buildkite:latest || {
echo "error: failed to create buildkite container"
exit 1
}
# Reload systemd to pick up new service
systemctl daemon-reload
# Enable the service, but don't start it yet
systemctl enable buildkite-agent || {
echo "error: failed to enable buildkite-agent service"
exit 1
}
echo "Bootstrap complete"
echo "To start the Buildkite agent, run: "
echo " systemctl start buildkite-agent"

View File

@@ -13,4 +13,19 @@ steps:
agents:
queue: "build-darwin"
command:
- "node .buildkite/ci.mjs"
- ".buildkite/scripts/prepare-build.sh"
- if: "build.branch == 'main' && !build.pull_request.repository.fork"
label: ":github:"
agents:
queue: "test-darwin"
depends_on:
- "darwin-aarch64-build-bun"
- "darwin-x64-build-bun"
- "linux-aarch64-build-bun"
- "linux-x64-build-bun"
- "linux-x64-baseline-build-bun"
- "windows-x64-build-bun"
- "windows-x64-baseline-build-bun"
command:
- ".buildkite/scripts/upload-release.sh"

File diff suppressed because it is too large

View File

@@ -0,0 +1,11 @@
#!/bin/bash
set -eo pipefail
function run_command() {
set -x
"$@"
{ set +x; } 2>/dev/null
}
run_command node ".buildkite/ci.mjs"

View File

@@ -3,6 +3,10 @@
set -eo pipefail
function assert_main() {
if [ "$RELEASE" == "1" ]; then
echo "info: Skipping canary release because this is a release build"
exit 0
fi
if [ -z "$BUILDKITE_REPO" ]; then
echo "error: Cannot find repository for this build"
exit 1
@@ -190,6 +194,8 @@ function create_release() {
local artifacts=(
bun-darwin-aarch64.zip
bun-darwin-aarch64-profile.zip
bun-darwin-x64.zip
bun-darwin-x64-profile.zip
bun-linux-aarch64.zip
bun-linux-aarch64-profile.zip
bun-linux-x64.zip
@@ -231,7 +237,8 @@ function create_release() {
}
function assert_canary() {
if [ -z "$CANARY" ] || [ "$CANARY" == "0" ]; then
local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
if [ -z "$canary" ] || [ "$canary" == "0" ]; then
echo "warn: Skipping release because this is not a canary build"
exit 0
fi

View File

@@ -3,6 +3,3 @@ Index:
CompileFlags:
CompilationDatabase: build/debug
Diagnostics:
UnusedIncludes: None

View File

@@ -16,6 +16,3 @@ zig-out
build
vendor
node_modules
*.trace
packages/bun-uws/fuzzing

2
.gitignore vendored
View File

@@ -116,10 +116,8 @@ scripts/env.local
sign.*.json
sign.json
src/bake/generated.ts
src/generated_enum_extractor.zig
src/bun.js/bindings-obj
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/bindings/GeneratedBindings.zig
src/bun.js/debug-bindings-obj
src/deps/zig-clap/.gitattributes
src/deps/zig-clap/.github

View File

@@ -5,5 +5,6 @@ test/js/deno
test/node.js
src/react-refresh.js
*.min.js
test/js/node/test/fixtures
test/js/node/test/common
test/snippets
test/js/node/test

211
.vscode/launch.json generated vendored
View File

@@ -16,13 +16,14 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -32,13 +33,14 @@
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "1",
"BUN_DEBUG_jest": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -54,13 +56,14 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -70,13 +73,14 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -86,13 +90,14 @@
"args": ["test", "--watch", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -102,13 +107,14 @@
"args": ["test", "--hot", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -118,6 +124,7 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -125,7 +132,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -140,6 +147,7 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -147,7 +155,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -169,7 +177,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -179,6 +187,7 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
"BUN_DEBUG_IncrementalGraph": "1",
@@ -188,7 +197,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -198,12 +207,13 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "0",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -213,6 +223,7 @@
"args": ["run", "--watch", "${fileBasename}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
// "BUN_DEBUG_DEBUGGER": "1",
// "BUN_DEBUG_INTERNAL_DEBUGGER": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
@@ -221,7 +232,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -231,12 +242,13 @@
"args": ["run", "--hot", "${fileBasename}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -253,7 +265,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -275,7 +287,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -291,13 +303,14 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -307,13 +320,14 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -323,13 +337,14 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -339,13 +354,14 @@
"args": ["test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -355,13 +371,14 @@
"args": ["test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -371,6 +388,7 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -378,7 +396,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -393,6 +411,7 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_DEBUG_jest": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
@@ -400,7 +419,7 @@
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -416,12 +435,47 @@
"args": ["exec", "${input:testName}"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
// bun build [...]
{
"type": "lldb",
"request": "launch",
"name": "bun build [file]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["build", "${fileBasename}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "0",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
"request": "launch",
"name": "bun build --app [file]",
"program": "${workspaceFolder}/build/debug/bun-debug",
"args": ["build", "--app", "${file}"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "0",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
"BUN_DEBUG_production": "1",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
// bun test [*]
{
@@ -432,12 +486,13 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -447,12 +502,13 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "0",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -462,13 +518,14 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
"BUN_INSPECT": "ws://localhost:0/",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
"serverReadyAction": {
"pattern": "https://debug.bun.sh/#localhost:([0-9]+)/",
"uriFormat": "https://debug.bun.sh/#ws://localhost:%s/",
@@ -483,12 +540,13 @@
"args": ["install"],
"cwd": "${fileDirname}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
{
"type": "lldb",
@@ -498,12 +556,13 @@
"args": ["test/runner.node.mjs"],
"cwd": "${workspaceFolder}",
"env": {
"FORCE_COLOR": "1",
"BUN_DEBUG_QUIET_LOGS": "1",
"BUN_GARBAGE_COLLECTOR_LEVEL": "2",
},
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
// Windows: bun test [file]
{
@@ -517,6 +576,10 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -542,6 +605,10 @@
"args": ["test", "--only", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -567,6 +634,10 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -592,6 +663,10 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "0",
@@ -617,6 +692,10 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -651,6 +730,10 @@
"args": ["test", "${file}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -686,6 +769,10 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -711,6 +798,10 @@
"args": ["install"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -732,6 +823,10 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -753,6 +848,10 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -783,6 +882,10 @@
"args": ["run", "${fileBasename}"],
"cwd": "${fileDirname}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -814,6 +917,10 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -839,6 +946,10 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -864,6 +975,10 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "0",
@@ -889,6 +1004,10 @@
"args": ["test", "--watch", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -914,6 +1033,10 @@
"args": ["test", "--hot", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -939,6 +1062,10 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -973,6 +1100,10 @@
"args": ["test", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1008,6 +1139,10 @@
"args": ["exec", "${input:testName}"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1030,6 +1165,10 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1051,6 +1190,10 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1076,6 +1219,10 @@
"args": ["test"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1110,6 +1257,10 @@
"args": ["test/runner.node.mjs"],
"cwd": "${workspaceFolder}",
"environment": [
{
"name": "FORCE_COLOR",
"value": "1",
},
{
"name": "BUN_DEBUG_QUIET_LOGS",
"value": "1",
@@ -1125,7 +1276,7 @@
],
"console": "internalConsole",
// Don't pause when the GC runs while the debugger is open.
"postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
"postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
},
],
"inputs": [
@@ -1140,4 +1291,4 @@
"description": "Usage: bun test [...]",
},
],
}
}

View File

@@ -63,6 +63,7 @@
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"clangd.arguments": ["-header-insertion=never"],
// JavaScript
"prettier.enable": true,

View File

@@ -2,6 +2,11 @@ Configuring a development environment for Bun can take 10-30 minutes depending o
If you are using Windows, please refer to [this guide](/docs/project/building-windows.md)
{% details summary="For Ubuntu users" %}
TL;DR: Ubuntu 22.04 is suggested.
Bun currently requires `glibc >= 2.32` in development, which means that on Ubuntu 20.04 (glibc 2.31) you will likely hit `error: undefined symbol: __libc_single_threaded` and need extra configuration. Also, according to this [issue](https://github.com/llvm/llvm-project/issues/97314), LLVM 16 is no longer maintained for Ubuntu 24.04 (noble), so you may want to install LLVM 16 via `brew` on Ubuntu 24.04 instead.
{% /details %}
## Install Dependencies
Using your system's package manager, install Bun's dependencies:
@@ -53,7 +58,7 @@ $ brew install bun
## Install LLVM
Bun requires LLVM 18 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
Bun requires LLVM 16 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
{% codetabs group="os" %}
@@ -84,7 +89,7 @@ $ sudo zypper install clang16 lld16 llvm16
If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-16.0.6).
Make sure Clang/LLVM 18 is in your path:
Make sure Clang/LLVM 16 is in your path:
```bash
$ which clang-16

2
LATEST
View File

@@ -1 +1 @@
1.1.40
1.1.38

View File

@@ -327,25 +327,6 @@ pub fn build(b: *Build) !void {
.{ .os = .windows, .arch = .x86_64 },
});
}
// zig build translate-c-headers
{
const step = b.step("translate-c", "Copy generated translated-c-headers.zig to zig-out");
step.dependOn(&b.addInstallFile(getTranslateC(b, b.host, .Debug).getOutput(), "translated-c-headers.zig").step);
}
// zig build enum-extractor
{
// const step = b.step("enum-extractor", "Extract enum definitions (invoked by a code generator)");
// const exe = b.addExecutable(.{
// .name = "enum_extractor",
// .root_source_file = b.path("./src/generated_enum_extractor.zig"),
// .target = b.graph.host,
// .optimize = .Debug,
// });
// const run = b.addRunArtifact(exe);
// step.dependOn(&run.step);
}
}
pub fn addMultiCheck(
@@ -386,25 +367,6 @@ pub fn addMultiCheck(
}
}
fn getTranslateC(b: *Build, target: std.Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) *Step.TranslateC {
const translate_c = b.addTranslateC(.{
.root_source_file = b.path("src/c-headers-for-zig.h"),
.target = target,
.optimize = optimize,
.link_libc = true,
});
inline for ([_](struct { []const u8, bool }){
.{ "WINDOWS", translate_c.target.result.os.tag == .windows },
.{ "POSIX", translate_c.target.result.os.tag != .windows },
.{ "LINUX", translate_c.target.result.os.tag == .linux },
.{ "DARWIN", translate_c.target.result.os.tag.isDarwin() },
}) |entry| {
const str, const value = entry;
translate_c.defineCMacroRaw(b.fmt("{s}={d}", .{ str, @intFromBool(value) }));
}
return translate_c;
}
pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
const obj = b.addObject(.{
.name = if (opts.optimize == .Debug) "bun-debug" else "bun",
@@ -452,10 +414,6 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
}
addInternalPackages(b, obj, opts);
obj.root_module.addImport("build_options", opts.buildOptionsModule(b));
const translate_c = getTranslateC(b, opts.target, opts.optimize);
obj.root_module.addImport("translated-c-headers", translate_c.createModule());
return obj;
}

BIN
bun.lockb

Binary file not shown.

View File

@@ -176,10 +176,6 @@ if(LINUX)
DESCRIPTION "Disable relocation read-only (RELRO)"
-Wl,-z,norelro
)
register_compiler_flags(
DESCRIPTION "Disable semantic interposition"
-fno-semantic-interposition
)
endif()
# --- Assertions ---

View File

@@ -291,7 +291,7 @@ function(find_command)
set_property(GLOBAL PROPERTY ${FIND_NAME} "${exe}: ${reason}" APPEND)
if(version)
satisfies_range(${version} ${FIND_VERSION} ${variable})
satisfies_range(${version} ${${FIND_VERSION_VARIABLE}} ${variable})
set(${variable} ${${variable}} PARENT_SCOPE)
endif()
endfunction()

View File

@@ -20,7 +20,7 @@ else()
setx(RELEASE OFF)
endif()
if(CMAKE_BUILD_TYPE MATCHES "Debug")
if(CMAKE_BUILD_TYPE MATCHES "Debug|RelWithDebInfo")
setx(DEBUG ON)
else()
setx(DEBUG OFF)
@@ -67,7 +67,13 @@ optionx(ENABLE_ASSERTIONS BOOL "If debug assertions should be enabled" DEFAULT $
optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ON)
if(ENABLE_CANARY)
if(ENABLE_CANARY AND BUILDKITE)
execute_process(
COMMAND buildkite-agent meta-data get "canary"
OUTPUT_VARIABLE DEFAULT_CANARY_REVISION
OUTPUT_STRIP_TRAILING_WHITESPACE
)
elseif(ENABLE_CANARY)
set(DEFAULT_CANARY_REVISION "1")
else()
set(DEFAULT_CANARY_REVISION "0")

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
oven-sh/boringssl
COMMIT
914b005ef3ece44159dca0ffad74eb42a9f6679f
29a2cd359458c9384694b75456026e4b57e3e567
)
register_cmake_command(

View File

@@ -318,13 +318,13 @@ register_command(
TARGET
bun-bake-codegen
COMMENT
"Bundling Bake Runtime"
"Bundling Kit Runtime"
COMMAND
${BUN_EXECUTABLE}
run
${BUN_BAKE_RUNTIME_CODEGEN_SCRIPT}
--debug=${DEBUG}
--codegen-root=${CODEGEN_PATH}
--codegen_root=${CODEGEN_PATH}
SOURCES
${BUN_BAKE_RUNTIME_SOURCES}
${BUN_BAKE_RUNTIME_CODEGEN_SOURCES}
@@ -334,39 +334,6 @@ register_command(
${BUN_BAKE_RUNTIME_OUTPUTS}
)
set(BUN_BINDGEN_SCRIPT ${CWD}/src/codegen/bindgen.ts)
file(GLOB_RECURSE BUN_BINDGEN_SOURCES ${CONFIGURE_DEPENDS}
${CWD}/src/**/*.bind.ts
)
set(BUN_BINDGEN_CPP_OUTPUTS
${CODEGEN_PATH}/GeneratedBindings.cpp
)
set(BUN_BINDGEN_ZIG_OUTPUTS
${CWD}/src/bun.js/bindings/GeneratedBindings.zig
)
register_command(
TARGET
bun-binding-generator
COMMENT
"Processing \".bind.ts\" files"
COMMAND
${BUN_EXECUTABLE}
run
${BUN_BINDGEN_SCRIPT}
--debug=${DEBUG}
--codegen-root=${CODEGEN_PATH}
SOURCES
${BUN_BINDGEN_SOURCES}
${BUN_BINDGEN_SCRIPT}
OUTPUTS
${BUN_BINDGEN_CPP_OUTPUTS}
${BUN_BINDGEN_ZIG_OUTPUTS}
)
set(BUN_JS_SINK_SCRIPT ${CWD}/src/codegen/generate-jssink.ts)
set(BUN_JS_SINK_SOURCES
@@ -418,6 +385,7 @@ set(BUN_OBJECT_LUT_OUTPUTS
${CODEGEN_PATH}/NodeModuleModule.lut.h
)
macro(WEBKIT_ADD_SOURCE_DEPENDENCIES _source _deps)
set(_tmp)
get_source_file_property(_tmp ${_source} OBJECT_DEPENDS)
@@ -493,7 +461,6 @@ list(APPEND BUN_ZIG_SOURCES
${CWD}/build.zig
${CWD}/root.zig
${CWD}/root_wasm.zig
${BUN_BINDGEN_ZIG_OUTPUTS}
)
set(BUN_ZIG_GENERATED_SOURCES
@@ -515,6 +482,7 @@ endif()
set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
if(APPLE)
set(ZIG_CPU "apple_m1")
@@ -576,7 +544,6 @@ set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")
set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets)
# hand written cpp source files. Full list of "source" code (including codegen) is in BUN_CPP_SOURCES
file(GLOB BUN_CXX_SOURCES ${CONFIGURE_DEPENDS}
${CWD}/src/io/*.cpp
${CWD}/src/bun.js/modules/*.cpp
@@ -601,8 +568,6 @@ file(GLOB BUN_C_SOURCES ${CONFIGURE_DEPENDS}
if(WIN32)
list(APPEND BUN_C_SOURCES ${CWD}/src/bun.js/bindings/windows/musl-memmem.c)
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle.cpp)
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle-binding.cpp)
endif()
register_repository(
@@ -635,14 +600,12 @@ register_command(
list(APPEND BUN_CPP_SOURCES
${BUN_C_SOURCES}
${BUN_CXX_SOURCES}
${BUN_ERROR_CODE_OUTPUTS}
${VENDOR_PATH}/picohttpparser/picohttpparser.c
${NODEJS_HEADERS_PATH}/include/node/node_version.h
${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
${BUN_JS_SINK_OUTPUTS}
${BUN_JAVASCRIPT_OUTPUTS}
${BUN_OBJECT_LUT_OUTPUTS}
${BUN_BINDGEN_CPP_OUTPUTS}
)
if(WIN32)
@@ -652,19 +615,11 @@ if(WIN32)
set(Bun_VERSION_WITH_TAG ${VERSION})
endif()
set(BUN_ICO_PATH ${CWD}/src/bun.ico)
configure_file(${CWD}/src/bun.ico ${CODEGEN_PATH}/bun.ico COPYONLY)
configure_file(
${CWD}/src/windows-app-info.rc
${CODEGEN_PATH}/windows-app-info.rc
@ONLY
)
add_custom_command(
OUTPUT ${CODEGEN_PATH}/windows-app-info.res
COMMAND rc.exe /fo ${CODEGEN_PATH}/windows-app-info.res ${CODEGEN_PATH}/windows-app-info.rc
DEPENDS ${CODEGEN_PATH}/windows-app-info.rc ${CODEGEN_PATH}/bun.ico
COMMENT "Adding Windows resource file ${CODEGEN_PATH}/windows-app-info.res with ico in ${CODEGEN_PATH}/bun.ico"
)
set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.res)
list(APPEND BUN_CPP_SOURCES ${CODEGEN_PATH}/windows-app-info.rc)
endif()
# --- Executable ---
@@ -672,7 +627,7 @@ endif()
set(BUN_CPP_OUTPUT ${BUILD_PATH}/${CMAKE_STATIC_LIBRARY_PREFIX}${bun}${CMAKE_STATIC_LIBRARY_SUFFIX})
if(BUN_LINK_ONLY)
add_executable(${bun} ${BUN_CPP_OUTPUT} ${BUN_ZIG_OUTPUT} ${WINDOWS_RESOURCES})
add_executable(${bun} ${BUN_CPP_OUTPUT} ${BUN_ZIG_OUTPUT})
set_target_properties(${bun} PROPERTIES LINKER_LANGUAGE CXX)
target_link_libraries(${bun} PRIVATE ${BUN_CPP_OUTPUT})
elseif(BUN_CPP_ONLY)
@@ -690,7 +645,7 @@ elseif(BUN_CPP_ONLY)
${BUN_CPP_OUTPUT}
)
else()
add_executable(${bun} ${BUN_CPP_SOURCES} ${WINDOWS_RESOURCES})
add_executable(${bun} ${BUN_CPP_SOURCES})
target_link_libraries(${bun} PRIVATE ${BUN_ZIG_OUTPUT})
endif()
@@ -896,28 +851,48 @@ endif()
if(LINUX)
if(NOT ABI STREQUAL "musl")
# on arm64
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
if(ARCH STREQUAL "aarch64")
target_link_options(${bun} PUBLIC
-Wl,--wrap=fcntl64
-Wl,--wrap=statx
)
endif()
if(ARCH STREQUAL "x64")
target_link_options(${bun} PUBLIC
-Wl,--wrap=fcntl
-Wl,--wrap=fcntl64
-Wl,--wrap=fstat
-Wl,--wrap=fstat64
-Wl,--wrap=fstatat
-Wl,--wrap=fstatat64
-Wl,--wrap=lstat
-Wl,--wrap=lstat64
-Wl,--wrap=mknod
-Wl,--wrap=mknodat
-Wl,--wrap=stat
-Wl,--wrap=stat64
-Wl,--wrap=statx
)
endif()
target_link_options(${bun} PUBLIC
-Wl,--wrap=cosf
-Wl,--wrap=exp
-Wl,--wrap=expf
-Wl,--wrap=fcntl64
-Wl,--wrap=fmod
-Wl,--wrap=fmodf
-Wl,--wrap=log
-Wl,--wrap=log10f
-Wl,--wrap=log2
-Wl,--wrap=log2f
-Wl,--wrap=logf
-Wl,--wrap=pow
-Wl,--wrap=powf
-Wl,--wrap=sincosf
-Wl,--wrap=sinf
-Wl,--wrap=tanf
)
else()
target_link_options(${bun} PUBLIC
-Wl,--wrap=exp
-Wl,--wrap=expf
-Wl,--wrap=log2f
-Wl,--wrap=logf
-Wl,--wrap=powf
)
endif()
endif()
if(NOT ABI STREQUAL "musl")
@@ -946,7 +921,7 @@ if(LINUX)
-Wl,-z,combreloc
-Wl,--no-eh-frame-hdr
-Wl,--sort-section=name
-Wl,--hash-style=both
-Wl,--hash-style=gnu
-Wl,--build-id=sha1 # Better for debugging than default
-Wl,-Map=${bun}.linker-map
)
@@ -958,7 +933,6 @@ if(WIN32)
set(BUN_SYMBOLS_PATH ${CWD}/src/symbols.def)
target_link_options(${bun} PUBLIC /DEF:${BUN_SYMBOLS_PATH})
elseif(APPLE)
set(BUN_SYMBOLS_PATH ${CWD}/src/symbols.txt)
target_link_options(${bun} PUBLIC -exported_symbols_list ${BUN_SYMBOLS_PATH})
else()

View File

@@ -4,7 +4,7 @@ register_repository(
REPOSITORY
c-ares/c-ares
COMMIT
4f4912bce7374f787b10576851b687935f018e17
41ee334af3e3d0027dca5e477855d0244936bd49
)
register_cmake_command(

View File

@@ -18,7 +18,7 @@ register_cmake_command(
-DENABLE_INSTALL=OFF
-DENABLE_TEST=OFF
-DENABLE_WERROR=OFF
-DENABLE_BZip2=OFF
-DENABLE_BZIP2=OFF
-DENABLE_CAT=OFF
-DENABLE_EXPAT=OFF
-DENABLE_ICONV=OFF

View File

@@ -49,8 +49,6 @@ register_command(
CARGO_TERM_VERBOSE=true
CARGO_TERM_DIAGNOSTIC=true
CARGO_ENCODED_RUSTFLAGS=${RUSTFLAGS}
CARGO_HOME=${CARGO_HOME}
RUSTUP_HOME=${RUSTUP_HOME}
)
target_link_libraries(${bun} PRIVATE ${LOLHTML_LIBRARY})

View File

@@ -5,11 +5,6 @@ if(NOT ENABLE_CCACHE OR CACHE_STRATEGY STREQUAL "none")
return()
endif()
if (CI AND NOT APPLE)
setenv(CCACHE_DISABLE 1)
return()
endif()
find_command(
VARIABLE
CCACHE_PROGRAM
@@ -43,8 +38,7 @@ setenv(CCACHE_FILECLONE 1)
setenv(CCACHE_STATSLOG ${BUILD_PATH}/ccache.log)
if(CI)
# FIXME: Does not work on Ubuntu 18.04
# setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,clang_index_store,gcno_cwd,include_file_ctime,include_file_mtime")
setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,clang_index_store,gcno_cwd,include_file_ctime,include_file_mtime")
else()
setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,random_seed,clang_index_store,gcno_cwd")
endif()

View File

@@ -1,18 +1,14 @@
set(DEFAULT_ENABLE_LLVM ON)
# if target is bun-zig, set ENABLE_LLVM to OFF
if(TARGET bun-zig)
set(DEFAULT_ENABLE_LLVM OFF)
endif()
optionx(ENABLE_LLVM BOOL "If LLVM should be used for compilation" DEFAULT ${DEFAULT_ENABLE_LLVM})
optionx(ENABLE_LLVM BOOL "If LLVM should be used for compilation" DEFAULT ON)
if(NOT ENABLE_LLVM)
return()
endif()
set(DEFAULT_LLVM_VERSION "18.1.8")
if(CMAKE_HOST_WIN32 OR CMAKE_HOST_APPLE OR EXISTS "/etc/alpine-release")
set(DEFAULT_LLVM_VERSION "18.1.8")
else()
set(DEFAULT_LLVM_VERSION "16.0.6")
endif()
optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION})
@@ -77,7 +73,7 @@ macro(find_llvm_command variable command)
VERSION_VARIABLE LLVM_VERSION
COMMAND ${commands}
PATHS ${LLVM_PATHS}
VERSION >=${LLVM_VERSION_MAJOR}.1.0
VERSION ${LLVM_VERSION}
)
list(APPEND CMAKE_ARGS -D${variable}=${${variable}})
endmacro()

View File

@@ -1,42 +1,15 @@
if(DEFINED ENV{CARGO_HOME})
set(CARGO_HOME $ENV{CARGO_HOME})
elseif(CMAKE_HOST_WIN32)
set(CARGO_HOME $ENV{USERPROFILE}/.cargo)
if(NOT EXISTS ${CARGO_HOME})
set(CARGO_HOME $ENV{PROGRAMFILES}/Rust/cargo)
endif()
else()
set(CARGO_HOME $ENV{HOME}/.cargo)
endif()
if(DEFINED ENV{RUSTUP_HOME})
set(RUSTUP_HOME $ENV{RUSTUP_HOME})
elseif(CMAKE_HOST_WIN32)
set(RUSTUP_HOME $ENV{USERPROFILE}/.rustup)
if(NOT EXISTS ${RUSTUP_HOME})
set(RUSTUP_HOME $ENV{PROGRAMFILES}/Rust/rustup)
endif()
else()
set(RUSTUP_HOME $ENV{HOME}/.rustup)
endif()
find_command(
VARIABLE
CARGO_EXECUTABLE
COMMAND
cargo
PATHS
${CARGO_HOME}/bin
$ENV{HOME}/.cargo/bin
REQUIRED
OFF
)
if(EXISTS ${CARGO_EXECUTABLE})
if(CARGO_EXECUTABLE MATCHES "^${CARGO_HOME}")
setx(CARGO_HOME ${CARGO_HOME})
setx(RUSTUP_HOME ${RUSTUP_HOME})
endif()
return()
endif()

View File

@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
if(NOT WEBKIT_VERSION)
set(WEBKIT_VERSION 3845bf370ff4e9a5c0b96036255142c7904be963)
set(WEBKIT_VERSION 8f9ae4f01a047c666ef548864294e01df731d4ea)
endif()
if(WEBKIT_LOCAL)

View File

@@ -671,7 +671,7 @@ _bun() {
cmd)
local -a scripts_list
IFS=$'\n' scripts_list=($(SHELL=zsh bun getcompletes i))
scripts="scripts:scripts:((${scripts_list//:/\\\\:}))"
scripts="scripts:scripts:(($scripts_list))"
IFS=$'\n' files_list=($(SHELL=zsh bun getcompletes j))
main_commands=(
@@ -871,8 +871,8 @@ _bun_run_param_script_completion() {
IFS=$'\n' scripts_list=($(SHELL=zsh bun getcompletes s))
IFS=$'\n' bins=($(SHELL=zsh bun getcompletes b))
_alternative "scripts:scripts:((${scripts_list//:/\\\\:}))"
_alternative "bin:bin:((${bins//:/\\\\:}))"
_alternative "scripts:scripts:(($scripts_list))"
_alternative "bin:bin:(($bins))"
_alternative "files:file:_files -g '*.(js|ts|jsx|tsx|wasm)'"
}

View File

@@ -234,7 +234,7 @@ To prefetch a DNS entry, you can use the `dns.prefetch` API. This API is useful
```ts
import { dns } from "bun";
dns.prefetch("bun.sh");
dns.prefetch("bun.sh", 443);
```
#### DNS caching

View File

@@ -771,28 +771,3 @@ console.log(obj); // => { foo: "bar" }
```
Internally, [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone) and [`postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage) serialize and deserialize the same way. This exposes the underlying [HTML Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm) to JavaScript as an ArrayBuffer.
## `estimateShallowMemoryUsageOf` in `bun:jsc`
The `estimateShallowMemoryUsageOf` function returns a best-effort estimate of the memory usage of an object in bytes, excluding the memory usage of properties or other objects it references. For accurate per-object memory usage, use `Bun.generateHeapSnapshot`.
```js
import { estimateShallowMemoryUsageOf } from "bun:jsc";
const obj = { foo: "bar" };
const usage = estimateShallowMemoryUsageOf(obj);
console.log(usage); // => 16
const buffer = Buffer.alloc(1024 * 1024);
estimateShallowMemoryUsageOf(buffer);
// => 1048624
const req = new Request("https://bun.sh");
estimateShallowMemoryUsageOf(req);
// => 167
const array = Array(1024).fill({ a: 1 });
// Arrays are usually not stored contiguously in memory, so this will not return a useful value (which isn't a bug).
estimateShallowMemoryUsageOf(array);
// => 16
```

View File

@@ -279,19 +279,6 @@ $ bun build --compile --asset-naming="[name].[ext]" ./index.ts
To trim down the size of the executable a little, pass `--minify` to `bun build --compile`. This uses Bun's minifier to reduce the code size. Overall though, Bun's binary is still way too big and we need to make it smaller.
## Windows-specific flags
When compiling a standalone executable on Windows, there are two platform-specific options that can be used to customize metadata on the generated `.exe` file:
- `--windows-icon=path/to/icon.ico` to customize the executable file icon.
- `--windows-hide-console` to disable the background terminal, which can be used for applications that do not need a TTY.
{% callout %}
These flags currently cannot be used when cross-compiling because they depend on Windows APIs.
{% /callout %}
## Unsupported CLI arguments
Currently, the `--compile` flag can only accept a single entrypoint at a time and does not support the following flags:

View File

@@ -546,113 +546,6 @@ export type ImportKind =
By design, the manifest is a simple JSON object that can easily be serialized or written to disk. It is also compatible with esbuild's [`metafile`](https://esbuild.github.io/api/#metafile) format. -->
### `env`
Controls how environment variables are handled during bundling. Internally, this uses `define` to inject environment variables into the bundle, but makes it easier to specify the environment variables to inject.
#### `env: "inline"`
Injects environment variables into the bundled output by converting `process.env.FOO` references to string literals containing the actual environment variable values.
{% codetabs group="a" %}
```ts#JavaScript
await Bun.build({
entrypoints: ['./index.tsx'],
outdir: './out',
env: "inline",
})
```
```bash#CLI
$ FOO=bar BAZ=123 bun build ./index.tsx --outdir ./out --env inline
```
{% /codetabs %}
For the input below:
```js#input.js
console.log(process.env.FOO);
console.log(process.env.BAZ);
```
The generated bundle will contain the following code:
```js#output.js
console.log("bar");
console.log("123");
```
#### `env: "PUBLIC_*"` (prefix)
Inlines environment variables matching the given prefix (the part before the `*` character), replacing matching `process.env.*` references with their actual values. This is useful for selectively inlining environment variables for things like public-facing URLs or client-side tokens, without worrying about injecting private credentials into output bundles.
{% codetabs group="a" %}
```ts#JavaScript
await Bun.build({
entrypoints: ['./index.tsx'],
outdir: './out',
// Inline all env vars that start with "ACME_PUBLIC_"
env: "ACME_PUBLIC_*",
})
```
```bash#CLI
$ FOO=bar BAZ=123 ACME_PUBLIC_URL=https://acme.com bun build ./index.tsx --outdir ./out --env 'ACME_PUBLIC_*'
```
{% /codetabs %}
For example, given the following environment variables:
```bash
$ FOO=bar BAZ=123 ACME_PUBLIC_URL=https://acme.com
```
And source code:
```ts#index.tsx
console.log(process.env.FOO);
console.log(process.env.ACME_PUBLIC_URL);
console.log(process.env.BAZ);
```
The generated bundle will contain the following code:
```js
console.log(process.env.FOO);
console.log("https://acme.com");
console.log(process.env.BAZ);
```
#### `env: "disable"`
Disables environment variable injection entirely.
For example, given the following environment variables:
```bash
$ FOO=bar BAZ=123 ACME_PUBLIC_URL=https://acme.com
```
And source code:
```ts#index.tsx
console.log(process.env.FOO);
console.log(process.env.ACME_PUBLIC_URL);
console.log(process.env.BAZ);
```
The generated bundle will contain the following code:
```js
console.log(process.env.FOO);
console.log(process.env.BAZ);
```
### `sourcemap`
Specifies the type of sourcemap to generate.
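For instance, a minimal sketch (option values as listed in the `sourcemap` entry of the Reference below):
```ts
await Bun.build({
  entrypoints: ["./index.tsx"],
  outdir: "./out",
  // "linked" writes a separate .map file and references it from the bundle
  sourcemap: "linked",
});
```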
@@ -1259,7 +1152,7 @@ $ bun build ./index.tsx --outdir ./out --drop=console --drop=debugger --drop=any
### `experimentalCss`
Whether to enable _experimental_ support for bundling CSS files. Defaults to `false`. In 1.2, this property will be deleted, and CSS bundling will always be enabled.
Whether to enable _experimental_ support for bundling CSS files. Defaults to `false`.
This supports bundling CSS files imported from JS, as well as CSS entrypoints.
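For instance, a minimal sketch of opting in (paths are placeholders):
```ts
const result = await Bun.build({
  entrypoints: ["./index.ts", "./styles.css"],
  outdir: "./out",
  // opt into the experimental CSS bundling described above
  experimentalCss: true,
});
```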
@@ -1275,12 +1168,6 @@ const result = await Bun.build({
{% /codetabs %}
### `throw`
If set to `true`, `Bun.build` will throw on build failure. See the section ["Logs and Errors"](#logs-and-errors) for more details on the error message structure.
In 1.2, this will default to `true`, with the previous behavior as `throw: false`
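A minimal sketch of opting in early (the thrown value is an `AggregateError`, as described under "Logs and Errors"):
```ts
try {
  await Bun.build({
    entrypoints: ["./index.tsx"],
    outdir: "./out",
    throw: true,
  });
} catch (e) {
  // With `throw: true`, a failed build rejects instead of returning { success: false }
  console.error(e);
}
```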
## Outputs
The `Bun.build` function returns a `Promise<BuildOutput>`, defined as:
@@ -1420,70 +1307,7 @@ Refer to [Bundler > Executables](https://bun.sh/docs/bundler/executables) for co
## Logs and errors
<!-- 1.2 documentation -->
<!-- On failure, `Bun.build` returns a rejected promise with an `AggregateError`. This can be logged to the console for pretty printing of the error list, or programmatically read with a `try`/`catch` block.
```ts
try {
const result = await Bun.build({
entrypoints: ["./index.tsx"],
outdir: "./out",
});
} catch (e) {
// TypeScript does not allow annotations on the catch clause
const error = e as AggregateError;
console.error("Build Failed");
// Example: Using the built-in formatter
console.error(error);
// Example: Serializing the failure as a JSON string.
console.error(JSON.stringify(error, null, 2));
}
```
{% callout %}
Most of the time, an explicit `try`/`catch` is not needed, as Bun will neatly print uncaught exceptions. It is enough to just use a top-level `await` on the `Bun.build` call.
{% /callout %}
Each item in `error.errors` is an instance of `BuildMessage` or `ResolveMessage` (subclasses of Error), containing detailed information for each error.
```ts
class BuildMessage {
name: string;
position?: Position;
message: string;
level: "error" | "warning" | "info" | "debug" | "verbose";
}
class ResolveMessage extends BuildMessage {
code: string;
referrer: string;
specifier: string;
importKind: ImportKind;
}
```
On build success, the returned object contains a `logs` property, which contains bundler warnings and info messages.
```ts
const result = await Bun.build({
entrypoints: ["./index.tsx"],
outdir: "./out",
});
if (result.logs.length > 0) {
console.warn("Build succeeded with warnings:");
for (const message of result.logs) {
// Bun will pretty print the message object
console.warn(message);
}
}
``` -->
By default, `Bun.build` only throws if invalid options are provided. Read the `success` property to determine if the build was successful; the `logs` property will contain additional details.
`Bun.build` only throws if invalid options are provided. Read the `success` property to determine if the build was successful; the `logs` property will contain additional details.
```ts
const result = await Bun.build({
@@ -1526,27 +1350,6 @@ if (!result.success) {
}
```
In Bun 1.2, throwing an aggregate error like this will become the default behavior. You can opt into it early using the `throw: true` option.
```ts
try {
const result = await Bun.build({
entrypoints: ["./index.tsx"],
outdir: "./out",
});
} catch (e) {
// TypeScript does not allow annotations on the catch clause
const error = e as AggregateError;
console.error("Build Failed");
// Example: Using the built-in formatter
console.error(error);
// Example: Serializing the failure as a JSON string.
console.error(JSON.stringify(error, null, 2));
}
```
## Reference
```ts
@@ -1568,23 +1371,39 @@ interface BuildConfig {
*
* @default "esm"
*/
format?: "esm" | "cjs" | "iife";
format?: /**
* ECMAScript Module format
*/
| "esm"
/**
* CommonJS format
* **Experimental**
*/
| "cjs"
/**
* IIFE format
* **Experimental**
*/
| "iife";
naming?:
| string
| {
chunk?: string;
entry?: string;
asset?: string;
};
}; // | string;
root?: string; // project root
splitting?: boolean; // default true, enable code splitting
plugins?: BunPlugin[];
// manifest?: boolean; // whether to return manifest
external?: string[];
packages?: "bundle" | "external";
publicPath?: string;
define?: Record<string, string>;
// origin?: string; // e.g. http://mydomain.com
loader?: { [k in string]: Loader };
sourcemap?: "none" | "linked" | "inline" | "external" | boolean; // default: "none", true -> "inline"
sourcemap?: "none" | "linked" | "inline" | "external"; // default: "none", true -> "inline"
/**
* package.json `exports` conditions used when resolving imports
*
@@ -1593,18 +1412,6 @@ interface BuildConfig {
* https://nodejs.org/api/packages.html#exports
*/
conditions?: Array<string> | string;
/**
* Controls how environment variables are handled during bundling.
*
* Can be one of:
* - `"inline"`: Injects environment variables into the bundled output by converting `process.env.FOO`
* references to string literals containing the actual environment variable values
* - `"disable"`: Disables environment variable injection entirely
* - A string ending in `*`: Inlines environment variables that match the given prefix.
* For example, `"MY_PUBLIC_*"` will only include env vars starting with "MY_PUBLIC_"
*/
env?: "inline" | "disable" | `${string}*`;
minify?:
| boolean
| {
@@ -1622,6 +1429,20 @@ interface BuildConfig {
* Force emitting @__PURE__ annotations even if minify.whitespace is true.
*/
emitDCEAnnotations?: boolean;
// treeshaking?: boolean;
// jsx?:
// | "automatic"
// | "classic"
// | /* later: "preserve" */ {
// runtime?: "automatic" | "classic"; // later: "preserve"
// /** Only works when runtime=classic */
// factory?: string; // default: "React.createElement"
// /** Only works when runtime=classic */
// fragment?: string; // default: "React.Fragment"
// /** Only works when runtime=automatic */
// importSource?: string; // default: "react"
// };
/**
* Generate bytecode for the output. This can dramatically improve cold
@@ -1634,37 +1455,6 @@ interface BuildConfig {
* @default false
*/
bytecode?: boolean;
/**
* Add a banner to the bundled code such as "use client";
*/
banner?: string;
/**
* Add a footer to the bundled code such as a comment block like
*
* `// made with bun!`
*/
footer?: string;
/**
* **Experimental**
*
* Enable CSS support.
*/
experimentalCss?: boolean;
/**
* Drop function calls to matching property accesses.
*/
drop?: string[];
/**
* When set to `true`, the returned promise rejects with an AggregateError when a build failure happens.
* When set to `false`, the `success` property of the returned object will be `false` when a build failure happens.
*
* This defaults to `false` in Bun 1.1 and will change to `true` in Bun 1.2
* as most usage of `Bun.build` forgets to check for errors.
*/
throw?: boolean;
}
interface BuildOutput {
@@ -1722,3 +1512,32 @@ declare class ResolveMessage {
toString(): string;
}
```
<!--
interface BuildManifest {
inputs: {
[path: string]: {
output: {
path: string;
};
imports: {
path: string;
kind: ImportKind;
external?: boolean;
asset?: boolean; // whether the import defaulted to "file" loader
}[];
};
};
outputs: {
[path: string]: {
type: "chunk" | "entrypoint" | "asset";
inputs: { path: string }[];
imports: {
path: string;
kind: ImportKind;
external?: boolean;
}[];
exports: string[];
};
};
} -->

View File

@@ -2,47 +2,11 @@ Bun provides a universal plugin API that can be used to extend both the _runtime
Plugins intercept imports and perform custom loading logic: reading files, transpiling code, etc. They can be used to add support for additional file types, like `.scss` or `.yaml`. In the context of Bun's bundler, plugins can be used to implement framework-level features like CSS extraction, macros, and client-server code co-location.
## Lifecycle hooks
Plugins can register callbacks to be run at various points in the lifecycle of a bundle:
- [`onStart()`](#onstart): Run once the bundler has started a bundle
- [`onResolve()`](#onresolve): Run before a module is resolved
- [`onLoad()`](#onload): Run before a module is loaded.
- [`onBeforeParse()`](#onbeforeparse): Run zero-copy native addons in the parser thread before a file is parsed.
### Reference
A rough overview of the types (please refer to Bun's `bun.d.ts` for the full type definitions):
```ts
type PluginBuilder = {
onStart(callback: () => void): void;
onResolve: (
args: { filter: RegExp; namespace?: string },
callback: (args: { path: string; importer: string }) => {
path: string;
namespace?: string;
} | void,
) => void;
onLoad: (
args: { filter: RegExp; namespace?: string },
defer: () => Promise<void>,
callback: (args: { path: string }) => {
loader?: Loader;
contents?: string;
exports?: Record<string, any>;
},
) => void;
config: BuildConfig;
};
type Loader = "js" | "jsx" | "ts" | "tsx" | "css" | "json" | "toml";
```
For more complete documentation of the Plugin API, see [Runtime > Plugins](https://bun.sh/docs/runtime/plugins).
## Usage
A plugin is defined as a simple JavaScript object containing a `name` property and a `setup` function.
A plugin is defined as a simple JavaScript object containing a `name` property and a `setup` function. Register a plugin with Bun using the `plugin` function.
```tsx#myPlugin.ts
import type { BunPlugin } from "bun";
@@ -58,343 +22,9 @@ const myPlugin: BunPlugin = {
This plugin can be passed into the `plugins` array when calling `Bun.build`.
```ts
await Bun.build({
Bun.build({
entrypoints: ["./app.ts"],
outdir: "./out",
plugins: [myPlugin],
});
```
## Plugin lifecycle
### Namespaces
`onLoad` and `onResolve` accept an optional `namespace` string. What is a namespace?
Every module has a namespace. Namespaces are used to prefix the import in transpiled code; for instance, a loader with a `filter: /\.yaml$/` and `namespace: "yaml:"` will transform an import from `./myfile.yaml` into `yaml:./myfile.yaml`.
The default namespace is `"file"` and it is not necessary to specify it, for instance: `import myModule from "./my-module.ts"` is the same as `import myModule from "file:./my-module.ts"`.
Other common namespaces are:
- `"bun"`: for Bun-specific modules (e.g. `"bun:test"`, `"bun:sqlite"`)
- `"node"`: for Node.js modules (e.g. `"node:fs"`, `"node:path"`)
### `onStart`
```ts
onStart(callback: () => void): Promise<void> | void;
```
Registers a callback to be run when the bundler starts a new bundle.
```ts
import { plugin } from "bun";
plugin({
name: "onStart example",
setup(build) {
build.onStart(() => {
console.log("Bundle started!");
});
},
});
```
The callback can return a `Promise`. After the bundle process has initialized, the bundler waits until all `onStart()` callbacks have completed before continuing.
For example:
```ts
const result = await Bun.build({
entrypoints: ["./app.ts"],
outdir: "./dist",
sourcemap: "external",
plugins: [
{
name: "Sleep for 10 seconds",
setup(build) {
build.onStart(async () => {
await Bun.sleep(10_000);
});
},
},
{
name: "Log bundle time to a file",
setup(build) {
build.onStart(async () => {
const now = Date.now();
await Bun.$`echo ${now} > bundle-time.txt`;
});
},
},
],
});
```
In the above example, Bun will wait until the first `onStart()` (sleeping for 10 seconds) has completed, _as well as_ the second `onStart()` (writing the bundle time to a file).
Note that `onStart()` callbacks (like every other lifecycle callback) do not have the ability to modify the `build.config` object. If you want to mutate `build.config`, you must do so directly in the `setup()` function.
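For example, a small sketch of the allowed pattern (mutating the config in `setup()`, not in a lifecycle callback):
```ts
import type { BunPlugin } from "bun";

const configTweaks: BunPlugin = {
  name: "config tweaks",
  setup(build) {
    // OK: mutate build.config synchronously inside setup()
    build.config.minify = true;

    build.onStart(() => {
      // Not supported: mutating build.config from a lifecycle callback
      // will not affect the bundle (see the note above).
    });
  },
};
```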
### `onResolve`
```ts
onResolve(
args: { filter: RegExp; namespace?: string },
callback: (args: { path: string; importer: string }) => {
path: string;
namespace?: string;
} | void,
): void;
```
To bundle your project, Bun walks down the dependency tree of all modules in your project. For each imported module, Bun actually has to find and read that module. The "finding" part is known as "resolving" a module.
The `onResolve()` plugin lifecycle callback allows you to configure how a module is resolved.
The first argument to `onResolve()` is an object with a `filter` and [`namespace`](#what-is-a-namespace) property. The filter is a regular expression which is run on the import string. Effectively, these allow you to filter which modules your custom resolution logic will apply to.
The second argument to `onResolve()` is a callback which is run for each module import Bun finds that matches the `filter` and `namespace` defined in the first argument.
The callback receives as input the _path_ to the matching module. The callback can return a _new path_ for the module. Bun will read the contents of the _new path_ and parse it as a module.
For example, redirecting all imports to `images/` to `./public/images/`:
```ts
import { plugin } from "bun";
plugin({
name: "onResolve example",
setup(build) {
build.onResolve({ filter: /.*/, namespace: "file" }, args => {
if (args.path.startsWith("images/")) {
return {
path: args.path.replace("images/", "./public/images/"),
};
}
});
},
});
```
### `onLoad`
```ts
onLoad(
args: { filter: RegExp; namespace?: string },
defer: () => Promise<void>,
callback: (args: { path: string, importer: string, namespace: string, kind: ImportKind }) => {
loader?: Loader;
contents?: string;
exports?: Record<string, any>;
},
): void;
```
After Bun's bundler has resolved a module, it needs to read the contents of the module and parse it.
The `onLoad()` plugin lifecycle callback allows you to modify the _contents_ of a module before it is read and parsed by Bun.
Like `onResolve()`, the first argument to `onLoad()` allows you to filter which modules this invocation of `onLoad()` will apply to.
The second argument to `onLoad()` is a callback which is run for each matching module _before_ Bun loads the contents of the module into memory.
This callback receives as input the _path_ to the matching module, the _importer_ (the module that imported it), the _namespace_ of the module, and the _kind_ of the module.
The callback can return a new `contents` string for the module as well as a new `loader`.
For example:
```ts
import type { BunPlugin } from "bun";
const envPlugin: BunPlugin = {
name: "env plugin",
setup(build) {
build.onLoad({ filter: /env/, namespace: "file" }, args => {
return {
contents: `export default ${JSON.stringify(process.env)}`,
loader: "js",
};
});
},
};
Bun.build({
entrypoints: ["./app.ts"],
outdir: "./dist",
plugins: [envPlugin],
});
// import env from "env"
// env.FOO === "bar"
```
This plugin will transform all imports of the form `import env from "env"` into a JavaScript module that exports the current environment variables.
#### `.defer()`
One of the arguments passed to the `onLoad` callback is a `defer` function. This function returns a `Promise` that is resolved when all _other_ modules have been loaded.
This allows you to delay execution of the `onLoad` callback until all other modules have been loaded.
This is useful for returning the contents of a module that depends on other modules.
##### Example: tracking and reporting unused exports
```ts
import { plugin } from "bun";
plugin({
name: "track imports",
setup(build) {
const transpiler = new Bun.Transpiler();
let trackedImports: Record<string, number> = {};
// Each module that goes through this onLoad callback
// will record its imports in `trackedImports`
build.onLoad({ filter: /\.ts/ }, async ({ path }) => {
const contents = await Bun.file(path).arrayBuffer();
const imports = transpiler.scanImports(contents);
for (const i of imports) {
trackedImports[i.path] = (trackedImports[i.path] || 0) + 1;
}
return undefined;
});
build.onLoad({ filter: /stats\.json/ }, async ({ defer }) => {
// Wait for all files to be loaded, ensuring
// that every file goes through the above `onLoad()` function
// and their imports tracked
await defer();
// Emit JSON containing the stats of each import
return {
contents: `export default ${JSON.stringify(trackedImports)}`,
loader: "json",
};
});
},
});
```
Note that the `.defer()` function currently has the limitation that it can only be called once per `onLoad` callback.
## Native plugins
One of the reasons why Bun's bundler is so fast is that it is written in native code and leverages multi-threading to load and parse modules in parallel.
However, one limitation of plugins written in JavaScript is that JavaScript itself is single-threaded.
Native plugins are written as [NAPI](/docs/node-api) modules and can be run on multiple threads. This allows native plugins to run much faster than JavaScript plugins.
In addition, native plugins can skip unnecessary work such as the UTF-8 -> UTF-16 conversion needed to pass strings to JavaScript.
The following lifecycle hooks are available to native plugins:
- [`onBeforeParse()`](#onbeforeparse): Called on any thread before a file is parsed by Bun's bundler.
Native plugins are NAPI modules which expose lifecycle hooks as C ABI functions.
To create a native plugin, you must export a C ABI function which matches the signature of the native lifecycle hook you want to implement.
### Creating a native plugin in Rust
Native plugins are NAPI modules which expose lifecycle hooks as C ABI functions.
To create a native plugin, you must export a C ABI function which matches the signature of the native lifecycle hook you want to implement.
```bash
bun add -g @napi-rs/cli
napi new
```
Then install this crate:
```bash
cargo add bun-native-plugin
```
Now, inside the `lib.rs` file, we'll use the `bun_native_plugin::bun` proc macro to define a function which
will implement our native plugin.
Here's an example implementing the `onBeforeParse` hook:
```rs
use bun_native_plugin::{define_bun_plugin, OnBeforeParse, bun, Result, anyhow, BunLoader};
use napi_derive::napi;
/// Define the plugin and its name
define_bun_plugin!("replace-foo-with-bar");
/// Here we'll implement `onBeforeParse` with code that replaces all occurrences of
/// `foo` with `bar`.
///
/// We use the #[bun] macro to generate some of the boilerplate code.
///
/// The argument of the function (`handle: &mut OnBeforeParse`) tells
/// the macro that this function implements the `onBeforeParse` hook.
#[bun]
pub fn replace_foo_with_bar(handle: &mut OnBeforeParse) -> Result<()> {
// Fetch the input source code.
let input_source_code = handle.input_source_code()?;
// Get the Loader for the file
let loader = handle.output_loader();
let output_source_code = input_source_code.replace("foo", "bar");
handle.set_output_source_code(output_source_code, BunLoader::BUN_LOADER_JSX);
Ok(())
}
```
And to use it in Bun.build():
```typescript
import myNativeAddon from "./my-native-addon";
Bun.build({
entrypoints: ["./app.tsx"],
plugins: [
{
name: "my-plugin",
setup(build) {
build.onBeforeParse(
{
namespace: "file",
filter: "**/*.tsx",
},
{
napiModule: myNativeAddon,
symbol: "replace_foo_with_bar",
// external: myNativeAddon.getSharedState()
},
);
},
},
],
});
```
### `onBeforeParse`
```ts
onBeforeParse(
args: { filter: RegExp; namespace?: string },
callback: { napiModule: NapiModule; symbol: string; external?: unknown },
): void;
```
This lifecycle callback is run immediately before a file is parsed by Bun's bundler.
As input, it receives the file's contents and can optionally return new source code.
This callback can be called from any thread and so the napi module implementation must be thread-safe.

View File

@@ -695,7 +695,7 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
- In Bun, `minify` can be a boolean or an object.
```ts
await Bun.build({
Bun.build({
entrypoints: ['./index.tsx'],
// enable all minification
minify: true

View File

@@ -47,9 +47,6 @@ registry = "https://registry.yarnpkg.com/"
# Install for production? This is the equivalent to the "--production" CLI argument
production = false
# Save a text-based lockfile? This is equivalent to the "--save-text-lockfile" CLI argument
saveTextLockfile = false
# Disallow changes to lockfile? This is the equivalent to the "--frozen-lockfile" CLI argument
frozenLockfile = false
@@ -57,15 +54,12 @@ frozenLockfile = false
dryRun = true
# Install optionalDependencies (default: true)
# Setting this to false is equivalent to the `--omit=optional` CLI argument
optional = true
# Install local devDependencies (default: true)
# Setting this to false is equivalent to the `--omit=dev` CLI argument
dev = true
# Install peerDependencies (default: true)
# Setting this to false is equivalent to the `--omit=peer` CLI argument
peer = true
# Max number of concurrent lifecycle scripts (default: (cpu count or GOMAXPROCS) x2)
@@ -114,7 +108,6 @@ export interface Install {
scopes: Scopes;
registry: Registry;
production: boolean;
saveTextLockfile: boolean;
frozenLockfile: boolean;
dryRun: boolean;
optional: boolean;

View File

@@ -130,20 +130,6 @@ $ bun install --frozen-lockfile
For more information on Bun's binary lockfile `bun.lockb`, refer to [Package manager > Lockfile](https://bun.sh/docs/install/lockfile).
## Omitting dependencies
To omit dev, peer, or optional dependencies use the `--omit` flag.
```bash
# Exclude "devDependencies" from the installation. This will apply to the
# root package and workspaces if they exist. Transitive dependencies will
# not have "devDependencies".
$ bun install --omit dev
# Install only dependencies from "dependencies"
$ bun install --omit=dev --omit=peer --omit=optional
```
## Dry run
To perform a dry run (i.e. don't actually install anything):
@@ -163,8 +149,7 @@ Bun supports installing dependencies from Git, GitHub, and local or remotely-hos
"lodash": "git+ssh://github.com/lodash/lodash.git#4.17.21",
"moment": "git@github.com:moment/moment.git",
"zod": "github:colinhacks/zod",
"react": "https://registry.npmjs.org/react/-/react-18.2.0.tgz",
"bun-types": "npm:@types/bun"
"react": "https://registry.npmjs.org/react/-/react-18.2.0.tgz"
}
}
```
@@ -188,9 +173,6 @@ peer = true
# equivalent to `--production` flag
production = false
# equivalent to `--save-text-lockfile` flag
saveTextLockfile = false
# equivalent to `--frozen-lockfile` flag
frozenLockfile = false

View File

@@ -2,9 +2,7 @@
name: Configure a private registry for an organization scope with bun install
---
Private registries can be configured using either [`.npmrc`](https://bun.sh/docs/install/npmrc) or [`bunfig.toml`](https://bun.sh/docs/runtime/bunfig#install-registry). While both are supported, we recommend using **bunfig.toml** for enhanced flexibility and Bun-specific options.
To configure a registry for a particular npm scope:
Bun does not read `.npmrc` files; instead private registries are configured via `bunfig.toml`. To configure a registry for a particular npm scope:
```toml#bunfig.toml
[install.scopes]

View File

@@ -30,6 +30,7 @@ Bun implements the vast majority of Jest's matchers, but compatibility isn't 100
Some notable missing features:
- `expect().toMatchInlineSnapshot()`
- `expect().toHaveReturned()`
---

View File

@@ -4,6 +4,10 @@ name: Use snapshot testing in `bun test`
Bun's test runner supports Jest-style snapshot testing via `.toMatchSnapshot()`.
{% callout %}
The `.toMatchInlineSnapshot()` method is not yet supported.
{% /callout %}
```ts#snap.test.ts
import { test, expect } from "bun:test";
@@ -92,4 +96,4 @@ Ran 1 tests across 1 files. [102.00ms]
---
See [Docs > Test Runner > Snapshots](https://bun.sh/docs/test/snapshots) for complete documentation on snapshots with the Bun test runner.
See [Docs > Test Runner > Mocks](https://bun.sh/docs/test/mocks) for complete documentation on mocking with the Bun test runner.

View File

@@ -1,120 +0,0 @@
---
name: "import, require, and test Svelte components with bun test"
---
Bun's [Plugin API](/docs/runtime/plugins) lets you add custom loaders to your project. The `test.preload` option in `bunfig.toml` lets you configure your loader to start before your tests run.
Firstly, install `@testing-library/svelte`, `svelte`, and `@happy-dom/global-registrator`.
```bash
$ bun add @testing-library/svelte svelte@4 @happy-dom/global-registrator
```
Then, save this plugin in your project.
```ts#svelte-loader.js
import { plugin } from "bun";
import { compile } from "svelte/compiler";
import { readFileSync } from "fs";
import { beforeEach, afterEach } from "bun:test";
import { GlobalRegistrator } from "@happy-dom/global-registrator";
beforeEach(async () => {
await GlobalRegistrator.register();
});
afterEach(async () => {
await GlobalRegistrator.unregister();
});
plugin({
name: "svelte loader",
setup(builder) {
builder.onLoad({ filter: /\.svelte(\?[^.]+)?$/ }, ({ path }) => {
try {
const source = readFileSync(
path.substring(
0,
path.includes("?") ? path.indexOf("?") : path.length
),
"utf-8"
);
const result = compile(source, {
filename: path,
generate: "client",
dev: false,
});
return {
contents: result.js.code,
loader: "js",
};
} catch (err) {
throw new Error(`Failed to compile Svelte component: ${err.message}`);
}
});
},
});
```
---
Add this to `bunfig.toml` to tell Bun to preload the plugin, so it loads before your tests run.
```toml#bunfig.toml
[test]
# Tell Bun to load this plugin before your tests run
preload = ["./svelte-loader.js"]
# This also works:
# test.preload = ["./svelte-loader.js"]
```
---
Add an example `.svelte` file in your project.
```html#Counter.svelte
<script>
export let initialCount = 0;
let count = initialCount;
</script>
<button on:click={() => (count += 1)}>+1</button>
```
---
Now you can `import` or `require` `*.svelte` files in your tests, and it will load the Svelte component as a JavaScript module.
```ts#hello-svelte.test.ts
import { test, expect } from "bun:test";
import { render, fireEvent } from "@testing-library/svelte";
import Counter from "./Counter.svelte";
test("Counter increments when clicked", async () => {
const { getByText, component } = render(Counter);
const button = getByText("+1");
// Initial state
expect(component.$$.ctx[0]).toBe(0); // initialCount is the first prop
// Click the increment button
await fireEvent.click(button);
// Check the new state
expect(component.$$.ctx[0]).toBe(1);
});
```
---
Use `bun test` to run your tests.
```bash
$ bun test
```
---

View File

@@ -49,7 +49,7 @@ Next, add these preload scripts to your `bunfig.toml` (you can also have everyth
```toml#bunfig.toml
[test]
preload = ["./happydom.ts", "./testing-library.ts"]
preload = ["happydom.ts", "testing-library.ts"]
```
---
@@ -84,4 +84,4 @@ test('Can use Testing Library', () => {
---
Refer to the [Testing Library docs](https://testing-library.com/), [Happy DOM repo](https://github.com/capricorn86/happy-dom) and [Docs > Test runner > DOM](https://bun.sh/docs/test/dom) for complete documentation on writing browser tests with Bun.
Refer to the [Testing Library docs](https://testing-library.com/), [Happy DOM repo](https://github.com/capricorn86/happy-dom) and [Docs > Test runner > DOM](https://bun.sh/docs/test/dom) for complete documentation on writing browser tests with Bun.

View File

@@ -4,6 +4,10 @@ name: Update snapshots in `bun test`
Bun's test runner supports Jest-style snapshot testing via `.toMatchSnapshot()`.
{% callout %}
The `.toMatchInlineSnapshot()` method is not yet supported.
{% /callout %}
```ts#snap.test.ts
import { test, expect } from "bun:test";
@@ -43,4 +47,4 @@ Ran 1 tests across 1 files. [102.00ms]
---
See [Docs > Test Runner > Snapshots](https://bun.sh/docs/test/snapshots) for complete documentation on snapshots with the Bun test runner.
See [Docs > Test Runner > Mocks](https://bun.sh/docs/test/mocks) for complete documentation on mocking with the Bun test runner.

View File

@@ -55,13 +55,6 @@ To install dependencies without allowing changes to lockfile (useful on CI):
$ bun install --frozen-lockfile
```
To exclude dependency types from installing, use `--omit` with `dev`, `optional`, or `peer`:
```bash
# Disable devDependencies and optionalDependencies
$ bun install --omit=dev --omit=optional
```
To perform a dry run (i.e. don't actually install anything):
```bash
@@ -93,9 +86,6 @@ peer = true
# equivalent to `--production` flag
production = false
# equivalent to `--save-text-lockfile` flag
saveTextLockfile = false
# equivalent to `--frozen-lockfile` flag
frozenLockfile = false

View File

@@ -72,24 +72,6 @@ $ bun install --yarn
print = "yarn"
```
### Text-based lockfile
Bun v1.1.39 introduced `bun.lock`, a JSONC formatted lockfile. `bun.lock` is human-readable and git-diffable without configuration, at [no cost to performance](https://bun.sh/blog/bun-lock-text-lockfile#cached-bun-install-gets-30-faster).
To generate the lockfile, use `--save-text-lockfile` with `bun install`. You can do this for new projects and existing projects already using `bun.lockb` (resolutions will be preserved).
```bash
$ bun install --save-text-lockfile
$ head -n3 bun.lock
{
"lockfileVersion": 0,
"workspaces": {
```
Once `bun.lock` is generated, Bun will use it for all subsequent installs and updates through commands that read and modify the lockfile. If both lockfiles exist, `bun.lock` will be chosen over `bun.lockb`.
Bun v1.2.0 will switch the default lockfile format to `bun.lock`.
{% /codetabs %}
{% details summary="Configuring lockfile" %}

View File

@@ -6,7 +6,7 @@ Bun supports loading configuration options from [`.npmrc`](https://docs.npmjs.co
{% /callout %}
## Supported options
# Supported options
### `registry`: Set the default registry

View File

@@ -402,9 +402,6 @@ export default {
page("project/building-windows", "Building Windows", {
description: "Learn how to setup a development environment for contributing to the Windows build of Bun.",
}),
page("project/bindgen", "Bindgen", {
description: "About the bindgen code generator",
}),
page("project/licensing", "License", {
description: `Bun is a MIT-licensed project with a large number of statically-linked dependencies with various licenses.`,
}),

View File

@@ -1,225 +0,0 @@
{% callout %}
This document is for maintainers and contributors to Bun, and describes internal implementation details.
{% /callout %}
The new bindings generator, introduced to the codebase in December 2024, scans for `*.bind.ts` files to find function and class definitions, and generates glue code for interop between JavaScript and native code.
There are currently other code generators and systems that achieve similar
purposes. The following will all eventually be completely phased out in favor of
this one:
- "Classes generator", converting `*.classes.ts` for custom classes.
- "JS2Native", allowing ad-hoc calls from `src/js` to native code.
## Creating JS Functions in Zig
Given a file implementing a simple function, such as `add`
```zig#src/bun.js/math.zig
pub fn add(global: *JSC.JSGlobalObject, a: i32, b: i32) !i32 {
return std.math.add(i32, a, b) catch {
// Binding functions can return `error.OutOfMemory` and `error.JSError`.
// Others like `error.Overflow` from `std.math.add` must be converted.
// Remember to be descriptive.
return global.throwPretty("Integer overflow while adding", .{});
};
}
const gen = bun.gen.math; // "math" being this file's basename
const std = @import("std");
const bun = @import("root").bun;
const JSC = bun.JSC;
```
Then describe the API schema in a `.bind.ts` file. The binding file goes next to the Zig file.
```ts#src/bun.js/math.bind.ts
import { t, fn } from 'bindgen';
export const add = fn({
args: {
global: t.globalObject,
a: t.i32,
b: t.i32.default(1),
},
ret: t.i32,
});
```
This function declaration is equivalent to:
```ts
/**
* Throws if zero arguments are provided.
* Wraps out of range numbers using modulo.
*/
declare function add(a: number, b: number = 1): number;
```
The code generator will provide `bun.gen.math.jsAdd`, which is the native
function implementation. To pass to JavaScript, use
`bun.gen.math.createAddCallback(global)`. JS files in `src/js/` may use
`$bindgenFn("math.bind.ts", "add")` to get a handle to the implementation.
## Strings
The type for receiving strings is one of [`t.DOMString`](https://webidl.spec.whatwg.org/#idl-DOMString), [`t.ByteString`](https://webidl.spec.whatwg.org/#idl-ByteString), and [`t.USVString`](https://webidl.spec.whatwg.org/#idl-USVString). These map directly to their WebIDL counterparts, and have slightly different conversion logic. Bindgen will pass BunString to native code in all cases.
When in doubt, use DOMString.
`t.UTF8String` can be used in place of `t.DOMString`, but will call `bun.String.toUTF8`. The native callback receives `[]const u8` (WTF-8 data), which is freed after the function returns.
TLDRs from WebIDL spec:
- ByteString can only contain valid latin1 characters. It is not safe to assume bun.String is already in 8-bit format, but it is extremely likely.
- USVString will not contain unpaired surrogates, i.e. text that can be represented correctly in UTF-8.
- DOMString is the loosest but also most recommended strategy.
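A small hypothetical schema illustrating the choice (the function and argument names are made up; the types are those described above):
```ts
import { t, fn } from "bindgen";

export const normalizeText = fn({
  args: {
    global: t.globalObject,
    // DOMString: the recommended default for general text
    text: t.DOMString,
    // UTF8String: the native side receives []const u8 (WTF-8), freed after the call
    label: t.UTF8String,
  },
  ret: t.DOMString,
});
```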
## Function Variants
A function schema can specify multiple variants (also known as overloads) via the `variants` key.
```ts#src/bun.js/math.bind.ts
import { t, fn } from 'bindgen';
export const action = fn({
variants: [
{
args: {
a: t.i32,
},
ret: t.i32,
},
{
args: {
a: t.DOMString,
},
ret: t.DOMString,
},
]
});
```
In Zig, each variant gets a number based on the order in which the schema defines it.
```zig
fn action1(a: i32) i32 {
return a;
}
fn action2(a: bun.String) bun.String {
return a;
}
```
## `t.dictionary`
A `dictionary` defines a plain JavaScript object, typically used as a function input. For function outputs, it is usually better to declare a class type, which supports methods and destructuring.
## Enumerations
To use [WebIDL's enumeration](https://webidl.spec.whatwg.org/#idl-enums) type, use either:
- `t.stringEnum`: Create and codegen a new enum type.
- `t.zigEnum`: Derive a bindgen type off of an existing enum in the codebase.
An example of `stringEnum` as used in `fmt.zig` / `bun:internal-for-testing`:
```ts
export const Formatter = t.stringEnum(
"highlight-javascript",
"escape-powershell",
);
export const fmtString = fn({
args: {
global: t.globalObject,
code: t.UTF8String,
formatter: Formatter,
},
ret: t.DOMString,
});
```
WebIDL strongly encourages using kebab case for enumeration values, to be consistent with existing Web APIs.
### Deriving enums from Zig code
TODO: zigEnum
## `t.oneOf`
A `oneOf` is a union between two or more types. It is represented by `union(enum)` in Zig.
TODO:
## Attributes
There is a set of attributes that can be chained onto `t.*` types. Available on all types are:
- `.required`, in dictionary parameters only
- `.optional`, in function arguments only
- `.default(T)`
When a value is optional, it is lowered to a Zig optional.
Depending on the type, more attributes are available; see the type definitions in auto-complete for details. Note that only one of the above three can be applied, and it must come at the end of the chain.
### Integer Attributes
Integer types allow customizing the overflow behavior with `clamp` or `enforceRange`:
```ts
import { t, fn } from "bindgen";
export const add = fn({
args: {
global: t.globalObject,
// enforce in i32 range
a: t.i32.enforceRange(),
// clamp to u16 range
b: t.u16,
// enforce in arbitrary range, with a default if not provided
c: t.i32.enforceRange(0, 1000).default(5),
// clamp to arbitrary range, or null
d: t.u16.clamp(0, 10).optional,
},
ret: t.i32,
});
```
Various Node.js validator functions such as `validateInteger`, `validateNumber`, and more are available. Use these when implementing Node.js APIs, so the error messages match 1:1 what Node would do.
Unlike `enforceRange`, which is taken from WebIDL, the `validate*` functions are much stricter about the input they accept. For example, Node's numerical validators check `typeof value === 'number'`, while WebIDL uses `ToNumber` for lossy conversion.
```ts
import { t, fn } from "bindgen";
export const add = fn({
args: {
global: t.globalObject,
// throw if not given a number
a: t.f64.validateNumber(),
// valid in i32 range
b: t.i32.validateInt32(),
// f64 within safe integer range
c: t.f64.validateInteger(),
// f64 in given range
d: t.f64.validateNumber(-10000, 10000),
},
ret: t.i32,
});
```
## Callbacks
TODO
## Classes
TODO

View File

@@ -238,17 +238,6 @@ By default Bun uses caret ranges; if the `latest` version of a package is `2.4.1
exact = false
```
### `install.saveTextLockfile`
Generate `bun.lock`, a human-readable text-based lockfile. Once generated, Bun will use this file instead of `bun.lockb`, choosing it over the binary lockfile if both are present.
Default `false`. In Bun v1.2.0 the default lockfile format will change to `bun.lock`.
```toml
[install]
saveTextLockfile = true
```
<!--
### `install.prefer`

View File

@@ -259,7 +259,6 @@ await Bun.build({
conditions: ["react-server"],
target: "bun",
entryPoints: ["./app/foo/route.js"],
throw: true,
});
```

View File

@@ -307,7 +307,7 @@ await import("my-object-virtual-module"); // { baz: "quix" }
Plugins can read and write to the [build config](https://bun.sh/docs/bundler#api) with `build.config`.
```ts
await Bun.build({
Bun.build({
entrypoints: ["./app.ts"],
outdir: "./dist",
sourcemap: "external",
@@ -324,7 +324,6 @@ await Bun.build({
},
},
],
throw: true,
});
```
@@ -333,7 +332,7 @@ await Bun.build({
**NOTE**: Plugin lifecycle callbacks (`onStart()`, `onResolve()`, etc.) do not have the ability to modify the `build.config` object. If you want to mutate `build.config`, you must do so directly in the `setup()` function:
```ts
await Bun.build({
Bun.build({
entrypoints: ["./app.ts"],
outdir: "./dist",
sourcemap: "external",
@@ -351,13 +350,12 @@ await Bun.build({
},
},
],
throw: true,
});
```
{% /callout %}
## Lifecycle hooks
## Lifecycle callbacks
Plugins can register callbacks to be run at various points in the lifecycle of a bundle:
@@ -365,8 +363,6 @@ Plugins can register callbacks to be run at various points in the lifecycle of a
- [`onResolve()`](#onresolve): Run before a module is resolved
- [`onLoad()`](#onload): Run before a module is loaded.
### Reference
A rough overview of the types (please refer to Bun's `bun.d.ts` for the full type definitions):
```ts
@@ -555,3 +551,55 @@ plugin({
```
This plugin will transform all imports of the form `import env from "env"` into a JavaScript module that exports the current environment variables.
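A hypothetical usage sketch of the virtual module this plugin provides (assuming the plugin has been registered before this file is loaded):
```ts
import env from "env";

// The default export is a snapshot of process.env taken when the module was loaded
console.log(env.NODE_ENV);
```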
#### `.defer()`
One of the arguments passed to the `onLoad` callback is a `defer` function. This function returns a `Promise` that is resolved when all _other_ modules have been loaded.
This allows you to delay execution of the `onLoad` callback until all other modules have been loaded.
This is useful for returning the contents of a module that depends on other modules.
##### Example: tracking and reporting unused exports
```ts
import { plugin } from "bun";
plugin({
name: "track imports",
setup(build) {
const transpiler = new Bun.Transpiler();
let trackedImports: Record<string, number> = {};
// Each module that goes through this onLoad callback
// will record its imports in `trackedImports`
build.onLoad({ filter: /\.ts/ }, async ({ path }) => {
const contents = await Bun.file(path).arrayBuffer();
const imports = transpiler.scanImports(contents);
for (const i of imports) {
trackedImports[i.path] = (trackedImports[i.path] || 0) + 1;
}
return undefined;
});
build.onLoad({ filter: /stats\.json/ }, async ({ defer }) => {
// Wait for all files to be loaded, ensuring
// that every file goes through the above `onLoad()` function
// and their imports tracked
await defer();
// Emit JSON containing the stats of each import
return {
contents: `export default ${JSON.stringify(trackedImports)}`,
loader: "json",
};
});
},
});
```
Note that the `.defer()` function currently has the limitation that it can only be called once per `onLoad` callback.

View File

@@ -531,17 +531,17 @@ Bun implements the following matchers. Full Jest compatibility is on the roadmap
---
-
-
- [`.toMatchInlineSnapshot()`](https://jestjs.io/docs/expect#tomatchinlinesnapshotpropertymatchers-inlinesnapshot)
---
-
-
- [`.toThrowErrorMatchingSnapshot()`](https://jestjs.io/docs/expect#tothrowerrormatchingsnapshothint)
---
-
-
- [`.toThrowErrorMatchingInlineSnapshot()`](https://jestjs.io/docs/expect#tothrowerrormatchinginlinesnapshotinlinesnapshot)
{% /table %}

View File

@@ -14,7 +14,5 @@
<true/>
<key>com.apple.security.get-task-allow</key>
<true/>
<key>com.apple.security.cs.debugger</key>
<true/>
</dict>
</plist>
</plist>

View File

@@ -1,7 +0,0 @@
process handle -p true -s false -n false SIGUSR1
command script import misctools/lldb/lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std
command script import misctools/lldb/lldb_webkit.py

View File

@@ -1,733 +0,0 @@
# https://github.com/ziglang/zig/blob/master/tools/lldb_pretty_printers.py
# pretty printing for the zig language, zig standard library, and zig stage 2 compiler.
# put commands in ~/.lldbinit to run them automatically when starting lldb
# `command script import /path/to/zig/tools/lldb_pretty_printers.py` to import this file
# `type category enable zig.lang` to enable pretty printing for the zig language
# `type category enable zig.std` to enable pretty printing for the zig standard library
# `type category enable zig.stage2` to enable pretty printing for the zig stage 2 compiler
import lldb
import re
page_size = 1 << 12
def log2_int(i): return i.bit_length() - 1
# Define Zig Language
zig_keywords = {
'addrspace',
'align',
'allowzero',
'and',
'anyframe',
'anytype',
'asm',
'async',
'await',
'break',
'callconv',
'catch',
'comptime',
'const',
'continue',
'defer',
'else',
'enum',
'errdefer',
'error',
'export',
'extern',
'fn',
'for',
'if',
'inline',
'noalias',
'noinline',
'nosuspend',
'opaque',
'or',
'orelse',
'packed',
'pub',
'resume',
'return',
'linksection',
'struct',
'suspend',
'switch',
'test',
'threadlocal',
'try',
'union',
'unreachable',
'usingnamespace',
'var',
'volatile',
'while',
}
zig_primitives = {
'anyerror',
'anyframe',
'anyopaque',
'bool',
'c_int',
'c_long',
'c_longdouble',
'c_longlong',
'c_short',
'c_uint',
'c_ulong',
'c_ulonglong',
'c_ushort',
'comptime_float',
'comptime_int',
'f128',
'f16',
'f32',
'f64',
'f80',
'false',
'isize',
'noreturn',
'null',
'true',
'type',
'undefined',
'usize',
'void',
}
zig_integer_type = re.compile('[iu][1-9][0-9]+')
zig_identifier_regex = re.compile('[A-Z_a-z][0-9A-Z_a-z]*')
def zig_IsVariableName(string): return string != '_' and string not in zig_keywords and string not in zig_primitives and not zig_integer_type.fullmatch(string) and zig_identifier_regex.fullmatch(string)
def zig_IsFieldName(string): return string not in zig_keywords and zig_identifier_regex.fullmatch(string)
class zig_Slice_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
try:
self.ptr = self.value.GetChildMemberWithName('ptr')
self.len = self.value.GetChildMemberWithName('len').unsigned if self.ptr.unsigned > page_size else 0
self.elem_type = self.ptr.type.GetPointeeType()
self.elem_size = self.elem_type.size
except: pass
def has_children(self): return True
def num_children(self): return self.len or 0
def get_child_index(self, name):
try: return int(name.removeprefix('[').removesuffix(']'))
except: return -1
def get_child_at_index(self, index):
if index not in range(self.len): return None
try: return self.ptr.CreateChildAtOffset('[%d]' % index, index * self.elem_size, self.elem_type)
except: return None
def zig_String_decode(value, offset=0, length=None):
try:
value = value.GetNonSyntheticValue()
data = value.GetChildMemberWithName('ptr').GetPointeeData(offset, length if length is not None else value.GetChildMemberWithName('len').unsigned)
b = bytes(data.uint8)
b = b.replace(b'\\', b'\\\\')
b = b.replace(b'\n', b'\\n')
b = b.replace(b'\r', b'\\r')
b = b.replace(b'\t', b'\\t')
b = b.replace(b'"', b'\\"')
b = b.replace(b'\'', b'\\\'')
s = b.decode(encoding='ascii', errors='backslashreplace')
return s if s.isprintable() else ''.join((c if c.isprintable() else '\\x%02x' % ord(c) for c in s))
except: return None
def zig_String_SummaryProvider(value, _=None): return '"%s"' % zig_String_decode(value)
def zig_String_AsIdentifier(value, pred):
string = zig_String_decode(value)
return string if pred(string) else '@"%s"' % string
class zig_Optional_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
try:
self.child = self.value.GetChildMemberWithName('some').unsigned == 1 and self.value.GetChildMemberWithName('data').Clone('child')
except: pass
def has_children(self): return bool(self.child)
def num_children(self): return int(self.child)
def get_child_index(self, name): return 0 if self.child and (name == 'child' or name == '?') else -1
def get_child_at_index(self, index): return self.child if self.child and index == 0 else None
def zig_Optional_SummaryProvider(value, _=None):
child = value.GetChildMemberWithName('child')
return child or 'null'
class zig_ErrorUnion_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
try:
self.error_set = self.value.GetChildMemberWithName('tag').Clone('error_set')
self.payload = self.value.GetChildMemberWithName('value').Clone('payload') if self.error_set.unsigned == 0 else None
except: pass
def has_children(self): return True
def num_children(self): return 1
def get_child_index(self, name): return 0 if name == ('payload' if self.payload else 'error_set') else -1
def get_child_at_index(self, index): return self.payload or self.error_set if index == 0 else None
class zig_TaggedUnion_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
try:
self.tag = self.value.GetChildMemberWithName('tag')
self.payload = self.value.GetChildMemberWithName('payload').GetChildMemberWithName(self.tag.value)
except: pass
def has_children(self): return True
def num_children(self): return 1 + (self.payload is not None)
def get_child_index(self, name):
try: return ('tag', 'payload').index(name)
except: return -1
def get_child_at_index(self, index): return (self.tag, self.payload)[index] if index in range(2) else None
# Define Zig Standard Library
class std_SegmentedList_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
try:
self.prealloc_segment = self.value.GetChildMemberWithName('prealloc_segment')
self.dynamic_segments = zig_Slice_SynthProvider(self.value.GetChildMemberWithName('dynamic_segments'))
self.dynamic_segments.update()
self.len = self.value.GetChildMemberWithName('len').unsigned
except: pass
def has_children(self): return True
def num_children(self): return self.len
def get_child_index(self, name):
try: return int(name.removeprefix('[').removesuffix(']'))
except: return -1
def get_child_at_index(self, index):
try:
if index not in range(self.len): return None
prealloc_item_count = len(self.prealloc_segment)
if index < prealloc_item_count: return self.prealloc_segment.child[index]
prealloc_exp = prealloc_item_count.bit_length() - 1
shelf_index = log2_int(index + 1) if prealloc_item_count == 0 else log2_int(index + prealloc_item_count) - prealloc_exp - 1
shelf = self.dynamic_segments.get_child_at_index(shelf_index)
box_index = (index + 1) - (1 << shelf_index) if prealloc_item_count == 0 else index + prealloc_item_count - (1 << ((prealloc_exp + 1) + shelf_index))
elem_type = shelf.type.GetPointeeType()
return shelf.CreateChildAtOffset('[%d]' % index, box_index * elem_type.size, elem_type)
except: return None
class std_MultiArrayList_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
try:
self.len = 0
value_type = self.value.type
for helper in self.value.target.FindFunctions('%s.dbHelper' % value_type.name, lldb.eFunctionNameTypeFull):
ptr_self_type, ptr_child_type, ptr_field_type, ptr_entry_type = helper.function.type.GetFunctionArgumentTypes()
if ptr_self_type.GetPointeeType() == value_type: break
else: return
self.entry_type = ptr_entry_type.GetPointeeType()
self.bytes = self.value.GetChildMemberWithName('bytes')
self.len = self.value.GetChildMemberWithName('len').unsigned
self.capacity = self.value.GetChildMemberWithName('capacity').unsigned
except: pass
def has_children(self): return True
def num_children(self): return self.len
def get_child_index(self, name):
try: return int(name.removeprefix('[').removesuffix(']'))
except: return -1
def get_child_at_index(self, index):
try:
if index not in range(self.len): return None
offset = 0
data = lldb.SBData()
for field in self.entry_type.fields:
field_type = field.type.GetPointeeType()
field_size = field_type.size
data.Append(self.bytes.CreateChildAtOffset(field.name, offset + index * field_size, field_type).address_of.data)
offset += self.capacity * field_size
return self.bytes.CreateValueFromData('[%d]' % index, data, self.entry_type)
except: return None
class std_MultiArrayList_Slice_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
try:
self.len = 0
value_type = self.value.type
for helper in self.value.target.FindFunctions('%s.dbHelper' % value_type.name, lldb.eFunctionNameTypeFull):
ptr_self_type, ptr_child_type, ptr_field_type, ptr_entry_type = helper.function.type.GetFunctionArgumentTypes()
if ptr_self_type.GetPointeeType() == value_type: break
else: return
self.fields = {member.name: index for index, member in enumerate(ptr_field_type.GetPointeeType().enum_members)}
self.entry_type = ptr_entry_type.GetPointeeType()
self.ptrs = self.value.GetChildMemberWithName('ptrs')
self.len = self.value.GetChildMemberWithName('len').unsigned
self.capacity = self.value.GetChildMemberWithName('capacity').unsigned
except: pass
def has_children(self): return True
def num_children(self): return self.len
def get_child_index(self, name):
try: return int(name.removeprefix('[').removesuffix(']'))
except: return -1
def get_child_at_index(self, index):
try:
if index not in range(self.len): return None
data = lldb.SBData()
for field in self.entry_type.fields:
field_type = field.type.GetPointeeType()
data.Append(self.ptrs.child[self.fields[field.name.removesuffix('_ptr')]].CreateChildAtOffset(field.name, index * field_type.size, field_type).address_of.data)
return self.ptrs.CreateValueFromData('[%d]' % index, data, self.entry_type)
except: return None
class std_HashMapUnmanaged_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
try:
self.capacity = 0
self.indices = tuple()
self.metadata = self.value.GetChildMemberWithName('metadata')
if not self.metadata.unsigned: return
value_type = self.value.type
for helper in self.value.target.FindFunctions('%s.dbHelper' % value_type.name, lldb.eFunctionNameTypeFull):
ptr_self_type, ptr_hdr_type, ptr_entry_type = helper.function.type.GetFunctionArgumentTypes()
if ptr_self_type.GetPointeeType() == value_type: break
else: return
self.entry_type = ptr_entry_type.GetPointeeType()
hdr_type = ptr_hdr_type.GetPointeeType()
hdr = self.metadata.CreateValueFromAddress('header', self.metadata.deref.load_addr - hdr_type.size, hdr_type)
self.values = hdr.GetChildMemberWithName('values')
self.keys = hdr.GetChildMemberWithName('keys')
self.capacity = hdr.GetChildMemberWithName('capacity').unsigned
self.indices = tuple(i for i, value in enumerate(self.metadata.GetPointeeData(0, self.capacity).sint8) if value < 0)
except: pass
def has_children(self): return True
def num_children(self): return len(self.indices)
def get_capacity(self): return self.capacity
def get_child_index(self, name):
try: return int(name.removeprefix('[').removesuffix(']'))
except: return -1
def get_child_at_index(self, index):
try:
fields = {name: base.CreateChildAtOffset(name, self.indices[index] * pointee_type.size, pointee_type).address_of.data for name, base, pointee_type in ((name, base, base.type.GetPointeeType()) for name, base in (('key_ptr', self.keys), ('value_ptr', self.values)))}
data = lldb.SBData()
for field in self.entry_type.fields: data.Append(fields[field.name])
return self.metadata.CreateValueFromData('[%d]' % index, data, self.entry_type)
except: return None
def std_HashMapUnmanaged_SummaryProvider(value, _=None):
synth = std_HashMapUnmanaged_SynthProvider(value.GetNonSyntheticValue(), _)
synth.update()
return 'len=%d capacity=%d' % (synth.num_children(), synth.get_capacity())
# formats a struct of fields of the form `name_ptr: *Type` by auto dereferencing its fields
class std_Entry_SynthProvider:
def __init__(self, value, _=None): self.value = value
def update(self):
try:
self.children = tuple(child.Clone(child.name.removesuffix('_ptr')) for child in self.value.children if child.type.GetPointeeType().size != 0)
self.indices = {child.name: i for i, child in enumerate(self.children)}
except: pass
def has_children(self): return self.num_children() != 0
def num_children(self): return len(self.children)
def get_child_index(self, name): return self.indices.get(name)
def get_child_at_index(self, index): return self.children[index].deref if index in range(len(self.children)) else None
# Define Zig Stage2 Compiler
class TagAndPayload_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.tag = self.value.GetChildMemberWithName('tag') or self.value.GetChildMemberWithName('tag_ptr').deref.Clone('tag')
            data = self.value.GetChildMemberWithName('data_ptr') or self.value.GetChildMemberWithName('data')
            self.payload = data.GetChildMemberWithName('payload').GetChildMemberWithName(data.GetChildMemberWithName('tag').value)
        except: pass
    def has_children(self): return True
    def num_children(self): return 2
    def get_child_index(self, name):
        try: return ('tag', 'payload').index(name)
        except: return -1
    def get_child_at_index(self, index): return (self.tag, self.payload)[index] if index in range(2) else None
def InstRef_SummaryProvider(value, _=None):
    return value if any(value.unsigned == member.unsigned for member in value.type.enum_members) else (
        'InternPool.Index(%d)' % value.unsigned if value.unsigned < 0x80000000 else 'instructions[%d]' % (value.unsigned - 0x80000000))
def InstIndex_SummaryProvider(value, _=None):
    return 'instructions[%d]' % value.unsigned
class zig_DeclIndex_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            ip = InternPool_Find(self.value.thread)
            if not ip: return
            self.ptr = ip.GetChildMemberWithName('allocated_decls').GetChildAtIndex(self.value.unsigned).address_of.Clone('decl')
        except: pass
    def has_children(self): return True
    def num_children(self): return 1
    def get_child_index(self, name): return 0 if name == 'decl' else -1
    def get_child_at_index(self, index): return self.ptr if index == 0 else None
class Module_Namespace__Module_Namespace_Index_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            ip = InternPool_Find(self.value.thread)
            if not ip: return
            self.ptr = ip.GetChildMemberWithName('allocated_namespaces').GetChildAtIndex(self.value.unsigned).address_of.Clone('namespace')
        except: pass
    def has_children(self): return True
    def num_children(self): return 1
    def get_child_index(self, name): return 0 if name == 'namespace' else -1
    def get_child_at_index(self, index): return self.ptr if index == 0 else None
class TagOrPayloadPtr_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            value_type = self.value.type
            for helper in self.value.target.FindFunctions('%s.dbHelper' % value_type.name, lldb.eFunctionNameTypeFull):
                ptr_self_type, ptr_tag_to_payload_map_type = helper.function.type.GetFunctionArgumentTypes()
                self_type = ptr_self_type.GetPointeeType()
                if self_type == value_type: break
            else: return
            tag_to_payload_map = {field.name: field.type for field in ptr_tag_to_payload_map_type.GetPointeeType().fields}
            tag = self.value.GetChildMemberWithName('tag_if_small_enough')
            if tag.unsigned < page_size:
                self.tag = tag.Clone('tag')
                self.payload = None
            else:
                ptr_otherwise = self.value.GetChildMemberWithName('ptr_otherwise')
                self.tag = ptr_otherwise.GetChildMemberWithName('tag')
                self.payload = ptr_otherwise.Cast(tag_to_payload_map[self.tag.value]).GetChildMemberWithName('data').Clone('payload')
        except: pass
    def has_children(self): return True
    def num_children(self): return 1 + (self.payload is not None)
    def get_child_index(self, name):
        try: return ('tag', 'payload').index(name)
        except: return -1
    def get_child_at_index(self, index): return (self.tag, self.payload)[index] if index in range(2) else None
def Module_Decl_name(decl):
    error = lldb.SBError()
    return decl.process.ReadCStringFromMemory(decl.GetChildMemberWithName('name').deref.load_addr, 256, error)
def Module_Namespace_RenderFullyQualifiedName(namespace):
    parent = namespace.GetChildMemberWithName('parent')
    if parent.unsigned < page_size: return zig_String_decode(namespace.GetChildMemberWithName('file_scope').GetChildMemberWithName('sub_file_path')).removesuffix('.zig').replace('/', '.')
    return '.'.join((Module_Namespace_RenderFullyQualifiedName(parent), Module_Decl_name(namespace.GetChildMemberWithName('ty').GetChildMemberWithName('payload').GetChildMemberWithName('owner_decl').GetChildMemberWithName('decl'))))
def Module_Decl_RenderFullyQualifiedName(decl): return '.'.join((Module_Namespace_RenderFullyQualifiedName(decl.GetChildMemberWithName('src_namespace')), Module_Decl_name(decl)))
def OwnerDecl_RenderFullyQualifiedName(payload): return Module_Decl_RenderFullyQualifiedName(payload.GetChildMemberWithName('owner_decl').GetChildMemberWithName('decl'))
def InternPool_Find(thread):
    for frame in thread:
        ip = frame.FindVariable('ip') or frame.FindVariable('intern_pool')
        if ip: return ip
        mod = frame.FindVariable('zcu') or frame.FindVariable('mod') or frame.FindVariable('module')
        if mod:
            ip = mod.GetChildMemberWithName('intern_pool')
            if ip: return ip
class InternPool_Index_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            index_type = self.value.type
            for helper in self.value.target.FindFunctions('%s.dbHelper' % index_type.name, lldb.eFunctionNameTypeFull):
                ptr_self_type, ptr_tag_to_encoding_map_type = helper.function.type.GetFunctionArgumentTypes()
                if ptr_self_type.GetPointeeType() == index_type: break
            else: return
            tag_to_encoding_map = {field.name: field.type for field in ptr_tag_to_encoding_map_type.GetPointeeType().fields}
            ip = InternPool_Find(self.value.thread)
            if not ip: return
            self.item = ip.GetChildMemberWithName('items').GetChildAtIndex(self.value.unsigned)
            extra = ip.GetChildMemberWithName('extra').GetChildMemberWithName('items')
            self.tag = self.item.GetChildMemberWithName('tag').Clone('tag')
            self.data = None
            self.trailing = None
            data = self.item.GetChildMemberWithName('data')
            encoding_type = tag_to_encoding_map[self.tag.value]
            dynamic_values = {}
            for encoding_field in encoding_type.fields:
                if encoding_field.name == 'data':
                    if encoding_field.type.IsPointerType():
                        extra_index = data.unsigned
                        self.data = extra.GetChildAtIndex(extra_index).address_of.Cast(encoding_field.type).deref.Clone('data')
                        extra_index += encoding_field.type.GetPointeeType().num_fields
                    else:
                        self.data = data.Cast(encoding_field.type).Clone('data')
                elif encoding_field.name == 'trailing':
                    trailing_data = lldb.SBData()
                    for trailing_field in encoding_field.type.fields:
                        trailing_data.Append(extra.GetChildAtIndex(extra_index).address_of.data)
                        trailing_len = dynamic_values['trailing.%s.len' % trailing_field.name].unsigned
                        trailing_data.Append(lldb.SBData.CreateDataFromInt(trailing_len, trailing_data.GetAddressByteSize()))
                        extra_index += trailing_len
                    self.trailing = self.data.CreateValueFromData('trailing', trailing_data, encoding_field.type)
                else:
                    for path in encoding_field.type.GetPointeeType().name.removeprefix('%s::' % encoding_type.name).removeprefix('%s.' % encoding_type.name).partition('__')[0].split(' orelse '):
                        if path.startswith('data.'):
                            root = self.data
                            path = path[len('data'):]
                        else: return
                        dynamic_value = root.GetValueForExpressionPath(path)
                        if dynamic_value:
                            dynamic_values[encoding_field.name] = dynamic_value
                            break
        except: pass
    def has_children(self): return True
    def num_children(self): return 2 + (self.trailing is not None)
    def get_child_index(self, name):
        try: return ('tag', 'data', 'trailing').index(name)
        except: return -1
    def get_child_at_index(self, index): return (self.tag, self.data, self.trailing)[index] if index in range(3) else None
def InternPool_NullTerminatedString_SummaryProvider(value, _=None):
    try:
        ip = InternPool_Find(value.thread)
        if not ip: return
        items = ip.GetChildMemberWithName('string_bytes').GetChildMemberWithName('items')
        b = bytearray()
        i = 0
        while True:
            x = items.GetChildAtIndex(value.unsigned + i).GetValueAsUnsigned()
            if x == 0: break
            b.append(x)
            i += 1
        s = b.decode(encoding='utf8', errors='backslashreplace')
        s1 = s if s.isprintable() else ''.join((c if c.isprintable() else '\\x%02x' % ord(c) for c in s))
        return '"%s"' % s1
    except:
        pass
def type_Type_pointer(payload):
    pointee_type = payload.GetChildMemberWithName('pointee_type')
    sentinel = payload.GetChildMemberWithName('sentinel').GetChildMemberWithName('child')
    align = payload.GetChildMemberWithName('align').unsigned
    addrspace = payload.GetChildMemberWithName('addrspace').value
    bit_offset = payload.GetChildMemberWithName('bit_offset').unsigned
    host_size = payload.GetChildMemberWithName('host_size').unsigned
    vector_index = payload.GetChildMemberWithName('vector_index')
    allowzero = payload.GetChildMemberWithName('allowzero').unsigned
    const = not payload.GetChildMemberWithName('mutable').unsigned
    volatile = payload.GetChildMemberWithName('volatile').unsigned
    size = payload.GetChildMemberWithName('size').value
    if size == 'One': summary = '*'
    elif size == 'Many': summary = '[*'
    elif size == 'Slice': summary = '['
    elif size == 'C': summary = '[*c'
    if sentinel: summary += ':%s' % value_Value_SummaryProvider(sentinel)
    if size != 'One': summary += ']'
    if allowzero: summary += 'allowzero '
    if align != 0 or host_size != 0 or vector_index.value != 'none': summary += 'align(%d%s%s) ' % (align, ':%d:%d' % (bit_offset, host_size) if bit_offset != 0 or host_size != 0 else '', ':?' if vector_index.value == 'runtime' else ':%d' % vector_index.unsigned if vector_index.value != 'none' else '')
    if addrspace != 'generic': summary += 'addrspace(.%s) ' % addrspace
    if const: summary += 'const '
    if volatile: summary += 'volatile '
    summary += type_Type_SummaryProvider(pointee_type)
    return summary
def type_Type_function(payload):
    param_types = payload.GetChildMemberWithName('param_types').children
    comptime_params = payload.GetChildMemberWithName('comptime_params').GetPointeeData(0, len(param_types)).uint8
    return_type = payload.GetChildMemberWithName('return_type')
    alignment = payload.GetChildMemberWithName('alignment').unsigned
    noalias_bits = payload.GetChildMemberWithName('noalias_bits').unsigned
    cc = payload.GetChildMemberWithName('cc').value
    is_var_args = payload.GetChildMemberWithName('is_var_args').unsigned
    return 'fn(%s)%s%s %s' % (', '.join(tuple(''.join(('comptime ' if comptime_param else '', 'noalias ' if noalias_bits & 1 << i else '', type_Type_SummaryProvider(param_type))) for i, (comptime_param, param_type) in enumerate(zip(comptime_params, param_types))) + (('...',) if is_var_args else ())), ' align(%d)' % alignment if alignment != 0 else '', ' callconv(.%s)' % cc if cc != 'Unspecified' else '', type_Type_SummaryProvider(return_type))
def type_Type_SummaryProvider(value, _=None):
    tag = value.GetChildMemberWithName('tag').value
    return type_tag_handlers.get(tag, lambda payload: tag)(value.GetChildMemberWithName('payload'))
type_tag_handlers = {
    'atomic_order': lambda payload: 'std.builtin.AtomicOrder',
    'atomic_rmw_op': lambda payload: 'std.builtin.AtomicRmwOp',
    'calling_convention': lambda payload: 'std.builtin.CallingConvention',
    'address_space': lambda payload: 'std.builtin.AddressSpace',
    'float_mode': lambda payload: 'std.builtin.FloatMode',
    'reduce_op': lambda payload: 'std.builtin.ReduceOp',
    'modifier': lambda payload: 'std.builtin.CallModifier',
    'prefetch_options': lambda payload: 'std.builtin.PrefetchOptions',
    'export_options': lambda payload: 'std.builtin.ExportOptions',
    'extern_options': lambda payload: 'std.builtin.ExternOptions',
    'type_info': lambda payload: 'std.builtin.Type',
    'enum_literal': lambda payload: '@TypeOf(.enum_literal)',
    'null': lambda payload: '@TypeOf(null)',
    'undefined': lambda payload: '@TypeOf(undefined)',
    'empty_struct_literal': lambda payload: '@TypeOf(.{})',
    'anyerror_void_error_union': lambda payload: 'anyerror!void',
    'slice_const_u8': lambda payload: '[]const u8',
    'slice_const_u8_sentinel_0': lambda payload: '[:0]const u8',
    'fn_noreturn_no_args': lambda payload: 'fn() noreturn',
    'fn_void_no_args': lambda payload: 'fn() void',
    'fn_naked_noreturn_no_args': lambda payload: 'fn() callconv(.Naked) noreturn',
    'fn_ccc_void_no_args': lambda payload: 'fn() callconv(.C) void',
    'single_const_pointer_to_comptime_int': lambda payload: '*const comptime_int',
    'manyptr_u8': lambda payload: '[*]u8',
    'manyptr_const_u8': lambda payload: '[*]const u8',
    'manyptr_const_u8_sentinel_0': lambda payload: '[*:0]const u8',
    'function': type_Type_function,
    'error_union': lambda payload: '%s!%s' % (type_Type_SummaryProvider(payload.GetChildMemberWithName('error_set')), type_Type_SummaryProvider(payload.GetChildMemberWithName('payload'))),
    'array_u8': lambda payload: '[%d]u8' % payload.unsigned,
    'array_u8_sentinel_0': lambda payload: '[%d:0]u8' % payload.unsigned,
    'vector': lambda payload: '@Vector(%d, %s)' % (payload.GetChildMemberWithName('len').unsigned, type_Type_SummaryProvider(payload.GetChildMemberWithName('elem_type'))),
    'array': lambda payload: '[%d]%s' % (payload.GetChildMemberWithName('len').unsigned, type_Type_SummaryProvider(payload.GetChildMemberWithName('elem_type'))),
    'array_sentinel': lambda payload: '[%d:%s]%s' % (payload.GetChildMemberWithName('len').unsigned, value_Value_SummaryProvider(payload.GetChildMemberWithName('sentinel')), type_Type_SummaryProvider(payload.GetChildMemberWithName('elem_type'))),
    'tuple': lambda payload: 'tuple{%s}' % ', '.join(('comptime %%s = %s' % value_Value_SummaryProvider(value) if value.GetChildMemberWithName('tag').value != 'unreachable_value' else '%s') % type_Type_SummaryProvider(type) for type, value in zip(payload.GetChildMemberWithName('types').children, payload.GetChildMemberWithName('values').children)),
    'anon_struct': lambda payload: 'struct{%s}' % ', '.join(('comptime %%s: %%s = %s' % value_Value_SummaryProvider(value) if value.GetChildMemberWithName('tag').value != 'unreachable_value' else '%s: %s') % (zig_String_AsIdentifier(name, zig_IsFieldName), type_Type_SummaryProvider(type)) for name, type, value in zip(payload.GetChildMemberWithName('names').children, payload.GetChildMemberWithName('types').children, payload.GetChildMemberWithName('values').children)),
    'pointer': type_Type_pointer,
    'single_const_pointer': lambda payload: '*const %s' % type_Type_SummaryProvider(payload),
    'single_mut_pointer': lambda payload: '*%s' % type_Type_SummaryProvider(payload),
    'many_const_pointer': lambda payload: '[*]const %s' % type_Type_SummaryProvider(payload),
    'many_mut_pointer': lambda payload: '[*]%s' % type_Type_SummaryProvider(payload),
    'c_const_pointer': lambda payload: '[*c]const %s' % type_Type_SummaryProvider(payload),
    'c_mut_pointer': lambda payload: '[*c]%s' % type_Type_SummaryProvider(payload),
    'slice_const': lambda payload: '[]const %s' % type_Type_SummaryProvider(payload),
    'mut_slice': lambda payload: '[]%s' % type_Type_SummaryProvider(payload),
    'int_signed': lambda payload: 'i%d' % payload.unsigned,
    'int_unsigned': lambda payload: 'u%d' % payload.unsigned,
    'optional': lambda payload: '?%s' % type_Type_SummaryProvider(payload),
    'optional_single_mut_pointer': lambda payload: '?*%s' % type_Type_SummaryProvider(payload),
    'optional_single_const_pointer': lambda payload: '?*const %s' % type_Type_SummaryProvider(payload),
    'anyframe_T': lambda payload: 'anyframe->%s' % type_Type_SummaryProvider(payload),
    'error_set': lambda payload: type_tag_handlers['error_set_merged'](payload.GetChildMemberWithName('names')),
    'error_set_single': lambda payload: 'error{%s}' % zig_String_AsIdentifier(payload, zig_IsFieldName),
    'error_set_merged': lambda payload: 'error{%s}' % ','.join(zig_String_AsIdentifier(child.GetChildMemberWithName('key'), zig_IsFieldName) for child in payload.GetChildMemberWithName('entries').children),
    'error_set_inferred': lambda payload: '@typeInfo(@typeInfo(@TypeOf(%s)).@"fn".return_type.?).error_union.error_set' % OwnerDecl_RenderFullyQualifiedName(payload.GetChildMemberWithName('func')),
    'enum_full': OwnerDecl_RenderFullyQualifiedName,
    'enum_nonexhaustive': OwnerDecl_RenderFullyQualifiedName,
    'enum_numbered': OwnerDecl_RenderFullyQualifiedName,
    'enum_simple': OwnerDecl_RenderFullyQualifiedName,
    'struct': OwnerDecl_RenderFullyQualifiedName,
    'union': OwnerDecl_RenderFullyQualifiedName,
    'union_safety_tagged': OwnerDecl_RenderFullyQualifiedName,
    'union_tagged': OwnerDecl_RenderFullyQualifiedName,
    'opaque': OwnerDecl_RenderFullyQualifiedName,
}
def value_Value_str_lit(payload):
    for frame in payload.thread:
        mod = frame.FindVariable('zcu') or frame.FindVariable('mod') or frame.FindVariable('module')
        if mod: break
    else: return
    return '"%s"' % zig_String_decode(mod.GetChildMemberWithName('string_literal_bytes').GetChildMemberWithName('items'), payload.GetChildMemberWithName('index').unsigned, payload.GetChildMemberWithName('len').unsigned)
def value_Value_SummaryProvider(value, _=None):
    tag = value.GetChildMemberWithName('tag').value
    return value_tag_handlers.get(tag, lambda payload: tag.removesuffix('_type'))(value.GetChildMemberWithName('payload'))
value_tag_handlers = {
    'undef': lambda payload: 'undefined',
    'zero': lambda payload: '0',
    'one': lambda payload: '1',
    'void_value': lambda payload: '{}',
    'unreachable_value': lambda payload: 'unreachable',
    'null_value': lambda payload: 'null',
    'bool_true': lambda payload: 'true',
    'bool_false': lambda payload: 'false',
    'empty_struct_value': lambda payload: '.{}',
    'empty_array': lambda payload: '.{}',
    'ty': type_Type_SummaryProvider,
    'int_type': lambda payload: '%c%d' % (payload.GetChildMemberWithName('bits').unsigned, 's' if payload.GetChildMemberWithName('signed').unsigned == 1 else 'u'),
    'int_u64': lambda payload: '%d' % payload.unsigned,
    'int_i64': lambda payload: '%d' % payload.signed,
    'int_big_positive': lambda payload: sum(child.unsigned << i * child.type.size * 8 for i, child in enumerate(payload.children)),
    'int_big_negative': lambda payload: '-%s' % value_tag_handlers['int_big_positive'](payload),
    'function': OwnerDecl_RenderFullyQualifiedName,
    'extern_fn': OwnerDecl_RenderFullyQualifiedName,
    'variable': lambda payload: value_Value_SummaryProvider(payload.GetChildMemberWithName('decl').GetChildMemberWithName('val')),
    'runtime_value': value_Value_SummaryProvider,
    'decl_ref': lambda payload: value_Value_SummaryProvider(payload.GetChildMemberWithName('decl').GetChildMemberWithName('val')),
    'decl_ref_mut': lambda payload: value_Value_SummaryProvider(payload.GetChildMemberWithName('decl_index').GetChildMemberWithName('decl').GetChildMemberWithName('val')),
    'comptime_field_ptr': lambda payload: '&%s' % value_Value_SummaryProvider(payload.GetChildMemberWithName('field_val')),
    'elem_ptr': lambda payload: '(%s)[%d]' % (value_Value_SummaryProvider(payload.GetChildMemberWithName('array_ptr')), payload.GetChildMemberWithName('index').unsigned),
    'field_ptr': lambda payload: '(%s).field[%d]' % (value_Value_SummaryProvider(payload.GetChildMemberWithName('container_ptr')), payload.GetChildMemberWithName('field_index').unsigned),
    'bytes': lambda payload: '"%s"' % zig_String_decode(payload),
    'str_lit': value_Value_str_lit,
    'repeated': lambda payload: '.{%s} ** _' % value_Value_SummaryProvider(payload),
    'empty_array_sentinel': lambda payload: '.{%s}' % value_Value_SummaryProvider(payload),
    'slice': lambda payload: '(%s)[0..%s]' % tuple(value_Value_SummaryProvider(payload.GetChildMemberWithName(name)) for name in ('ptr', 'len')),
    'float_16': lambda payload: payload.value,
    'float_32': lambda payload: payload.value,
    'float_64': lambda payload: payload.value,
    'float_80': lambda payload: payload.value,
    'float_128': lambda payload: payload.value,
    'enum_literal': lambda payload: '.%s' % zig_String_AsIdentifier(payload, zig_IsFieldName),
    'enum_field_index': lambda payload: 'field[%d]' % payload.unsigned,
    'error': lambda payload: 'error.%s' % zig_String_AsIdentifier(payload.GetChildMemberWithName('name'), zig_IsFieldName),
    'eu_payload': value_Value_SummaryProvider,
    'eu_payload_ptr': lambda payload: '&((%s).* catch unreachable)' % value_Value_SummaryProvider(payload.GetChildMemberWithName('container_ptr')),
    'opt_payload': value_Value_SummaryProvider,
    'opt_payload_ptr': lambda payload: '&(%s).*.?' % value_Value_SummaryProvider(payload.GetChildMemberWithName('container_ptr')),
    'aggregate': lambda payload: '.{%s}' % ', '.join(map(value_Value_SummaryProvider, payload.children)),
    'union': lambda payload: '.{.%s = %s}' % tuple(value_Value_SummaryProvider(payload.GetChildMemberWithName(name)) for name in ('tag', 'val')),
    'lazy_align': lambda payload: '@alignOf(%s)' % type_Type_SummaryProvider(payload),
    'lazy_size': lambda payload: '@sizeOf(%s)' % type_Type_SummaryProvider(payload),
}
# Initialize
def add(debugger, *, category, regex=False, type, identifier=None, synth=False, inline_children=False, expand=False, summary=False):
    prefix = '.'.join((__name__, (identifier or type).replace('.', '_').replace(':', '_')))
    if summary: debugger.HandleCommand('type summary add --category %s%s%s "%s"' % (category, ' --inline-children' if inline_children else ''.join((' --expand' if expand else '', ' --python-function %s_SummaryProvider' % prefix if summary == True else ' --summary-string "%s"' % summary)), ' --regex' if regex else '', type))
    if synth: debugger.HandleCommand('type synthetic add --category %s%s --python-class %s_SynthProvider "%s"' % (category, ' --regex' if regex else '', prefix, type))
def MultiArrayList_Entry(type): return '^multi_array_list\\.MultiArrayList\\(%s\\)\\.Entry__struct_[1-9][0-9]*$' % type
def __lldb_init_module(debugger, _=None):
    # Initialize Zig Categories
    debugger.HandleCommand('type category define --language c99 zig.lang zig.std')
    # Initialize Zig Language
    add(debugger, category='zig.lang', regex=True, type='^\\[\\]', identifier='zig_Slice', synth=True, expand=True, summary='len=${svar%#}')
    add(debugger, category='zig.lang', type='[]u8', identifier='zig_String', summary=True)
    add(debugger, category='zig.lang', regex=True, type='^\\?', identifier='zig_Optional', synth=True, summary=True)
    add(debugger, category='zig.lang', regex=True, type='^(error{.*}|anyerror)!', identifier='zig_ErrorUnion', synth=True, inline_children=True, summary=True)
    # Initialize Zig Standard Library
    add(debugger, category='zig.std', type='mem.Allocator', summary='${var.ptr}')
    add(debugger, category='zig.std', regex=True, type='^segmented_list\\.SegmentedList\\(.*\\)$', identifier='std_SegmentedList', synth=True, expand=True, summary='len=${var.len}')
    add(debugger, category='zig.std', regex=True, type='^multi_array_list\\.MultiArrayList\\(.*\\)$', identifier='std_MultiArrayList', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
    add(debugger, category='zig.std', regex=True, type='^multi_array_list\\.MultiArrayList\\(.*\\)\\.Slice$', identifier='std_MultiArrayList_Slice', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
    add(debugger, category='zig.std', regex=True, type=MultiArrayList_Entry('.*'), identifier='std_Entry', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.std', regex=True, type='^hash_map\\.HashMapUnmanaged\\(.*\\)$', identifier='std_HashMapUnmanaged', synth=True, expand=True, summary=True)
    add(debugger, category='zig.std', regex=True, type='^hash_map\\.HashMapUnmanaged\\(.*\\)\\.Entry$', identifier='std_Entry', synth=True, inline_children=True, summary=True)
    # Initialize Zig Stage2 Compiler
    add(debugger, category='zig.stage2', type='Zir.Inst', identifier='TagAndPayload', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.stage2', regex=True, type=MultiArrayList_Entry('Zir\\.Inst'), identifier='TagAndPayload', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.stage2', regex=True, type='^Zir\\.Inst\\.Data\\.Data__struct_[1-9][0-9]*$', inline_children=True, summary=True)
    add(debugger, category='zig.stage2', type='Zir.Inst::Zir.Inst.Ref', identifier='InstRef', summary=True)
    add(debugger, category='zig.stage2', type='Zir.Inst::Zir.Inst.Index', identifier='InstIndex', summary=True)
    add(debugger, category='zig.stage2', type='Air.Inst', identifier='TagAndPayload', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.stage2', type='Air.Inst::Air.Inst.Ref', identifier='InstRef', summary=True)
    add(debugger, category='zig.stage2', type='Air.Inst::Air.Inst.Index', identifier='InstIndex', summary=True)
    add(debugger, category='zig.stage2', regex=True, type=MultiArrayList_Entry('Air\\.Inst'), identifier='TagAndPayload', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.stage2', regex=True, type='^Air\\.Inst\\.Data\\.Data__struct_[1-9][0-9]*$', inline_children=True, summary=True)
    add(debugger, category='zig.stage2', type='zig.DeclIndex', synth=True)
    add(debugger, category='zig.stage2', type='Module.Namespace::Module.Namespace.Index', synth=True)
    add(debugger, category='zig.stage2', type='Module.LazySrcLoc', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Index', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.NullTerminatedString', summary=True)
    add(debugger, category='zig.stage2', type='InternPool.Key', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.Int.Storage', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.ErrorUnion.Value', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.Float.Storage', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.Ptr.Addr', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.Aggregate.Storage', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='arch.x86_64.CodeGen.MCValue', identifier='zig_TaggedUnion', synth=True, inline_children=True, summary=True)

File diff suppressed because it is too large

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "bun",
"version": "1.1.41",
"version": "1.1.39",
"workspaces": [
"./packages/bun-types"
],
@@ -21,7 +21,7 @@
"react": "^18.3.1",
"react-dom": "^18.3.1",
"source-map-js": "^1.2.0",
"typescript": "^5.7.2",
"typescript": "^5.4.5",
"caniuse-lite": "^1.0.30001620",
"autoprefixer": "^10.4.19",
"@mdn/browser-compat-data": "~5.5.28"
@@ -30,8 +30,8 @@
"bun-types": "workspace:packages/bun-types"
},
"scripts": {
"build": "bun run build:debug",
"build:debug": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
"build": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug",
"build:debug": "bun run build",
"build:valgrind": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_BASELINE=ON -ENABLE_VALGRIND=ON -B build/debug-valgrind",
"build:release": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -B build/release",
"build:ci": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_VERBOSE_MAKEFILE=ON -DCI=true -B build/release-ci --verbose --fresh",
@@ -39,8 +39,8 @@
"build:logs": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=ON -B build/release-logs",
"build:safe": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=ReleaseSafe -B build/release-safe",
"build:smol": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=MinSizeRel -B build/release-smol",
"build:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DWEBKIT_LOCAL=ON -B build/debug-local",
"build:release:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DWEBKIT_LOCAL=ON -B build/release-local",
"build:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DWEBKIT_LOCAL=ON -B build/debug",
"build:release:local": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Release -DWEBKIT_LOCAL=ON -B build/release",
"build:release:with_logs": "cmake . -DCMAKE_BUILD_TYPE=Release -DENABLE_LOGS=true -GNinja -Bbuild-release && ninja -Cbuild-release",
"build:debug-zig-release": "cmake . -DCMAKE_BUILD_TYPE=Release -DZIG_OPTIMIZE=Debug -GNinja -Bbuild-debug-zig-release && ninja -Cbuild-debug-zig-release",
"css-properties": "bun run src/css/properties/generate_properties.ts",
@@ -73,7 +73,6 @@
"prettier": "bun run analysis:no-llvm --target prettier",
"prettier:check": "bun run analysis:no-llvm --target prettier-check",
"prettier:extra": "bun run analysis:no-llvm --target prettier-extra",
"prettier:diff": "bun run analysis:no-llvm --target prettier-diff",
"node:test": "node ./scripts/runner.node.mjs --quiet --exec-path=$npm_execpath --node-tests "
"prettier:diff": "bun run analysis:no-llvm --target prettier-diff"
}
}

View File

@@ -1,5 +0,0 @@
[target.aarch64-unknown-linux-musl]
linker = "aarch64-linux-musl-gcc"
rustflags = ["-C", "target-feature=-crt-static"]
[target.x86_64-pc-windows-msvc]
rustflags = ["-C", "target-feature=+crt-static"]

View File

@@ -1,202 +0,0 @@
# Created by https://www.toptal.com/developers/gitignore/api/node
# Edit at https://www.toptal.com/developers/gitignore?templates=node
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# TypeScript v1 declaration files
typings/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
# parcel-bundler cache (https://parceljs.org/)
.cache
# Next.js build output
.next
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# End of https://www.toptal.com/developers/gitignore/api/node
# Created by https://www.toptal.com/developers/gitignore/api/macos
# Edit at https://www.toptal.com/developers/gitignore?templates=macos
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### macOS Patch ###
# iCloud generated files
*.icloud
# End of https://www.toptal.com/developers/gitignore/api/macos
# Created by https://www.toptal.com/developers/gitignore/api/windows
# Edit at https://www.toptal.com/developers/gitignore?templates=windows
### Windows ###
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# End of https://www.toptal.com/developers/gitignore/api/windows
#Added by cargo
/target
Cargo.lock
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
*.node
dist/
index.js
index.d.ts

View File

@@ -1,13 +0,0 @@
target
Cargo.lock
.cargo
.github
npm
.eslintrc
.prettierignore
rustfmt.toml
yarn.lock
*.node
.yarn
__test__
renovate.json

View File

@@ -1,21 +0,0 @@
[package]
edition = "2021"
name = "bun-mdx-rs"
version = "0.0.0"
[lib]
crate-type = ["cdylib"]
[dependencies]
# Default enable napi4 feature, see https://nodejs.org/api/n-api.html#node-api-version-matrix
napi = { version = "2.12.2", default-features = false, features = ["napi4"] }
napi-derive = "2.12.2"
mdxjs = "0.2.11"
bun-native-plugin = { path = "../bun-native-plugin-rs" }
[build-dependencies]
napi-build = "2.0.1"
[profile.release]
lto = true
strip = "symbols"

View File

@@ -1,34 +0,0 @@
# bun-build-mdx-rs
This is a proof of concept for using a third-party native addon in `Bun.build()`.
This uses `mdxjs-rs` to convert MDX to JSX.
TODO: **This needs to be built & published to npm.**
## Building locally:
```sh
cargo build --release
```
```js
import { build } from "bun";
import mdx from "./index.js";
// TODO: This needs to be prebuilt for the current platform
// Probably use a napi-rs template for this
import addon from "./target/release/libmdx_bun.dylib" with { type: "file" };
const results = await build({
entrypoints: ["./hello.jsx"],
plugins: [mdx({ addon })],
minify: true,
outdir: "./dist",
define: {
"process.env.NODE_ENV": JSON.stringify("production"),
},
});
console.log(results);
```

View File

@@ -1,7 +0,0 @@
import test from 'ava'
import { sum } from '../index.js'
test('sum from native', (t) => {
t.is(sum(1, 2), 3)
})

View File

@@ -1,5 +0,0 @@
extern crate napi_build;
fn main() {
napi_build::setup();
}

View File

@@ -1,6 +0,0 @@
import page1 from "./page1.mdx";
import page2 from "./page2.mdx";
import page3 from "./page3.mdx";
import page4 from "./page4.mdx";
console.log(page1, page2, page3, page4);

View File

@@ -1,11 +0,0 @@
# Hello World
This is a sample MDX file that demonstrates various MDX features.
## Components
You can use JSX components directly in MDX:
<Button onClick={() => alert("Hello!")}>Click me</Button>
## Code Blocks

View File

@@ -1,11 +0,0 @@
# Hello World
This is a sample MDX file that demonstrates various MDX features.
## Components
You can use JSX components directly in MDX:
<Button onClick={() => alert("Hello!")}>Click me</Button>
## Code Blocks

View File

@@ -1,11 +0,0 @@
# Hello World
This is a sample MDX file that demonstrates various MDX features.
## Components
You can use JSX components directly in MDX:
<Button onClick={() => alert("Hello!")}>Click me</Button>
## Code Blocks

View File

@@ -1,11 +0,0 @@
# Hello World
This is a sample MDX file that demonstrates various MDX features.
## Components
You can use JSX components directly in MDX:
<Button onClick={() => alert("Hello!")}>Click me</Button>
## Code Blocks

View File

@@ -1,3 +0,0 @@
# `bun-mdx-rs-darwin-arm64`
This is the **aarch64-apple-darwin** binary for `bun-mdx-rs`

View File

@@ -1,18 +0,0 @@
{
"name": "bun-mdx-rs-darwin-arm64",
"version": "0.0.0",
"os": [
"darwin"
],
"cpu": [
"arm64"
],
"main": "bun-mdx-rs.darwin-arm64.node",
"files": [
"bun-mdx-rs.darwin-arm64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View File

@@ -1,3 +0,0 @@
# `bun-mdx-rs-darwin-x64`
This is the **x86_64-apple-darwin** binary for `bun-mdx-rs`

View File

@@ -1,18 +0,0 @@
{
"name": "bun-mdx-rs-darwin-x64",
"version": "0.0.0",
"os": [
"darwin"
],
"cpu": [
"x64"
],
"main": "bun-mdx-rs.darwin-x64.node",
"files": [
"bun-mdx-rs.darwin-x64.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View File

@@ -1,3 +0,0 @@
# `bun-mdx-rs-linux-arm64-gnu`
This is the **aarch64-unknown-linux-gnu** binary for `bun-mdx-rs`

View File

@@ -1,21 +0,0 @@
{
"name": "bun-mdx-rs-linux-arm64-gnu",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm64"
],
"main": "bun-mdx-rs.linux-arm64-gnu.node",
"files": [
"bun-mdx-rs.linux-arm64-gnu.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"glibc"
]
}

View File

@@ -1,3 +0,0 @@
# `bun-mdx-rs-linux-arm64-musl`
This is the **aarch64-unknown-linux-musl** binary for `bun-mdx-rs`

View File

@@ -1,21 +0,0 @@
{
"name": "bun-mdx-rs-linux-arm64-musl",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"arm64"
],
"main": "bun-mdx-rs.linux-arm64-musl.node",
"files": [
"bun-mdx-rs.linux-arm64-musl.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"musl"
]
}

View File

@@ -1,3 +0,0 @@
# `bun-mdx-rs-linux-x64-gnu`
This is the **x86_64-unknown-linux-gnu** binary for `bun-mdx-rs`

View File

@@ -1,21 +0,0 @@
{
"name": "bun-mdx-rs-linux-x64-gnu",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"x64"
],
"main": "bun-mdx-rs.linux-x64-gnu.node",
"files": [
"bun-mdx-rs.linux-x64-gnu.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"glibc"
]
}

View File

@@ -1,3 +0,0 @@
# `bun-mdx-rs-linux-x64-musl`
This is the **x86_64-unknown-linux-musl** binary for `bun-mdx-rs`

View File

@@ -1,21 +0,0 @@
{
"name": "bun-mdx-rs-linux-x64-musl",
"version": "0.0.0",
"os": [
"linux"
],
"cpu": [
"x64"
],
"main": "bun-mdx-rs.linux-x64-musl.node",
"files": [
"bun-mdx-rs.linux-x64-musl.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
},
"libc": [
"musl"
]
}

View File

@@ -1,3 +0,0 @@
# `bun-mdx-rs-win32-x64-msvc`
This is the **x86_64-pc-windows-msvc** binary for `bun-mdx-rs`

View File

@@ -1,18 +0,0 @@
{
"name": "bun-mdx-rs-win32-x64-msvc",
"version": "0.0.0",
"os": [
"win32"
],
"cpu": [
"x64"
],
"main": "bun-mdx-rs.win32-x64-msvc.node",
"files": [
"bun-mdx-rs.win32-x64-msvc.node"
],
"license": "MIT",
"engines": {
"node": ">= 10"
}
}

View File

@@ -1,37 +0,0 @@
{
"name": "bun-mdx-rs",
"version": "0.0.0",
"main": "index.js",
"types": "index.d.ts",
"napi": {
"name": "bun-mdx-rs",
"triples": {
"additional": [
"aarch64-apple-darwin",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"x86_64-unknown-linux-musl"
]
}
},
"license": "MIT",
"devDependencies": {
"@napi-rs/cli": "^2.18.4",
"ava": "^6.0.1"
},
"ava": {
"timeout": "3m"
},
"engines": {
"node": ">= 10"
},
"scripts": {
"artifacts": "napi artifacts",
"build": "napi build --platform --release",
"build:debug": "napi build --platform",
"prepublishOnly": "napi prepublish -t npm",
"test": "ava",
"universal": "napi universal",
"version": "napi version"
}
}

View File

@@ -1,2 +0,0 @@
tab_spaces = 2
edition = "2021"

View File

@@ -1,25 +0,0 @@
use bun_native_plugin::{anyhow, bun, define_bun_plugin, BunLoader, Result};
use mdxjs::{compile, Options as CompileOptions};
use napi_derive::napi;
define_bun_plugin!("bun-mdx-rs");
#[bun]
pub fn bun_mdx_rs(handle: &mut OnBeforeParse) -> Result<()> {
let source_str = handle.input_source_code()?;
let mut options = CompileOptions::gfm();
// Leave it as JSX for Bun to handle
options.jsx = true;
let path = handle.path()?;
options.filepath = Some(path.to_string());
let jsx = compile(&source_str, &options)
.map_err(|e| anyhow::anyhow!("Failed to compile MDX: {:?}", e))?;
handle.set_output_source_code(jsx, BunLoader::BUN_LOADER_JSX);
Ok(())
}

View File

@@ -116,7 +116,6 @@ try {
entrypoints: [join(import.meta.dir, "out/manifest.js")],
outdir: "out",
minify: true,
throw: true,
});
const jsFilename = "manifest-" + jsBundle.outputs[0].hash + ".js";
// const cssBundle = await build({

View File

@@ -1,73 +0,0 @@
#ifndef BUN_NATIVE_BUNDLER_PLUGIN_API_H
#define BUN_NATIVE_BUNDLER_PLUGIN_API_H
#include <stddef.h>
#include <stdint.h>
typedef enum {
BUN_LOADER_JSX = 0,
BUN_LOADER_JS = 1,
BUN_LOADER_TS = 2,
BUN_LOADER_TSX = 3,
BUN_LOADER_CSS = 4,
BUN_LOADER_FILE = 5,
BUN_LOADER_JSON = 6,
BUN_LOADER_TOML = 7,
BUN_LOADER_WASM = 8,
BUN_LOADER_NAPI = 9,
BUN_LOADER_BASE64 = 10,
BUN_LOADER_DATAURL = 11,
BUN_LOADER_TEXT = 12,
} BunLoader;
const BunLoader BUN_LOADER_MAX = BUN_LOADER_TEXT;
typedef struct BunLogOptions {
size_t __struct_size;
const uint8_t *message_ptr;
size_t message_len;
const uint8_t *path_ptr;
size_t path_len;
const uint8_t *source_line_text_ptr;
size_t source_line_text_len;
int8_t level;
int line;
int lineEnd;
int column;
int columnEnd;
} BunLogOptions;
typedef struct {
size_t __struct_size;
void *bun;
const uint8_t *path_ptr;
size_t path_len;
const uint8_t *namespace_ptr;
size_t namespace_len;
uint8_t default_loader;
void *external;
} OnBeforeParseArguments;
typedef struct OnBeforeParseResult {
size_t __struct_size;
uint8_t *source_ptr;
size_t source_len;
uint8_t loader;
int (*fetchSourceCode)(const OnBeforeParseArguments *args,
struct OnBeforeParseResult *result);
void *plugin_source_code_context;
void (*free_plugin_source_code_context)(void *ctx);
void (*log)(const OnBeforeParseArguments *args, BunLogOptions *options);
} OnBeforeParseResult;
typedef enum {
BUN_LOG_LEVEL_VERBOSE = 0,
BUN_LOG_LEVEL_DEBUG = 1,
BUN_LOG_LEVEL_INFO = 2,
BUN_LOG_LEVEL_WARN = 3,
BUN_LOG_LEVEL_ERROR = 4,
} BunLogLevel;
const BunLogLevel BUN_LOG_MAX = BUN_LOG_LEVEL_ERROR;
#endif // BUN_NATIVE_BUNDLER_PLUGIN_API_H

View File

@@ -1 +0,0 @@
target/

View File

@@ -1,385 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "aho-corasick"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
dependencies = [
"memchr",
]
[[package]]
name = "anyhow"
version = "1.0.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7"
[[package]]
name = "bindgen"
version = "0.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f49d8fed880d473ea71efb9bf597651e77201bdd4893efe54c9e5d65ae04ce6f"
dependencies = [
"bitflags",
"cexpr",
"clang-sys",
"itertools",
"log",
"prettyplease",
"proc-macro2",
"quote",
"regex",
"rustc-hash",
"shlex",
"syn",
]
[[package]]
name = "bitflags"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
[[package]]
name = "bun-macro"
version = "0.1.0"
dependencies = [
"anyhow",
"napi",
"quote",
"syn",
]
[[package]]
name = "bun-native-plugin"
version = "0.1.0"
dependencies = [
"anyhow",
"bindgen",
"bun-macro",
"napi",
]
[[package]]
name = "cexpr"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
dependencies = [
"nom",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clang-sys"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4"
dependencies = [
"glob",
"libc",
"libloading",
]
[[package]]
name = "convert_case"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "ctor"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "either"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0"
[[package]]
name = "glob"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "itertools"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186"
dependencies = [
"either",
]
[[package]]
name = "libc"
version = "0.2.166"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2ccc108bbc0b1331bd061864e7cd823c0cab660bbe6970e66e2c0614decde36"
[[package]]
name = "libloading"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
dependencies = [
"cfg-if",
"windows-targets",
]
[[package]]
name = "log"
version = "0.4.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
[[package]]
name = "memchr"
version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "minimal-lexical"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "napi"
version = "2.16.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "214f07a80874bb96a8433b3cdfc84980d56c7b02e1a0d7ba4ba0db5cef785e2b"
dependencies = [
"bitflags",
"ctor",
"napi-derive",
"napi-sys",
"once_cell",
]
[[package]]
name = "napi-derive"
version = "2.16.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cbe2585d8ac223f7d34f13701434b9d5f4eb9c332cccce8dee57ea18ab8ab0c"
dependencies = [
"cfg-if",
"convert_case",
"napi-derive-backend",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "napi-derive-backend"
version = "1.0.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1639aaa9eeb76e91c6ae66da8ce3e89e921cd3885e99ec85f4abacae72fc91bf"
dependencies = [
"convert_case",
"once_cell",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "napi-sys"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "427802e8ec3a734331fec1035594a210ce1ff4dc5bc1950530920ab717964ea3"
dependencies = [
"libloading",
]
[[package]]
name = "nom"
version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
"minimal-lexical",
]
[[package]]
name = "once_cell"
version = "1.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
[[package]]
name = "prettyplease"
version = "0.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033"
dependencies = [
"proc-macro2",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af"
dependencies = [
"proc-macro2",
]
[[package]]
name = "regex"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
dependencies = [
"aho-corasick",
"memchr",
"regex-automata",
"regex-syntax",
]
[[package]]
name = "regex-automata"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
]
[[package]]
name = "regex-syntax"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
[[package]]
name = "rustc-hash"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "syn"
version = "2.0.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44d46482f1c1c87acd84dea20c1bf5ebff4c757009ed6bf19cfd36fb10e92c4e"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"
[[package]]
name = "unicode-segmentation"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "windows-targets"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"

View File

@@ -1,17 +0,0 @@
[package]
name = "bun-native-plugin"
version = "0.1.0"
edition = "2021"
[build-dependencies]
bindgen = "0.70.1"
[dependencies]
anyhow = "1.0.94"
bun-macro = { path = "./bun-macro" }
napi = { version = "2.14.1", default-features = false, features = ["napi4"] }
[features]
default = ["napi"]
napi = []

View File

@@ -1,241 +0,0 @@
> ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
# Bun Native Plugins
This crate provides a Rustified wrapper over Bun's native bundler plugin C API.
Some advantages to _native_ bundler plugins as opposed to regular ones implemented in JS are:
- Native plugins take full advantage of Bun's parallelized bundler pipeline and run on multiple threads at the same time
- Unlike JS, native plugins don't need to do the UTF-8 <-> UTF-16 source code string conversions
What are native bundler plugins, exactly? They are NAPI modules that expose a C ABI function implementing a plugin lifecycle hook.
The currently supported lifecycle hooks are:
- `onBeforeParse` (called immediately before a file is parsed, allows you to modify the source code of the file)
## Getting started
Since native bundler plugins are NAPI modules, the easiest way to get started is to create a new [napi-rs](https://github.com/napi-rs/napi-rs) project:
```bash
bun add -g @napi-rs/cli
napi new
```
Then install this crate:
```bash
cargo add bun-native-plugin
```
Now, inside the `lib.rs` file, we'll use the `bun_native_plugin::bun` proc macro to define a function which
will implement our native plugin.
Here's an example implementing the `onBeforeParse` hook:
```rs
use bun_native_plugin::{define_bun_plugin, OnBeforeParse, bun, Result, anyhow, BunLoader};
use napi_derive::napi;
/// Define the plugin and its name
define_bun_plugin!("replace-foo-with-bar");
/// Here we'll implement `onBeforeParse` with code that replaces all occurrences of
/// `foo` with `bar`.
///
/// We use the #[bun] macro to generate some of the boilerplate code.
///
/// The argument of the function (`handle: &mut OnBeforeParse`) tells
/// the macro that this function implements the `onBeforeParse` hook.
#[bun]
pub fn replace_foo_with_bar(handle: &mut OnBeforeParse) -> Result<()> {
    // Fetch the input source code.
    let input_source_code = handle.input_source_code()?;
    // Get the Loader for the file
    let loader = handle.output_loader();
    let output_source_code = input_source_code.replace("foo", "bar");
    handle.set_output_source_code(output_source_code, BunLoader::BUN_LOADER_JSX);
    Ok(())
}
```
Internally, the `#[bun]` macro wraps your code and declares a C ABI function which implements
the function signature of `onBeforeParse` plugins in Bun's C API for bundler plugins.
Then it calls your code. The wrapper looks _roughly_ like this:
```rs
pub extern "C" fn replace_foo_with_bar(
args: *const bun_native_plugin::sys::OnBeforeParseArguments,
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
// The actual code you wrote is inlined here
fn __replace_foo_with_bar(handle: &mut OnBeforeParse) -> Result<()> {
// Fetch the input source code.
let input_source_code = handle.input_source_code()?;
// Get the Loader for the file
let loader = handle.output_loader();
let output_source_code = input_source_code.replace("foo", "bar");
handle.set_output_source_code(output_source_code, BunLoader::BUN_LOADER_JSX);
Ok(())
}
let args = unsafe { &*args };
let mut handle = OnBeforeParse::from_raw(args, result) {
Ok(handle) => handle,
Err(_) => {
return;
}
};
if let Err(e) = __replace_fo_with_bar(&handle) {
handle.log_err(&e.to_string());
}
}
```
Now, let's compile this NAPI module. If you're using napi-rs, the `package.json` should have a `build` script you can run:
```bash
bun run build
```
This will produce a `.node` file in the project directory.
With the compiled NAPI module, you can now register the plugin from JS:
```js
const result = await Bun.build({
  entrypoints: ["index.ts"],
  plugins: [
    {
      name: "replace-foo-with-bar",
      setup(build) {
        const napiModule = require("path/to/napi_module.node");
        // Register the `onBeforeParse` hook to run on all `.ts` files.
        // We tell it to use the function we implemented inside of our `lib.rs` code.
        build.onBeforeParse(
          { filter: /\.ts/ },
          { napiModule, symbol: "replace_foo_with_bar" },
        );
      },
    },
  ],
});
```
## Very important information
### Error handling and panics
If your plugin function returns an `Err(...)`, the error is logged to Bun's bundler.
It is highly advised that you propagate all errors and avoid `.unwrap()`ing or `.expect()`ing results.
The `#[bun]` wrapper macro runs your code inside a [`panic::catch_unwind`](https://doc.rust-lang.org/std/panic/fn.catch_unwind.html),
which may catch _some_ panics but is **not guaranteed to catch all panics**.
Therefore, it is recommended to **avoid panics at all costs**.
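As a rough sketch of what that advice looks like in practice (the hook name, the header check, and the exact types returned by `input_source_code()` are assumptions made for this example rather than documented API guarantees):
```rs
use bun_native_plugin::{anyhow, bun, define_bun_plugin, BunLoader, OnBeforeParse, Result};
use napi_derive::napi;

define_bun_plugin!("strict-header-check");

/// Hypothetical hook: fail cleanly (no panics) when a file does not start
/// with a `//!` header comment.
#[bun]
pub fn strict_header_check(handle: &mut OnBeforeParse) -> Result<()> {
    // `?` turns any failure into a bundler log entry instead of a crash.
    let source = handle.input_source_code()?;

    let first_line = source
        .lines()
        .next()
        .ok_or_else(|| anyhow::anyhow!("empty source file"))?;

    if !first_line.starts_with("//!") {
        // Returning Err(...) is the supported way to report problems.
        return Err(anyhow::anyhow!("expected a //! header as the first line"));
    }

    // Pass the source through unchanged.
    handle.set_output_source_code(source.to_string(), BunLoader::BUN_LOADER_JS);
    Ok(())
}
```
The specific check does not matter; the point is that every failure path flows through `Result`, so the macro's `catch_unwind` guard stays a last resort rather than the primary error channel.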
### Passing state to and from JS: `External`
One way to pass data between your plugin and JS is through NAPI's [External](https://napi.rs/docs/concepts/external) type.
An External in NAPI is like an opaque pointer to data that can be passed to and from JS. Inside your NAPI module, you can retrieve
the pointer and modify the data.
As an example that extends our getting started example above, let's say you wanted to count the number of `foo`'s that the native plugin encounters.
You would expose a NAPI module function which creates this state. Recall that state in native plugins must be threadsafe. This usually means
that your state must be `Sync`:
```rs
struct PluginState {
    foo_count: std::sync::atomic::AtomicU32,
}
#[napi]
pub fn create_plugin_state() -> External<PluginState> {
    let external = External::new(PluginState {
        foo_count: std::sync::atomic::AtomicU32::new(0),
    });
    external
}
#[napi]
pub fn get_foo_count(plugin_state: External<PluginState>) -> u32 {
    let plugin_state: &PluginState = &plugin_state;
    plugin_state.foo_count.load(std::sync::atomic::Ordering::Relaxed)
}
```
When you register your plugin from Javascript, you call the napi module function to create the external and then pass it:
```js
const napiModule = require("path/to/napi_module.node");
const pluginState = napiModule.createPluginState();
const result = await Bun.build({
  entrypoints: ["index.ts"],
  plugins: [
    {
      name: "replace-foo-with-bar",
      setup(build) {
        build.onBeforeParse(
          { filter: /\.ts/ },
          {
            napiModule,
            symbol: "on_before_parse_plugin_impl",
            // pass our NAPI external which contains our plugin state here
            external: pluginState,
          },
        );
      },
    },
  ],
});
console.log("Total `foo`s encountered: ", napiModule.getFooCount(pluginState));
```
Finally, from the native implementation of your plugin, you can extract the external:
```rs
#[bun]
pub fn on_before_parse_plugin_impl(handle: &mut OnBeforeParse) -> Result<()> {
    // This operation is only safe if you pass in an external when registering the plugin.
    // If you don't, this could lead to a segfault or access of undefined memory.
    let plugin_state: &PluginState =
        unsafe { handle.external().and_then(|state| state.ok_or(Error::Unknown))? };
    // Fetch our source code again
    let input_source_code = handle.input_source_code()?;
    // Count the number of `foo`s and add it to our state
    let foo_count = input_source_code.matches("foo").count() as u32;
    plugin_state.foo_count.fetch_add(foo_count, std::sync::atomic::Ordering::Relaxed);
    Ok(())
}
```
### Concurrency
Your plugin function can be called _on any thread_ at _any time_ and possibly _multiple times at once_.
Therefore, you must design any state management to be threadsafe.
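For example, shared state intended to survive concurrent hook invocations might keep simple counters in atomics and anything more structured behind a `Mutex`. This is an illustrative sketch only; the `seen_paths` field and the `record` helper are not part of this crate:
```rs
use std::sync::atomic::{AtomicU32, Ordering};
use std::sync::Mutex;

/// Hypothetical shared plugin state. Both fields are safe to touch from
/// several bundler threads at once, so the struct as a whole is `Sync`.
struct PluginState {
    foo_count: AtomicU32,
    seen_paths: Mutex<Vec<String>>,
}

impl PluginState {
    fn record(&self, path: &str, foos_in_file: u32) {
        // Plain counters can stay lock-free.
        self.foo_count.fetch_add(foos_in_file, Ordering::Relaxed);
        // Anything richer goes behind a Mutex; keep the critical section short.
        if let Ok(mut paths) = self.seen_paths.lock() {
            paths.push(path.to_string());
        }
    }
}
```
Because hooks only ever see a shared reference to this state (for example through an `External`), interior mutability via atomics and locks is what keeps concurrent updates safe.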

View File

@@ -1,20 +0,0 @@
use std::path::PathBuf;
fn main() {
println!("cargo:rustc-link-search=./headers");
let bindings = bindgen::Builder::default()
.header("wrapper.h")
// Add absolute path to headers directory
.clang_arg("-I./headers")
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
.rustified_enum("BunLogLevel")
.rustified_enum("BunLoader")
.generate()
.expect("Unable to generate bindings");
let out_path = PathBuf::from(std::env::var("OUT_DIR").unwrap());
bindings
.write_to_file(out_path.join("bindings.rs"))
.expect("Couldn't write bindings!");
}

View File

@@ -1,14 +0,0 @@
[package]
name = "bun-macro"
version = "0.1.0"
edition = "2021"
[lib]
proc-macro = true
[dependencies]
syn = { version = "2.0", features = ["full"] }
quote = "1.0"
napi = "2.16.13"
anyhow = "1.0.94"

View File

@@ -1,54 +0,0 @@
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, Ident, ItemFn};
#[proc_macro_attribute]
pub fn bun(_attr: TokenStream, item: TokenStream) -> TokenStream {
// Parse the input function
let input_fn = parse_macro_input!(item as ItemFn);
let fn_name = &input_fn.sig.ident;
let inner_fn_name = Ident::new(&format!("__{}", fn_name), fn_name.span());
let fn_block = &input_fn.block;
// Generate the wrapped function
let output = quote! {
#[no_mangle]
pub unsafe extern "C" fn #fn_name(
args_raw: *mut bun_native_plugin::sys::OnBeforeParseArguments,
result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
fn #inner_fn_name(handle: &mut bun_native_plugin::OnBeforeParse) -> Result<()> {
#fn_block
}
let args_path = unsafe { (*args_raw).path_ptr };
let args_path_len = unsafe { (*args_raw).path_len };
let result_pointer = result;
let result = std::panic::catch_unwind(|| {
let mut handle = match bun_native_plugin::OnBeforeParse::from_raw(args_raw, result) {
Ok(handle) => handle,
Err(_) => return,
};
if let Err(e) = #inner_fn_name(&mut handle) {
handle.log_error(&format!("{:?}", e));
}
});
if let Err(e) = result {
let msg_string = format!("Plugin crashed: {:?}", e);
let mut log_options = bun_native_plugin::log_from_message_and_level(
&msg_string,
bun_native_plugin::sys::BunLogLevel::BUN_LOG_LEVEL_ERROR,
args_path,
args_path_len,
);
unsafe {
((*result_pointer).log.unwrap())(args_raw, &mut log_options);
}
}
}
};
output.into()
}


@@ -1,6 +0,0 @@
import { join } from "node:path";
const dirname = join(import.meta.dir, "../", "bun-native-bundler-plugin-api");
await Bun.$`rm -rf headers`;
await Bun.$`mkdir -p headers`;
await Bun.$`cp -R ${dirname} headers/bun-native-bundler-plugin-api`;


@@ -1,79 +0,0 @@
#ifndef BUN_NATIVE_BUNDLER_PLUGIN_API_H
#define BUN_NATIVE_BUNDLER_PLUGIN_API_H
#include <stddef.h>
#include <stdint.h>
typedef enum {
BUN_LOADER_JSX = 0,
BUN_LOADER_JS = 1,
BUN_LOADER_TS = 2,
BUN_LOADER_TSX = 3,
BUN_LOADER_CSS = 4,
BUN_LOADER_FILE = 5,
BUN_LOADER_JSON = 6,
BUN_LOADER_TOML = 7,
BUN_LOADER_WASM = 8,
BUN_LOADER_NAPI = 9,
BUN_LOADER_BASE64 = 10,
BUN_LOADER_DATAURL = 11,
BUN_LOADER_TEXT = 12,
BUN_LOADER_BUNSH = 13,
BUN_LOADER_SQLITE = 14,
BUN_LOADER_SQLITE_EMBEDDED = 15
} BunLoader;
const BunLoader BUN_LOADER_MAX = BUN_LOADER_SQLITE_EMBEDDED;
typedef struct BunLogOptions {
size_t __struct_size;
const uint8_t* message_ptr;
size_t message_len;
const uint8_t* path_ptr;
size_t path_len;
const uint8_t* source_line_text_ptr;
size_t source_line_text_len;
int8_t level;
int line;
int lineEnd;
int column;
int columnEnd;
} BunLogOptions;
typedef struct {
size_t __struct_size;
void* bun;
const uint8_t* path_ptr;
size_t path_len;
const uint8_t* namespace_ptr;
size_t namespace_len;
uint8_t default_loader;
void *external;
} OnBeforeParseArguments;
typedef struct OnBeforeParseResult {
size_t __struct_size;
uint8_t* source_ptr;
size_t source_len;
uint8_t loader;
int (*fetchSourceCode)(
const OnBeforeParseArguments* args,
struct OnBeforeParseResult* result
);
void* plugin_source_code_context;
void (*free_plugin_source_code_context)(void* ctx);
void (*log)(const OnBeforeParseArguments* args, BunLogOptions* options);
} OnBeforeParseResult;
typedef enum {
BUN_LOG_LEVEL_VERBOSE = 0,
BUN_LOG_LEVEL_DEBUG = 1,
BUN_LOG_LEVEL_INFO = 2,
BUN_LOG_LEVEL_WARN = 3,
BUN_LOG_LEVEL_ERROR = 4,
} BunLogLevel;
const BunLogLevel BUN_LOG_MAX = BUN_LOG_LEVEL_ERROR;
#endif // BUN_NATIVE_BUNDLER_PLUGIN_API_H


@@ -1,684 +0,0 @@
//! > ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
//!
//! # Bun Native Plugins
//!
//! This crate provides a Rustified wrapper over Bun's native bundler plugin C API.
//!
//! Some advantages to _native_ bundler plugins as opposed to regular ones implemented in JS:
//!
//! - Native plugins take full advantage of Bun's parallelized bundler pipeline and run on multiple threads at the same time
//! - Unlike JS, native plugins don't need to do the UTF-8 <-> UTF-16 source code string conversions
//!
//! What are native bundler plugins exactly? They are NAPI modules that expose a C ABI function implementing a plugin lifecycle hook.
//!
//! The currently supported lifecycle hooks are:
//!
//! - `onBeforeParse` (called immediately before a file is parsed, allows you to modify the source code of the file)
//!
//! ## Getting started
//!
//! Since native bundler plugins are NAPI modules, the easiest way to get started is to create a new [napi-rs](https://github.com/napi-rs/napi-rs) project:
//!
//! ```bash
//! bun add -g @napi-rs/cli
//! napi new
//! ```
//!
//! Then install this crate:
//!
//! ```bash
//! cargo add bun-native-plugin
//! ```
//!
//! Now, inside the `lib.rs` file, expose a C ABI function which has the same function signature as the plugin lifecycle hook that you want to implement.
//!
//! For example, implementing `onBeforeParse`:
//!
//! ```rust
//! use bun_native_plugin::{OnBeforeParse};
//!
//! /// This is necessary for napi-rs to compile this into a proper NAPI module
//! #[napi]
//! pub fn register_bun_plugin() {}
//!
//! /// Use `no_mangle` so that we can reference this symbol by name later
//! /// when registering this native plugin in JS.
//! ///
//! /// Here we'll create a dummy plugin which replaces all occurrences of
//! /// `foo` with `bar`
//! #[no_mangle]
//! pub extern "C" fn on_before_parse_plugin_impl(
//! args: *const bun_native_plugin::sys::OnBeforeParseArguments,
//! result: *mut bun_native_plugin::sys::OnBeforeParseResult,
//! ) {
//! let args = unsafe { &*args };
//! let result = unsafe { &mut *result };
//!
//! // This returns a handle which is a safe wrapper over the raw
//! // C API.
//! let mut handle = OnBeforeParse::from_raw(args, result) {
//! Ok(handle) => handle,
//! Err(_) => {
//! // `OnBeforeParse::from_raw` handles error logging
//! // so it fine to return here.
//! return;
//! }
//! };
//!
//! let input_source_code = match handle.input_source_code() {
//! Ok(source_str) => source_str,
//! Err(_) => {
//! // If we encounter an error, we must log it so that
//! // Bun knows this plugin failed.
//! handle.log_error("Failed to fetch source code!");
//! return;
//! }
//! };
//!
//! let loader = handle.output_loader();
//!   let output_source_code = input_source_code.replace("foo", "bar");
//! handle.set_output_source_code(output_source_code, loader);
//! }
//! ```
//!
//! Then compile this NAPI module. If you're using napi-rs, the `package.json` should have a `build` script you can run:
//!
//! ```bash
//! bun run build
//! ```
//!
//! This will produce a `.node` file in the project directory.
//!
//! With the compiled NAPI module, you can now register the plugin from JS:
//!
//! ```js
//! const result = await Bun.build({
//! entrypoints: ["index.ts"],
//! plugins: [
//! {
//! name: "replace-foo-with-bar",
//! setup(build) {
//! const napiModule = require("path/to/napi_module.node");
//!
//! // Register the `onBeforeParse` hook to run on all `.ts` files.
//!       // We tell it to use the function we implemented inside our `lib.rs` code.
//! build.onBeforeParse(
//! { filter: /\.ts/ },
//! { napiModule, symbol: "on_before_parse_plugin_impl" },
//! );
//! },
//! },
//! ],
//! });
//! ```
//!
//! ## Very important information
//!
//! ### Error handling and panics
//!
//! It is highly recommended to avoid panicking as this will crash the runtime. Instead, you must handle errors and log them:
//!
//! ```rust
//! let input_source_code = match handle.input_source_code() {
//! Ok(source_str) => source_str,
//! Err(_) => {
//! // If we encounter an error, we must log it so that
//! // Bun knows this plugin failed.
//! handle.log_error("Failed to fetch source code!");
//! return;
//! }
//! };
//! ```
//!
//! ### Passing state to and from JS: `External`
//!
//! One way to communicate data between your plugin and JS is through NAPI's [External](https://napi.rs/docs/concepts/external) type.
//!
//! An External in NAPI is like an opaque pointer to data that can be passed to and from JS. Inside your NAPI module, you can retrieve
//! the pointer and modify the data.
//!
//! As an example that extends our getting started example above, let's say you wanted to count the number of `foo`'s that the native plugin encounters.
//!
//! You would expose a NAPI module function which creates this state. Recall that state in native plugins must be threadsafe. This usually means
//! that your state must be `Sync`:
//!
//! ```rust
//! struct PluginState {
//! foo_count: std::sync::atomic::AtomicU32,
//! }
//!
//! #[napi]
//! pub fn create_plugin_state() -> External<PluginState> {
//! let external = External::new(PluginState {
//!     foo_count: std::sync::atomic::AtomicU32::new(0),
//! });
//!
//! external
//! }
//!
//!
//! #[napi]
//! pub fn get_foo_count(plugin_state: External<PluginState>) -> u32 {
//! let plugin_state: &PluginState = &plugin_state;
//! plugin_state.foo_count.load(std::sync::atomic::Ordering::Relaxed)
//! }
//! ```
//!
//! When you register your plugin from JavaScript, call the NAPI module function to create the external and then pass it in:
//!
//! ```js
//! const napiModule = require("path/to/napi_module.node");
//! const pluginState = napiModule.createPluginState();
//!
//! const result = await Bun.build({
//! entrypoints: ["index.ts"],
//! plugins: [
//! {
//! name: "replace-foo-with-bar",
//! setup(build) {
//! build.onBeforeParse(
//! { filter: /\.ts/ },
//! {
//! napiModule,
//! symbol: "on_before_parse_plugin_impl",
//! // pass our NAPI external which contains our plugin state here
//! external: pluginState,
//! },
//! );
//! },
//! },
//! ],
//! });
//!
//! console.log("Total `foo`s encountered: ", pluginState.getFooCount());
//! ```
//!
//! Finally, from the native implementation of your plugin, you can extract the external:
//!
//! ```rust
//! pub extern "C" fn on_before_parse_plugin_impl(
//! args: *const bun_native_plugin::sys::OnBeforeParseArguments,
//! result: *mut bun_native_plugin::sys::OnBeforeParseResult,
//! ) {
//! let args = unsafe { &*args };
//! let result = unsafe { &mut *result };
//!
//! let mut handle = OnBeforeParse::from_raw(args, result) {
//! Ok(handle) => handle,
//! Err(_) => {
//! // `OnBeforeParse::from_raw` handles error logging
//! // so it fine to return here.
//! return;
//! }
//! };
//!
//! let plugin_state: &PluginState =
//! // This operation is only safe if you pass in an external when registering the plugin.
//! // If you don't, this could lead to a segfault or access of undefined memory.
//! match unsafe { handle.external().and_then(|state| state.ok_or(Error::Unknown)) } {
//! Ok(state) => state,
//! Err(_) => {
//! handle.log_error("Failed to get external!");
//! return;
//! }
//! };
//!
//!
//! // Fetch our source code again
//! let input_source_code = match handle.input_source_code() {
//! Ok(source_str) => source_str,
//! Err(_) => {
//! handle.log_error("Failed to fetch source code!");
//! return;
//! }
//! };
//!
//! // Count the number of `foo`s and add it to our state
//!   let foo_count = input_source_code.matches("foo").count() as u32;
//! plugin_state.foo_count.fetch_add(foo_count, std::sync::atomic::Ordering::Relaxed);
//! }
//! ```
//!
//! ### Concurrency
//!
//! Your `extern "C"` plugin function can be called _on any thread_ at _any time_ and _multiple times at once_.
//!
//! Therefore, you must design any state management to be threadsafe.
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
pub use anyhow;
pub use bun_macro::bun;
#[repr(transparent)]
pub struct BunPluginName(*const c_char);
impl BunPluginName {
pub const fn new(ptr: *const c_char) -> Self {
Self(ptr)
}
}
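/// Export the plugin name as the `BUN_PLUGIN_NAME` symbol (NUL-terminated) and
/// declare a no-op NAPI registration function. Illustrative usage in a plugin
/// crate (the plugin name is an example):
///
/// ```rust,ignore
/// define_bun_plugin!("replace-foo-with-bar");
/// ```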
#[macro_export]
macro_rules! define_bun_plugin {
($name:expr) => {
pub static BUN_PLUGIN_NAME_STRING: &str = concat!($name, "\0");
#[no_mangle]
pub static BUN_PLUGIN_NAME: bun_native_plugin::BunPluginName =
bun_native_plugin::BunPluginName::new(BUN_PLUGIN_NAME_STRING.as_ptr() as *const _);
#[napi]
fn bun_plugin_register() {}
};
}
unsafe impl Sync for BunPluginName {}
use std::{
any::TypeId,
borrow::Cow,
cell::UnsafeCell,
ffi::{c_char, c_void},
marker::PhantomData,
str::Utf8Error,
sync::PoisonError,
};
pub mod sys {
include!(concat!(env!("OUT_DIR"), "/bindings.rs"));
}
#[repr(C)]
pub struct TaggedObject<T> {
type_id: TypeId,
pub(crate) object: Option<T>,
}
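/// Owns the output source string handed back to Bun. `set_output_source_code`
/// leaks a `String` into these raw parts; `Drop` (and
/// `free_plugin_source_code_context`) rebuilds the `String` so the allocation
/// is eventually freed.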
struct SourceCodeContext {
source_ptr: *mut u8,
source_len: usize,
source_cap: usize,
}
extern "C" fn free_plugin_source_code_context(ctx: *mut c_void) {
// SAFETY: The ctx pointer is a pointer to the `SourceCodeContext` struct we allocated.
unsafe {
drop(Box::from_raw(ctx as *mut SourceCodeContext));
}
}
impl Drop for SourceCodeContext {
fn drop(&mut self) {
if !self.source_ptr.is_null() {
// SAFETY: These fields come from a `String` that we allocated.
unsafe {
drop(String::from_raw_parts(
self.source_ptr,
self.source_len,
self.source_cap,
));
}
}
}
}
pub type BunLogLevel = sys::BunLogLevel;
pub type BunLoader = sys::BunLoader;
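/// Borrow a `(ptr, len)` pair received from Bun as a string, falling back to a
/// lossy (allocating) conversion when the bytes are not valid UTF-8.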
fn get_from_raw_str<'a>(ptr: *const u8, len: usize) -> PluginResult<Cow<'a, str>> {
let slice: &'a [u8] = unsafe { std::slice::from_raw_parts(ptr, len) };
// Windows allows invalid UTF-16 strings in the filesystem. These get converted to WTF-8 in Zig.
// Meaning the string may contain invalid UTF-8, we'll have to use the safe checked version.
#[cfg(target_os = "windows")]
{
std::str::from_utf8(slice)
.map(Into::into)
.or_else(|_| Ok(String::from_utf8_lossy(slice)))
}
#[cfg(not(target_os = "windows"))]
{
        // The source normally arrives from Zig as valid UTF-8, so the checked conversion should succeed without allocating.
std::str::from_utf8(slice)
.map(Into::into)
.or_else(|_| Ok(String::from_utf8_lossy(slice)))
}
}
#[derive(Debug, Clone)]
pub enum Error {
Utf8(Utf8Error),
IncompatiblePluginVersion,
ExternalTypeMismatch,
Unknown,
LockPoisoned,
}
pub type PluginResult<T> = std::result::Result<T, Error>;
pub type Result<T> = anyhow::Result<T>;
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self)
}
}
impl std::error::Error for Error {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
None
}
fn description(&self) -> &str {
"description() is deprecated; use Display"
}
fn cause(&self) -> Option<&dyn std::error::Error> {
self.source()
}
}
impl From<Utf8Error> for Error {
fn from(value: Utf8Error) -> Self {
Self::Utf8(value)
}
}
impl<Guard> From<PoisonError<Guard>> for Error {
fn from(_: PoisonError<Guard>) -> Self {
Self::LockPoisoned
}
}
/// A safe handle for the arguments + result struct for the
/// `OnBeforeParse` bundler lifecycle hook.
///
/// This struct acts as a safe wrapper around the raw C API structs
/// (`sys::OnBeforeParseArguments`/`sys::OnBeforeParseResult`) needed to
/// implement the `OnBeforeParse` bundler lifecycle hook.
///
/// To initialize this struct, see the `from_raw` method.
pub struct OnBeforeParse<'a> {
pub args_raw: *mut sys::OnBeforeParseArguments,
result_raw: *mut sys::OnBeforeParseResult,
compilation_context: *mut SourceCodeContext,
__phantom: PhantomData<&'a ()>,
}
impl<'a> OnBeforeParse<'a> {
/// Initialize this struct from references to their raw counterparts.
///
/// This function will do a versioning check to ensure that the plugin
/// is compatible with the current version of Bun. If the plugin is not
/// compatible, it will log an error and return an error result.
///
/// # Example
/// ```rust
/// extern "C" fn on_before_parse_impl(args: *const sys::OnBeforeParseArguments, result: *mut sys::OnBeforeParseResult) {
/// let args = unsafe { &*args };
/// let result = unsafe { &mut *result };
/// let handle = match OnBeforeParse::from_raw(args, result) {
/// Ok(handle) => handle,
/// Err(()) => return,
/// };
/// }
/// ```
pub fn from_raw(
args: *mut sys::OnBeforeParseArguments,
result: *mut sys::OnBeforeParseResult,
) -> PluginResult<Self> {
if unsafe { (*args).__struct_size } < std::mem::size_of::<sys::OnBeforeParseArguments>()
|| unsafe { (*result).__struct_size } < std::mem::size_of::<sys::OnBeforeParseResult>()
{
let message = "This plugin is not compatible with the current version of Bun.";
let mut log_options = sys::BunLogOptions {
__struct_size: std::mem::size_of::<sys::BunLogOptions>(),
message_ptr: message.as_ptr(),
message_len: message.len(),
path_ptr: unsafe { (*args).path_ptr },
path_len: unsafe { (*args).path_len },
source_line_text_ptr: std::ptr::null(),
source_line_text_len: 0,
level: BunLogLevel::BUN_LOG_LEVEL_ERROR as i8,
line: 0,
lineEnd: 0,
column: 0,
columnEnd: 0,
};
// SAFETY: The `log` function pointer is guaranteed to be valid by the Bun runtime.
unsafe {
((*result).log.unwrap())(args, &mut log_options);
}
return Err(Error::IncompatiblePluginVersion);
}
Ok(Self {
args_raw: args,
result_raw: result,
compilation_context: std::ptr::null_mut() as *mut _,
__phantom: Default::default(),
})
}
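    /// Get the path of the file for which this hook is running.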
pub fn path(&self) -> PluginResult<Cow<'_, str>> {
unsafe { get_from_raw_str((*self.args_raw).path_ptr, (*self.args_raw).path_len) }
}
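    /// Get the namespace of the file for which this hook is running.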
pub fn namespace(&self) -> PluginResult<Cow<'_, str>> {
unsafe {
get_from_raw_str(
(*self.args_raw).namespace_ptr,
(*self.args_raw).namespace_len,
)
}
}
/// Get the external object from the `OnBeforeParse` arguments.
///
/// The external object is set by the plugin definition inside of JS:
/// ```js
/// await Bun.build({
/// plugins: [
/// {
/// name: "my-plugin",
/// setup(builder) {
/// const native_plugin = require("./native_plugin.node");
/// const external = native_plugin.createExternal();
    ///       builder.onBeforeParse({ filter: /\.ts/ }, { napiModule: native_plugin, symbol: 'onBeforeParse', external });
/// },
/// },
/// ],
/// });
/// ```
///
/// The external object must be created from NAPI for this function to be safe!
///
/// This function will return an error if the external object is not a
/// valid tagged object for the given type.
///
/// This function will return `Ok(None)` if there is no external object
/// set.
///
/// # Example
/// The code to create the external from napi-rs:
/// ```rs
/// #[no_mangle]
/// #[napi]
/// pub fn create_my_external() -> External<MyStruct> {
/// let external = External::new(MyStruct::new());
///
/// external
/// }
/// ```
///
/// The code to extract the external:
/// ```rust
/// let external = match handle.external::<MyStruct>() {
/// Ok(Some(external)) => external,
/// _ => {
/// handle.log_error("Could not get external object.");
/// return;
/// },
/// };
/// ```
pub unsafe fn external<T: 'static + Sync>(&self) -> PluginResult<Option<&'static T>> {
if unsafe { (*self.args_raw).external.is_null() } {
return Ok(None);
}
let external: *mut TaggedObject<T> =
unsafe { (*self.args_raw).external as *mut TaggedObject<T> };
unsafe {
if (*external).type_id != TypeId::of::<T>() {
return Err(Error::ExternalTypeMismatch);
}
Ok((*external).object.as_ref())
}
}
    /// The same as [`OnBeforeParse::external`], but returns a mutable reference.
///
/// This is unsafe as you must ensure that no other invocation of the plugin
/// simultaneously holds a mutable reference to the external.
pub unsafe fn external_mut<T: 'static + Sync>(&mut self) -> PluginResult<Option<&mut T>> {
if unsafe { (*self.args_raw).external.is_null() } {
return Ok(None);
}
let external: *mut TaggedObject<T> =
unsafe { (*self.args_raw).external as *mut TaggedObject<T> };
unsafe {
if (*external).type_id != TypeId::of::<T>() {
return Err(Error::ExternalTypeMismatch);
}
Ok((*external).object.as_mut())
}
}
/// Get the input source code for the current file.
///
/// On Windows, this function may return an `Err(Error::Utf8(...))` if the
/// source code contains invalid UTF-8.
pub fn input_source_code(&self) -> PluginResult<Cow<'_, str>> {
let fetch_result = unsafe {
((*self.result_raw).fetchSourceCode.unwrap())(
self.args_raw as *const _,
self.result_raw,
)
};
if fetch_result != 0 {
Err(Error::Unknown)
} else {
// SAFETY: We don't hand out mutable references to `result_raw` so dereferencing here is safe.
unsafe {
get_from_raw_str((*self.result_raw).source_ptr, (*self.result_raw).source_len)
}
}
}
/// Set the output source code for the current file.
pub fn set_output_source_code(&mut self, source: String, loader: BunLoader) {
let source_cap = source.capacity();
let source = source.leak();
let source_ptr = source.as_mut_ptr();
let source_len = source.len();
if self.compilation_context.is_null() {
self.compilation_context = Box::into_raw(Box::new(SourceCodeContext {
source_ptr,
source_len,
source_cap,
}));
// SAFETY: We don't hand out mutable references to `result_raw` so dereferencing it is safe.
unsafe {
(*self.result_raw).plugin_source_code_context =
self.compilation_context as *mut c_void;
(*self.result_raw).free_plugin_source_code_context =
Some(free_plugin_source_code_context);
}
} else {
unsafe {
// SAFETY: If we're here we know that `compilation_context` is not null.
let context = &mut *self.compilation_context;
drop(String::from_raw_parts(
context.source_ptr,
context.source_len,
context.source_cap,
));
context.source_ptr = source_ptr;
context.source_len = source_len;
context.source_cap = source_cap;
}
}
// SAFETY: We don't hand out mutable references to `result_raw` so dereferencing it is safe.
unsafe {
(*self.result_raw).loader = loader as u8;
(*self.result_raw).source_ptr = source_ptr;
(*self.result_raw).source_len = source_len;
}
}
/// Set the output loader for the current file.
pub fn set_output_loader(&self, loader: BunLoader) {
// SAFETY: We don't hand out mutable references to `result_raw` so dereferencing it is safe.
unsafe {
(*self.result_raw).loader = loader as u8;
}
}
/// Get the output loader for the current file.
pub fn output_loader(&self) -> BunLoader {
unsafe { std::mem::transmute((*self.result_raw).loader as u32) }
}
/// Log an error message.
pub fn log_error(&self, message: &str) {
self.log(message, BunLogLevel::BUN_LOG_LEVEL_ERROR)
}
/// Log a message with the given level.
pub fn log(&self, message: &str, level: BunLogLevel) {
let mut log_options = log_from_message_and_level(
message,
level,
unsafe { (*self.args_raw).path_ptr },
unsafe { (*self.args_raw).path_len },
);
unsafe {
((*self.result_raw).log.unwrap())(self.args_raw, &mut log_options);
}
}
}
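/// Build a `sys::BunLogOptions` for the given message and level, associating it
/// with the file path passed by Bun.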
pub fn log_from_message_and_level(
message: &str,
level: BunLogLevel,
path: *const u8,
path_len: usize,
) -> sys::BunLogOptions {
sys::BunLogOptions {
__struct_size: std::mem::size_of::<sys::BunLogOptions>(),
message_ptr: message.as_ptr(),
message_len: message.len(),
path_ptr: path as *const _,
path_len,
source_line_text_ptr: std::ptr::null(),
source_line_text_len: 0,
level: level as i8,
line: 0,
lineEnd: 0,
column: 0,
columnEnd: 0,
}
}


@@ -1 +0,0 @@
#include <bun-native-bundler-plugin-api/bundler_plugin.h>


@@ -15,7 +15,7 @@ This plugin can be used to support `.yaml` loaders in Bun's bundler by passing i
```ts
import yamlPlugin from "bun-plugin-yaml";
await Bun.build({
Bun.build({
entrypoints: ["./index.tsx"],
// other config

Some files were not shown because too many files have changed in this diff.