mirror of https://github.com/oven-sh/bun
synced 2026-02-22 08:41:46 +00:00

Compare commits: bun-v1.1.4...nektro-pat
28 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 7a15f2f654 | |
| | 32aa69e0da | |
| | 6d879448aa | |
| | b53e5683d1 | |
| | 96697cbbb8 | |
| | 12f61eb666 | |
| | 14a3a15cd0 | |
| | 1b2d9cdd80 | |
| | 405eebdba0 | |
| | a1d20ee54a | |
| | e006781c42 | |
| | 25e365d3fb | |
| | 3ee3a8ddab | |
| | 6d07bb58f3 | |
| | 3b2bea3e7a | |
| | 62064490e6 | |
| | 9153916d69 | |
| | 53877382fb | |
| | 3d28cd4eb0 | |
| | 2c6728a8ce | |
| | ad7343a122 | |
| | 9a670fc690 | |
| | 9cf67e301b | |
| | b84d96ab0d | |
| | 2eb19063c1 | |
| | d401eab461 | |
| | 2ef22d5c2c | |
| | deab3a6029 | |
@@ -1,170 +0,0 @@
ARG LLVM_VERSION="18"
ARG REPORTED_LLVM_VERSION="18.1.8"
ARG OLD_BUN_VERSION="1.1.38"
ARG DEFAULT_CFLAGS="-mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -ffunction-sections -fdata-sections -faddrsig -fno-unwind-tables -fno-asynchronous-unwind-tables"
ARG DEFAULT_CXXFLAGS="-flto=full -fwhole-program-vtables -fforce-emit-vtables"
ARG BUILDKITE_AGENT_TAGS="queue=linux,os=linux,arch=${TARGETARCH}"

FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-arm64
FROM --platform=$BUILDPLATFORM ubuntu:18.04 as base-amd64
FROM base-$TARGETARCH as base

ARG LLVM_VERSION
ARG OLD_BUN_VERSION
ARG TARGETARCH
ARG DEFAULT_CXXFLAGS
ARG DEFAULT_CFLAGS
ARG REPORTED_LLVM_VERSION

ENV DEBIAN_FRONTEND=noninteractive \
    CI=true \
    DOCKER=true

RUN echo "Acquire::Queue-Mode \"host\";" > /etc/apt/apt.conf.d/99-apt-queue-mode.conf \
    && echo "Acquire::Timeout \"120\";" >> /etc/apt/apt.conf.d/99-apt-timeout.conf \
    && echo "Acquire::Retries \"3\";" >> /etc/apt/apt.conf.d/99-apt-retries.conf \
    && echo "APT::Install-Recommends \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-recommends.conf \
    && echo "APT::Install-Suggests \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-suggests.conf

RUN apt-get update && apt-get install -y --no-install-recommends \
    wget curl git python3 python3-pip ninja-build \
    software-properties-common apt-transport-https \
    ca-certificates gnupg lsb-release unzip \
    libxml2-dev ruby ruby-dev bison gawk perl make golang \
    && add-apt-repository ppa:ubuntu-toolchain-r/test \
    && apt-get update \
    && apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \
    libasan6 libubsan1 libatomic1 libtsan0 liblsan0 \
    libgfortran5 libc6-dev \
    && wget https://apt.llvm.org/llvm.sh \
    && chmod +x llvm.sh \
    && ./llvm.sh ${LLVM_VERSION} all \
    && rm llvm.sh

RUN --mount=type=tmpfs,target=/tmp \
    cmake_version="3.30.5" && \
    if [ "$TARGETARCH" = "arm64" ]; then \
    cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-aarch64.sh"; \
    else \
    cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-x86_64.sh"; \
    fi && \
    wget -O /tmp/cmake.sh "$cmake_url" && \
    sh /tmp/cmake.sh --skip-license --prefix=/usr

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \
    --slave /usr/bin/g++ g++ /usr/bin/g++-13 \
    --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \
    --slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-13 \
    --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-13

RUN echo "ARCH_PATH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64-linux-gnu" || echo "x86_64-linux-gnu")" >> /etc/environment \
    && echo "BUN_ARCH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64" || echo "x64")" >> /etc/environment

ENV LD_LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
    LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
    CPLUS_INCLUDE_PATH=/usr/include/c++/13:/usr/include/${ARCH_PATH}/c++/13 \
    C_INCLUDE_PATH=/usr/lib/gcc/${ARCH_PATH}/13/include \
    CFLAGS=${DEFAULT_CFLAGS} \
    CXXFLAGS="${DEFAULT_CFLAGS} ${DEFAULT_CXXFLAGS}"

RUN if [ "$TARGETARCH" = "arm64" ]; then \
    export ARCH_PATH="aarch64-linux-gnu"; \
    else \
    export ARCH_PATH="x86_64-linux-gnu"; \
    fi \
    && mkdir -p /usr/lib/gcc/${ARCH_PATH}/13 \
    && ln -sf /usr/lib/${ARCH_PATH}/libstdc++.so.6 /usr/lib/gcc/${ARCH_PATH}/13/ \
    && echo "/usr/lib/gcc/${ARCH_PATH}/13" > /etc/ld.so.conf.d/gcc-13.conf \
    && echo "/usr/lib/${ARCH_PATH}" >> /etc/ld.so.conf.d/gcc-13.conf \
    && ldconfig

RUN for f in /usr/lib/llvm-${LLVM_VERSION}/bin/*; do ln -sf "$f" /usr/bin; done \
    && ln -sf /usr/bin/clang-${LLVM_VERSION} /usr/bin/clang \
    && ln -sf /usr/bin/clang++-${LLVM_VERSION} /usr/bin/clang++ \
    && ln -sf /usr/bin/lld-${LLVM_VERSION} /usr/bin/lld \
    && ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \
    && ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \
    && ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \
    && ln -sf /usr/bin/ld.lld /usr/bin/ld \
    && ln -sf /usr/bin/clang /usr/bin/cc \
    && ln -sf /usr/bin/clang++ /usr/bin/c++

ENV CC="clang" \
    CXX="clang++" \
    AR="llvm-ar-${LLVM_VERSION}" \
    RANLIB="llvm-ranlib-${LLVM_VERSION}" \
    LD="lld-${LLVM_VERSION}"

RUN --mount=type=tmpfs,target=/tmp \
    bash -c '\
    set -euxo pipefail && \
    source /etc/environment && \
    echo "Downloading bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip from https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip" && \
    curl -fsSL https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip -o /tmp/bun.zip && \
    unzip /tmp/bun.zip -d /tmp/bun && \
    mv /tmp/bun/*/bun /usr/bin/bun && \
    chmod +x /usr/bin/bun'

ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}

WORKDIR /workspace

FROM --platform=$BUILDPLATFORM base as buildkite
ARG BUILDKITE_AGENT_TAGS

# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
    && export PATH=$HOME/.cargo/bin:$PATH \
    && rustup install nightly \
    && rustup default nightly

RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64"; fi) && \
    echo "Downloading buildkite" && \
    curl -fsSL "https://github.com/buildkite/agent/releases/download/v3.87.0/buildkite-agent-linux-${ARCH}-3.87.0.tar.gz" -o /tmp/buildkite-agent.tar.gz && \
    mkdir -p /tmp/buildkite-agent && \
    tar -xzf /tmp/buildkite-agent.tar.gz -C /tmp/buildkite-agent && \
    mv /tmp/buildkite-agent/buildkite-agent /usr/bin/buildkite-agent

RUN mkdir -p /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent/cache/bun

COPY ../*/agent.mjs /var/bun/scripts/

ENV BUN_INSTALL_CACHE=/var/lib/buildkite-agent/cache/bun
ENV BUILDKITE_AGENT_TAGS=${BUILDKITE_AGENT_TAGS}

WORKDIR /var/bun/scripts

ENV PATH=/root/.cargo/bin:$PATH

CMD ["bun", "/var/bun/scripts/agent.mjs", "start"]

FROM --platform=$BUILDPLATFORM base as bun-build-linux-local

ARG LLVM_VERSION
WORKDIR /workspace/bun

COPY . /workspace/bun

# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
    && export PATH=$HOME/.cargo/bin:$PATH \
    && rustup install nightly \
    && rustup default nightly

ENV PATH=/root/.cargo/bin:$PATH

ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}

RUN --mount=type=tmpfs,target=/workspace/bun/build \
    ls -la \
    && bun run build:release \
    && mkdir -p /target \
    && cp -r /workspace/bun/build/release/bun /target/bun
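For orientation, this deleted Dockerfile exposed two targets, `buildkite` and `bun-build-linux-local`. Building the agent image locally would have looked roughly like this (a sketch; the tag is a placeholder, and the `.buildkite/Dockerfile` path is inferred from the bootstrap script below):

```bash
# Build the Buildkite agent stage for x86_64 and load it into the local daemon.
docker buildx build \
  --platform linux/amd64 \
  --target buildkite \
  --tag bun-buildkite:local \
  --load \
  -f .buildkite/Dockerfile .
```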
@@ -1,122 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Check if running as root
if [ "$EUID" -ne 0 ]; then
  echo "error: must run as root"
  exit 1
fi

# Check OS compatibility
if ! command -v dnf &> /dev/null; then
  echo "error: this script requires dnf (RHEL/Fedora/CentOS)"
  exit 1
fi

# Ensure /tmp/agent.mjs, /tmp/Dockerfile are present
if [ ! -f /tmp/agent.mjs ] || [ ! -f /tmp/Dockerfile ]; then
  # Print each missing file
  if [ ! -f /tmp/agent.mjs ]; then
    echo "error: /tmp/agent.mjs is missing"
  fi
  if [ ! -f /tmp/Dockerfile ]; then
    echo "error: /tmp/Dockerfile is missing"
  fi
  exit 1
fi

# Install Docker
dnf update -y
dnf install -y docker

systemctl enable docker
systemctl start docker || {
  echo "error: failed to start Docker"
  exit 1
}

# Create builder
docker buildx create --name builder --driver docker-container --bootstrap --use || {
  echo "error: failed to create Docker buildx builder"
  exit 1
}

# Set up Docker to start on boot
cat << 'EOF' > /etc/systemd/system/buildkite-agent.service
[Unit]
Description=Buildkite Docker Container
After=docker.service network-online.target
Requires=docker.service network-online.target

[Service]
TimeoutStartSec=0
Restart=always
RestartSec=5
ExecStartPre=-/usr/bin/docker stop buildkite
ExecStartPre=-/usr/bin/docker rm buildkite
ExecStart=/usr/bin/docker run \
  --name buildkite \
  --restart=unless-stopped \
  --network host \
  -v /var/run/docker.sock:/var/run/docker.sock \
  -v /tmp:/tmp \
  buildkite:latest

[Install]
WantedBy=multi-user.target
EOF

echo "Building Buildkite image"

# Clean up any previous build artifacts
rm -rf /tmp/fakebun
mkdir -p /tmp/fakebun/scripts /tmp/fakebun/.buildkite

# Copy required files
cp /tmp/agent.mjs /tmp/fakebun/scripts/ || {
  echo "error: failed to copy agent.mjs"
  exit 1
}
cp /tmp/Dockerfile /tmp/fakebun/.buildkite/Dockerfile || {
  echo "error: failed to copy Dockerfile"
  exit 1
}

cd /tmp/fakebun || {
  echo "error: failed to change directory"
  exit 1
}

# Build the Buildkite image
docker buildx build \
  --platform $(uname -m | sed 's/aarch64/linux\/arm64/;s/x86_64/linux\/amd64/') \
  --tag buildkite:latest \
  --target buildkite \
  -f .buildkite/Dockerfile \
  --load \
  . || {
  echo "error: Docker build failed"
  exit 1
}

# Create container to ensure image is cached in AMI
docker container create \
  --name buildkite \
  --restart=unless-stopped \
  buildkite:latest || {
  echo "error: failed to create buildkite container"
  exit 1
}

# Reload systemd to pick up new service
systemctl daemon-reload

# Enable the service, but don't start it yet
systemctl enable buildkite-agent || {
  echo "error: failed to enable buildkite-agent service"
  exit 1
}

echo "Bootstrap complete"
echo "To start the Buildkite agent, run: "
echo "  systemctl start buildkite-agent"
@@ -13,4 +13,19 @@ steps:
    agents:
      queue: "build-darwin"
    command:
      - "node .buildkite/ci.mjs"
      - ".buildkite/scripts/prepare-build.sh"

  - if: "build.branch == 'main' && !build.pull_request.repository.fork"
    label: ":github:"
    agents:
      queue: "test-darwin"
    depends_on:
      - "darwin-aarch64-build-bun"
      - "darwin-x64-build-bun"
      - "linux-aarch64-build-bun"
      - "linux-x64-build-bun"
      - "linux-x64-baseline-build-bun"
      - "windows-x64-build-bun"
      - "windows-x64-baseline-build-bun"
    command:
      - ".buildkite/scripts/upload-release.sh"
@@ -11,7 +11,6 @@ import {
  getBuildkiteEmoji,
  getBuildMetadata,
  getBuildNumber,
  getCanaryRevision,
  getCommitMessage,
  getEmoji,
  getEnv,
@@ -44,6 +43,7 @@ import {
 * @property {Arch} arch
 * @property {Abi} [abi]
 * @property {boolean} [baseline]
 * @property {boolean} [canary]
 * @property {Profile} [profile]
 */

@@ -91,11 +91,11 @@ function getTargetLabel(target) {
 * @property {Arch} arch
 * @property {Abi} [abi]
 * @property {boolean} [baseline]
 * @property {boolean} [canary]
 * @property {Profile} [profile]
 * @property {Distro} [distro]
 * @property {string} release
 * @property {Tier} [tier]
 * @property {string[]} [features]
 */

/**
@@ -103,10 +103,9 @@ function getTargetLabel(target) {
 */
const buildPlatforms = [
  { os: "darwin", arch: "aarch64", release: "14" },
  { os: "darwin", arch: "x64", release: "14" },
  { os: "linux", arch: "aarch64", distro: "amazonlinux", release: "2023", features: ["docker"] },
  { os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] },
  { os: "linux", arch: "x64", baseline: true, distro: "amazonlinux", release: "2023", features: ["docker"] },
  { os: "linux", arch: "aarch64", distro: "debian", release: "11" },
  { os: "linux", arch: "x64", distro: "debian", release: "11" },
  { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "11" },
  { os: "linux", arch: "aarch64", abi: "musl", distro: "alpine", release: "3.20" },
  { os: "linux", arch: "x64", abi: "musl", distro: "alpine", release: "3.20" },
  { os: "linux", arch: "x64", abi: "musl", baseline: true, distro: "alpine", release: "3.20" },
@@ -120,8 +119,6 @@ const buildPlatforms = [
const testPlatforms = [
  { os: "darwin", arch: "aarch64", release: "14", tier: "latest" },
  { os: "darwin", arch: "aarch64", release: "13", tier: "previous" },
  { os: "darwin", arch: "x64", release: "14", tier: "latest" },
  { os: "darwin", arch: "x64", release: "13", tier: "previous" },
  { os: "linux", arch: "aarch64", distro: "debian", release: "12", tier: "latest" },
  { os: "linux", arch: "x64", distro: "debian", release: "12", tier: "latest" },
  { os: "linux", arch: "x64", baseline: true, distro: "debian", release: "12", tier: "latest" },
@@ -176,21 +173,12 @@ function getPlatformLabel(platform) {
 * @returns {string}
 */
function getImageKey(platform) {
  const { os, arch, distro, release, features, abi } = platform;
  const { os, arch, distro, release } = platform;
  const version = release.replace(/\./g, "");
  let key = `${os}-${arch}-${version}`;
  if (distro) {
    key += `-${distro}`;
    return `${os}-${arch}-${distro}-${version}`;
  }
  if (features?.length) {
    key += `-with-${features.join("-")}`;
  }

  if (abi) {
    key += `-${abi}`;
  }

  return key;
  return `${os}-${arch}-${version}`;
}
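As a worked example (derived from the two variants above, not text from the diff): for `{ os: "linux", arch: "x64", distro: "amazonlinux", release: "2023", features: ["docker"] }`, the variant that folds in `features` and `abi` produces `linux-x64-2023-amazonlinux-with-docker`, while the simplified variant produces `linux-x64-amazonlinux-2023`.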

/**
@@ -204,19 +192,15 @@ function getImageLabel(platform) {

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
 * @param {boolean} [dryRun]
 * @returns {string}
 */
function getImageName(platform, options) {
  const { os } = platform;
  const { buildImages, publishImages } = options;

  const name = getImageKey(platform);

  if (buildImages && !publishImages) {
function getImageName(platform, dryRun) {
  const { os, arch, distro, release } = platform;
  const name = distro ? `${os}-${arch}-${distro}-${release}` : `${os}-${arch}-${release}`;
  if (dryRun) {
    return `${name}-build-${getBuildNumber()}`;
  }

  return `${name}-v${getBootstrapVersion(os)}`;
}

@@ -270,22 +254,23 @@ function getPriority() {

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
 * @param {Ec2Options} ec2Options
 * @param {Ec2Options} options
 * @returns {Agent}
 */
function getEc2Agent(platform, options, ec2Options) {
function getEc2Agent(platform, options) {
  const { os, arch, abi, distro, release } = platform;
  const { instanceType, cpuCount, threadsPerCore } = ec2Options;
  const { instanceType, cpuCount, threadsPerCore, dryRun } = options;
  return {
    os,
    arch,
    abi,
    distro,
    release,
    // The agent is created by robobun, see more details here:
    // https://github.com/oven-sh/robobun/blob/d46c07e0ac5ac0f9ffe1012f0e98b59e1a0d387a/src/robobun.ts#L1707
    robobun: true,
    robobun2: true,
    "image-name": getImageName(platform, options),
    "image-name": getImageName(platform, dryRun),
    "instance-type": instanceType,
    "cpu-count": cpuCount,
    "threads-per-core": threadsPerCore,
@@ -295,11 +280,10 @@ function getEc2Agent(platform, options, ec2Options) {

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
 * @returns {string}
 */
function getCppAgent(platform, options) {
  const { os, arch, distro } = platform;
function getCppAgent(platform, dryRun) {
  const { os, arch } = platform;

  if (os === "darwin") {
    return {
@@ -309,31 +293,45 @@ function getCppAgent(platform, options) {
    };
  }

  return getEc2Agent(platform, options, {
  return getEc2Agent(platform, {
    instanceType: arch === "aarch64" ? "c8g.16xlarge" : "c7i.16xlarge",
    cpuCount: 32,
    threadsPerCore: 1,
    dryRun,
  });
}

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
 * @returns {Agent}
 */
function getZigAgent(platform, options) {
function getZigAgent(platform, dryRun) {
  const { arch } = platform;

  return {
    queue: "build-zig",
  };

  // return getEc2Agent(
  //   {
  //     os: "linux",
  //     arch,
  //     distro: "debian",
  //     release: "11",
  //   },
  //   {
  //     instanceType: arch === "aarch64" ? "c8g.2xlarge" : "c7i.2xlarge",
  //     cpuCount: 4,
  //     threadsPerCore: 1,
  //   },
  // );
}

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
 * @returns {Agent}
 */
function getTestAgent(platform, options) {
function getTestAgent(platform, dryRun) {
  const { os, arch } = platform;

  if (os === "darwin") {
@@ -346,25 +344,28 @@ function getTestAgent(platform, options) {

  // TODO: `dev-server-ssr-110.test.ts` and `next-build.test.ts` run out of memory at 8GB of memory, so use 16GB instead.
  if (os === "windows") {
    return getEc2Agent(platform, options, {
    return getEc2Agent(platform, {
      instanceType: "c7i.2xlarge",
      cpuCount: 2,
      threadsPerCore: 1,
      dryRun,
    });
  }

  if (arch === "aarch64") {
    return getEc2Agent(platform, options, {
    return getEc2Agent(platform, {
      instanceType: "c8g.xlarge",
      cpuCount: 2,
      threadsPerCore: 1,
      dryRun,
    });
  }

  return getEc2Agent(platform, options, {
  return getEc2Agent(platform, {
    instanceType: "c7i.xlarge",
    cpuCount: 2,
    threadsPerCore: 1,
    dryRun,
  });
}

@@ -374,20 +375,16 @@ function getTestAgent(platform, options) {

/**
 * @param {Target} target
 * @param {PipelineOptions} options
 * @returns {Record<string, string | undefined>}
 */
function getBuildEnv(target, options) {
  const { profile, baseline, abi } = target;
function getBuildEnv(target) {
  const { profile, baseline, canary, abi } = target;
  const release = !profile || profile === "release";
  const { canary } = options;
  const revision = typeof canary === "number" ? canary : 1;

  return {
    CMAKE_BUILD_TYPE: release ? "Release" : profile === "debug" ? "Debug" : "RelWithDebInfo",
    ENABLE_BASELINE: baseline ? "ON" : "OFF",
    ENABLE_CANARY: revision > 0 ? "ON" : "OFF",
    CANARY_REVISION: revision,
    ENABLE_CANARY: canary ? "ON" : "OFF",
    ENABLE_ASSERTIONS: release ? "OFF" : "ON",
    ENABLE_LOGS: release ? "OFF" : "ON",
    ABI: abi === "musl" ? "musl" : undefined,
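Reading the two variants side by side: for a release-profile musl target with canary enabled, both emit `CMAKE_BUILD_TYPE=Release`, `ENABLE_ASSERTIONS=OFF`, `ENABLE_LOGS=OFF`, and `ABI=musl`; the difference is that one exports a numeric `CANARY_REVISION` alongside `ENABLE_CANARY`, while the other reduces canary to the single `ENABLE_CANARY=ON`/`OFF` flag.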
@@ -396,36 +393,36 @@ function getBuildEnv(target, options) {

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
 * @param {boolean} dryRun
 * @returns {Step}
 */
function getBuildVendorStep(platform, options) {
function getBuildVendorStep(platform, dryRun) {
  return {
    key: `${getTargetKey(platform)}-build-vendor`,
    label: `${getTargetLabel(platform)} - build-vendor`,
    agents: getCppAgent(platform, options),
    agents: getCppAgent(platform, dryRun),
    retry: getRetry(),
    cancel_on_build_failing: isMergeQueue(),
    env: getBuildEnv(platform, options),
    env: getBuildEnv(platform),
    command: "bun run build:ci --target dependencies",
  };
}

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
 * @param {boolean} dryRun
 * @returns {Step}
 */
function getBuildCppStep(platform, options) {
function getBuildCppStep(platform, dryRun) {
  return {
    key: `${getTargetKey(platform)}-build-cpp`,
    label: `${getTargetLabel(platform)} - build-cpp`,
    agents: getCppAgent(platform, options),
    agents: getCppAgent(platform, dryRun),
    retry: getRetry(),
    cancel_on_build_failing: isMergeQueue(),
    env: {
      BUN_CPP_ONLY: "ON",
      ...getBuildEnv(platform, options),
      ...getBuildEnv(platform),
    },
    command: "bun run build:ci --target bun",
  };
@@ -449,28 +446,27 @@ function getBuildToolchain(target) {

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
 * @returns {Step}
 */
function getBuildZigStep(platform, options) {
function getBuildZigStep(platform, dryRun) {
  const toolchain = getBuildToolchain(platform);
  return {
    key: `${getTargetKey(platform)}-build-zig`,
    label: `${getTargetLabel(platform)} - build-zig`,
    agents: getZigAgent(platform, options),
    agents: getZigAgent(platform, dryRun),
    retry: getRetry(),
    cancel_on_build_failing: isMergeQueue(),
    env: getBuildEnv(platform, options),
    env: getBuildEnv(platform),
    command: `bun run build:ci --target bun-zig --toolchain ${toolchain}`,
  };
}

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
 * @param {boolean} dryRun
 * @returns {Step}
 */
function getLinkBunStep(platform, options) {
function getLinkBunStep(platform, dryRun) {
  return {
    key: `${getTargetKey(platform)}-build-bun`,
    label: `${getTargetLabel(platform)} - build-bun`,
@@ -479,12 +475,12 @@ function getLinkBunStep(platform, options) {
      `${getTargetKey(platform)}-build-cpp`,
      `${getTargetKey(platform)}-build-zig`,
    ],
    agents: getCppAgent(platform, options),
    agents: getCppAgent(platform, dryRun),
    retry: getRetry(),
    cancel_on_build_failing: isMergeQueue(),
    env: {
      BUN_LINK_ONLY: "ON",
      ...getBuildEnv(platform, options),
      ...getBuildEnv(platform),
    },
    command: "bun run build:ci --target bun",
  };
@@ -492,17 +488,17 @@ function getLinkBunStep(platform, options) {

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
 * @param {boolean} dryRun
 * @returns {Step}
 */
function getBuildBunStep(platform, options) {
function getBuildBunStep(platform, dryRun) {
  return {
    key: `${getTargetKey(platform)}-build-bun`,
    label: `${getTargetLabel(platform)} - build-bun`,
    agents: getCppAgent(platform, options),
    agents: getCppAgent(platform, dryRun),
    retry: getRetry(),
    cancel_on_build_failing: isMergeQueue(),
    env: getBuildEnv(platform, options),
    env: getBuildEnv(platform),
    command: "bun run build:ci",
  };
}
@@ -517,13 +513,12 @@ function getBuildBunStep(platform, options) {

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
 * @param {TestOptions} [testOptions]
 * @param {TestOptions} [options]
 * @returns {Step}
 */
function getTestBunStep(platform, options, testOptions = {}) {
function getTestBunStep(platform, options = {}) {
  const { os } = platform;
  const { buildId, unifiedTests, testFiles } = testOptions;
  const { buildId, unifiedTests, testFiles, dryRun } = options;

  const args = [`--step=${getTargetKey(platform)}-build-bun`];
  if (buildId) {
@@ -534,6 +529,9 @@ function getTestBunStep(platform, options, testOptions = {}) {
  }

  const depends = [];
  if (dryRun && os !== "darwin") {
    depends.push(`${getImageKey(platform)}-build-image`);
  }
  if (!buildId) {
    depends.push(`${getTargetKey(platform)}-build-bun`);
  }
@@ -542,9 +540,10 @@ function getTestBunStep(platform, options, testOptions = {}) {
    key: `${getPlatformKey(platform)}-test-bun`,
    label: `${getPlatformLabel(platform)} - test-bun`,
    depends_on: depends,
    agents: getTestAgent(platform, options),
    retry: getRetry(),
    agents: getTestAgent(platform, dryRun),
    cancel_on_build_failing: isMergeQueue(),
    retry: getRetry(),
    soft_fail: isMainBranch() ? true : [{ exit_status: 2 }],
    parallelism: unifiedTests ? undefined : os === "darwin" ? 2 : 10,
    command:
      os === "windows"
@@ -555,14 +554,12 @@ function getTestBunStep(platform, options, testOptions = {}) {

/**
 * @param {Platform} platform
 * @param {PipelineOptions} options
 * @param {boolean} [dryRun]
 * @returns {Step}
 */
function getBuildImageStep(platform, options) {
  const { os, arch, distro, release, features } = platform;
  const { publishImages } = options;
  const action = publishImages ? "publish-image" : "create-image";

function getBuildImageStep(platform, dryRun) {
  const { os, arch, distro, release } = platform;
  const action = dryRun ? "create-image" : "publish-image";
  const command = [
    "node",
    "./scripts/machine.mjs",
@@ -575,10 +572,6 @@ function getBuildImageStep(platform, options) {
    "--ci",
    "--authorized-org=oven-sh",
  ];
  for (const feature of features || []) {
    command.push(`--feature=${feature}`);
  }

  return {
    key: `${getImageKey(platform)}-build-image`,
    label: `${getImageLabel(platform)} - build-image`,
@@ -589,21 +582,16 @@ function getBuildImageStep(platform, options) {
      DEBUG: "1",
    },
    retry: getRetry(),
    cancel_on_build_failing: isMergeQueue(),
    command: command.filter(Boolean).join(" "),
    timeout_in_minutes: 3 * 60,
  };
}

/**
 * @param {Platform[]} buildPlatforms
 * @param {PipelineOptions} options
 * @param {Platform[]} [buildPlatforms]
 * @returns {Step}
 */
function getReleaseStep(buildPlatforms, options) {
  const { canary } = options;
  const revision = typeof canary === "number" ? canary : 1;

function getReleaseStep(buildPlatforms) {
  return {
    key: "release",
    label: getBuildkiteEmoji("rocket"),
@@ -611,9 +599,6 @@ function getReleaseStep(buildPlatforms, options) {
      queue: "test-darwin",
    },
    depends_on: buildPlatforms.map(platform => `${getTargetKey(platform)}-build-bun`),
    env: {
      CANARY: revision,
    },
    command: ".buildkite/scripts/upload-release.sh",
  };
}
@@ -703,7 +688,7 @@ function getReleaseStep(buildPlatforms, options) {
 * @property {string | boolean} [forceTests]
 * @property {string | boolean} [buildImages]
 * @property {string | boolean} [publishImages]
 * @property {number} [canary]
 * @property {boolean} [canary]
 * @property {Profile[]} [buildProfiles]
 * @property {Platform[]} [buildPlatforms]
 * @property {Platform[]} [testPlatforms]
@@ -921,7 +906,6 @@ async function getPipelineOptions() {
    return;
  }

  const canary = await getCanaryRevision();
  const buildPlatformsMap = new Map(buildPlatforms.map(platform => [getTargetKey(platform), platform]));
  const testPlatformsMap = new Map(testPlatforms.map(platform => [getPlatformKey(platform), platform]));

@@ -944,13 +928,13 @@ async function getPipelineOptions() {
  const buildPlatformKeys = parseArray(options["build-platforms"]);
  const testPlatformKeys = parseArray(options["test-platforms"]);
  return {
    canary: parseBoolean(options["canary"]) ? canary : 0,
    canary: parseBoolean(options["canary"]),
    skipBuilds: parseBoolean(options["skip-builds"]),
    forceBuilds: parseBoolean(options["force-builds"]),
    skipTests: parseBoolean(options["skip-tests"]),
    testFiles: parseArray(options["test-files"]),
    buildImages: parseBoolean(options["build-images"]),
    publishImages: parseBoolean(options["publish-images"]),
    testFiles: parseArray(options["test-files"]),
    unifiedBuilds: parseBoolean(options["unified-builds"]),
    unifiedTests: parseBoolean(options["unified-tests"]),
    buildProfiles: parseArray(options["build-profiles"]),
@@ -960,7 +944,6 @@ async function getPipelineOptions() {
    testPlatforms: testPlatformKeys?.length
      ? testPlatformKeys.map(key => testPlatformsMap.get(key))
      : Array.from(testPlatformsMap.values()),
    dryRun: parseBoolean(options["dry-run"]),
  };
}

@@ -979,17 +962,15 @@ async function getPipelineOptions() {
    return false;
  };

  const isCanary =
    !parseBoolean(getEnv("RELEASE", false) || "false") &&
    !/\[(release|build release|release build)\]/i.test(commitMessage);
  return {
    canary: isCanary ? canary : 0,
    canary:
      !parseBoolean(getEnv("RELEASE", false) || "false") &&
      !/\[(release|build release|release build)\]/i.test(commitMessage),
    skipEverything: parseOption(/\[(skip ci|no ci)\]/i),
    skipBuilds: parseOption(/\[(skip builds?|no builds?|only tests?)\]/i),
    forceBuilds: parseOption(/\[(force builds?)\]/i),
    skipTests: parseOption(/\[(skip tests?|no tests?|only builds?)\]/i),
    buildImages: parseOption(/\[(build images?)\]/i),
    dryRun: parseOption(/\[(dry run)\]/i),
    publishImages: parseOption(/\[(publish images?)\]/i),
    buildPlatforms: Array.from(buildPlatformsMap.values()),
    testPlatforms: Array.from(testPlatformsMap.values()),
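These bracketed tags mean pipeline behavior can be driven from the commit message itself; a hypothetical commit using two of the flags above:

```bash
# Rebuild CI images and skip the test phase for this commit.
git commit -m "update base images [build images] [skip tests]"
```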
@@ -1032,12 +1013,11 @@ async function getPipeline(options = {}) {
    steps.push({
      key: "build-images",
      group: getBuildkiteEmoji("aws"),
      steps: [...imagePlatforms.values()].map(platform => getBuildImageStep(platform, options)),
      steps: [...imagePlatforms.values()].map(platform => getBuildImageStep(platform, !publishImages)),
    });
  }

  let { skipBuilds, forceBuilds, unifiedBuilds, dryRun } = options;
  dryRun = dryRun || !!buildImages;
  const { skipBuilds, forceBuilds, unifiedBuilds } = options;

  /** @type {string | undefined} */
  let buildId;
@@ -1057,21 +1037,22 @@ async function getPipeline(options = {}) {
      .flatMap(platform => buildProfiles.map(profile => ({ ...platform, profile })))
      .map(target => {
        const imageKey = getImageKey(target);
        const imagePlatform = imagePlatforms.get(imageKey);

        return getStepWithDependsOn(
          {
            key: getTargetKey(target),
            group: getTargetLabel(target),
            steps: unifiedBuilds
              ? [getBuildBunStep(target, options)]
              ? [getBuildBunStep(target, !!buildImages)]
              : [
                  getBuildVendorStep(target, options),
                  getBuildCppStep(target, options),
                  getBuildZigStep(target, options),
                  getLinkBunStep(target, options),
                  getBuildVendorStep(target, !!buildImages),
                  getBuildCppStep(target, !!buildImages),
                  getBuildZigStep(target, !!buildImages),
                  getLinkBunStep(target, !!buildImages),
                ],
          },
          imagePlatforms.has(imageKey) ? `${imageKey}-build-image` : undefined,
          imagePlatform ? `${imageKey}-build-image` : undefined,
        );
      }),
  );
@@ -1086,14 +1067,14 @@ async function getPipeline(options = {}) {
      .map(target => ({
        key: getTargetKey(target),
        group: getTargetLabel(target),
        steps: [getTestBunStep(target, options, { unifiedTests, testFiles, buildId })],
        steps: [getTestBunStep(target, { unifiedTests, testFiles, buildId, dryRun: !!buildImages })],
      })),
    );
  }
}

if (isMainBranch()) {
  steps.push(getReleaseStep(buildPlatforms, options));
  steps.push(getReleaseStep(buildPlatforms));
}

/** @type {Map<string, GroupStep>} */
11 .buildkite/scripts/prepare-build.sh (Executable file)
@@ -0,0 +1,11 @@
#!/bin/bash

set -eo pipefail

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

run_command node ".buildkite/ci.mjs" "$@"
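The `run_command` wrapper exists only to echo each command before executing it: `set -x` turns tracing on, and `{ set +x; } 2>/dev/null` turns it back off without printing the `set +x` itself. Illustratively (not part of the diff):

```bash
# Prints "+ echo hello" (the trace) followed by "hello" (the output).
run_command echo hello
```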
@@ -3,6 +3,10 @@
set -eo pipefail

function assert_main() {
  if [ "$RELEASE" == "1" ]; then
    echo "info: Skipping canary release because this is a release build"
    exit 0
  fi
  if [ -z "$BUILDKITE_REPO" ]; then
    echo "error: Cannot find repository for this build"
    exit 1
@@ -190,6 +194,8 @@ function create_release() {
  local artifacts=(
    bun-darwin-aarch64.zip
    bun-darwin-aarch64-profile.zip
    bun-darwin-x64.zip
    bun-darwin-x64-profile.zip
    bun-linux-aarch64.zip
    bun-linux-aarch64-profile.zip
    bun-linux-x64.zip
@@ -231,7 +237,8 @@ function create_release() {
}

function assert_canary() {
  if [ -z "$CANARY" ] || [ "$CANARY" == "0" ]; then
  local canary="$(buildkite-agent meta-data get canary 2>/dev/null)"
  if [ -z "$canary" ] || [ "$canary" == "0" ]; then
    echo "warn: Skipping release because this is not a canary build"
    exit 0
  fi
3 .clangd
@@ -3,6 +3,3 @@ Index:

CompileFlags:
  CompilationDatabase: build/debug

Diagnostics:
  UnusedIncludes: None

@@ -16,6 +16,3 @@ zig-out
build
vendor
node_modules
*.trace

packages/bun-uws/fuzzing
2 .gitignore (vendored)
@@ -116,10 +116,8 @@ scripts/env.local
sign.*.json
sign.json
src/bake/generated.ts
src/generated_enum_extractor.zig
src/bun.js/bindings-obj
src/bun.js/bindings/GeneratedJS2Native.zig
src/bun.js/bindings/GeneratedBindings.zig
src/bun.js/debug-bindings-obj
src/deps/zig-clap/.gitattributes
src/deps/zig-clap/.github
58 .vscode/launch.json (generated, vendored)
@@ -22,7 +22,7 @@
      },
      "console": "internalConsole",
      // Don't pause when the GC runs while the debugger is open.
      "postRunCommands": ["command source '${workspaceFolder}/misctools/lldb/lldb_commands'"],
      "postRunCommands": ["process handle -p true -s false -n false SIGUSR1"],
    },
The same one-line substitution of "postRunCommands" repeats in the 28 remaining launch configurations, in hunks at @@ -38, -60, -76, -92, -108, -125, -147, -169, -188, -203, -221, -236, -253, -275, -297, -313, -329, -345, -361, -378, -400, -421, -437, -452, -468, -488, -503, and -1125 (58 changed lines in total).
1 .vscode/settings.json (vendored)
@@ -63,6 +63,7 @@
    "editor.tabSize": 4,
    "editor.defaultFormatter": "xaver.clang-format",
  },
  "clangd.arguments": ["-header-insertion=never"],

  // JavaScript
  "prettier.enable": true,
@@ -2,6 +2,11 @@ Configuring a development environment for Bun can take 10-30 minutes depending o

If you are using Windows, please refer to [this guide](/docs/project/building-windows.md)

{% details summary="For Ubuntu users" %}
TL;DR: Ubuntu 22.04 is suggested.
Bun currently requires `glibc >= 2.32` in development, which means that on Ubuntu 20.04 (glibc 2.31) you will likely hit `error: undefined symbol: __libc_single_threaded` and need extra configuration. Also, according to this [issue](https://github.com/llvm/llvm-project/issues/97314), LLVM 16 is no longer maintained on Ubuntu 24.04 (noble); instead, you may want to install LLVM 16 with `brew` on Ubuntu 24.04.
{% /details %}
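Before starting, it is worth confirming which glibc your distribution ships (a generic check, not part of the docs diff):

```bash
# glibc reports its version on the first line of ldd's output.
ldd --version | head -n1
```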

## Install Dependencies

Using your system's package manager, install Bun's dependencies:
@@ -53,7 +58,7 @@ $ brew install bun

## Install LLVM

Bun requires LLVM 18 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:
Bun requires LLVM 16 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:

{% codetabs group="os" %}

@@ -84,7 +89,7 @@ $ sudo zypper install clang16 lld16 llvm16

If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-16.0.6).

Make sure Clang/LLVM 18 is in your path:
Make sure Clang/LLVM 16 is in your path:

```bash
$ which clang-16
```
47 build.zig
@@ -327,25 +327,6 @@ pub fn build(b: *Build) !void {
        .{ .os = .windows, .arch = .x86_64 },
    });
}

// zig build translate-c-headers
{
    const step = b.step("translate-c", "Copy generated translated-c-headers.zig to zig-out");
    step.dependOn(&b.addInstallFile(getTranslateC(b, b.host, .Debug).getOutput(), "translated-c-headers.zig").step);
}

// zig build enum-extractor
{
    // const step = b.step("enum-extractor", "Extract enum definitions (invoked by a code generator)");
    // const exe = b.addExecutable(.{
    //     .name = "enum_extractor",
    //     .root_source_file = b.path("./src/generated_enum_extractor.zig"),
    //     .target = b.graph.host,
    //     .optimize = .Debug,
    // });
    // const run = b.addRunArtifact(exe);
    // step.dependOn(&run.step);
}
}

pub fn addMultiCheck(
@@ -386,25 +367,6 @@ pub fn addMultiCheck(
    }
}

fn getTranslateC(b: *Build, target: std.Build.ResolvedTarget, optimize: std.builtin.OptimizeMode) *Step.TranslateC {
    const translate_c = b.addTranslateC(.{
        .root_source_file = b.path("src/c-headers-for-zig.h"),
        .target = target,
        .optimize = optimize,
        .link_libc = true,
    });
    inline for ([_](struct { []const u8, bool }){
        .{ "WINDOWS", translate_c.target.result.os.tag == .windows },
        .{ "POSIX", translate_c.target.result.os.tag != .windows },
        .{ "LINUX", translate_c.target.result.os.tag == .linux },
        .{ "DARWIN", translate_c.target.result.os.tag.isDarwin() },
    }) |entry| {
        const str, const value = entry;
        translate_c.defineCMacroRaw(b.fmt("{s}={d}", .{ str, @intFromBool(value) }));
    }
    return translate_c;
}

pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
    const obj = b.addObject(.{
        .name = if (opts.optimize == .Debug) "bun-debug" else "bun",
@@ -453,8 +415,13 @@ pub fn addBunObject(b: *Build, opts: *BunBuildOptions) *Compile {
    addInternalPackages(b, obj, opts);
    obj.root_module.addImport("build_options", opts.buildOptionsModule(b));

    const translate_c = getTranslateC(b, opts.target, opts.optimize);
    obj.root_module.addImport("translated-c-headers", translate_c.createModule());
    const translate_plugin_api = b.addTranslateC(.{
        .root_source_file = b.path("./packages/bun-native-bundler-plugin-api/bundler_plugin.h"),
        .target = opts.target,
        .optimize = opts.optimize,
        .link_libc = true,
    });
    obj.root_module.addImport("bun-native-bundler-plugin-api", translate_plugin_api.createModule());

    return obj;
}
@@ -176,10 +176,6 @@ if(LINUX)
    DESCRIPTION "Disable relocation read-only (RELRO)"
    -Wl,-z,norelro
  )
  register_compiler_flags(
    DESCRIPTION "Disable semantic interposition"
    -fno-semantic-interposition
  )
endif()

# --- Assertions ---

@@ -291,7 +291,7 @@ function(find_command)
  set_property(GLOBAL PROPERTY ${FIND_NAME} "${exe}: ${reason}" APPEND)

  if(version)
    satisfies_range(${version} ${FIND_VERSION} ${variable})
    satisfies_range(${version} ${${FIND_VERSION_VARIABLE}} ${variable})
    set(${variable} ${${variable}} PARENT_SCOPE)
  endif()
endfunction()

@@ -67,7 +67,13 @@ optionx(ENABLE_ASSERTIONS BOOL "If debug assertions should be enabled" DEFAULT $

optionx(ENABLE_CANARY BOOL "If canary features should be enabled" DEFAULT ON)

if(ENABLE_CANARY)
if(ENABLE_CANARY AND BUILDKITE)
  execute_process(
    COMMAND buildkite-agent meta-data get "canary"
    OUTPUT_VARIABLE DEFAULT_CANARY_REVISION
    OUTPUT_STRIP_TRAILING_WHITESPACE
  )
elseif(ENABLE_CANARY)
  set(DEFAULT_CANARY_REVISION "1")
else()
  set(DEFAULT_CANARY_REVISION "0")
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
oven-sh/boringssl
|
||||
COMMIT
|
||||
914b005ef3ece44159dca0ffad74eb42a9f6679f
|
||||
033d4e8f4fd862b15dc101db6432aadfed670255
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
|
||||
@@ -318,13 +318,13 @@ register_command(
|
||||
TARGET
|
||||
bun-bake-codegen
|
||||
COMMENT
|
||||
"Bundling Bake Runtime"
|
||||
"Bundling Kit Runtime"
|
||||
COMMAND
|
||||
${BUN_EXECUTABLE}
|
||||
run
|
||||
${BUN_BAKE_RUNTIME_CODEGEN_SCRIPT}
|
||||
--debug=${DEBUG}
|
||||
--codegen-root=${CODEGEN_PATH}
|
||||
--codegen_root=${CODEGEN_PATH}
|
||||
SOURCES
|
||||
${BUN_BAKE_RUNTIME_SOURCES}
|
||||
${BUN_BAKE_RUNTIME_CODEGEN_SOURCES}
|
||||
@@ -334,39 +334,6 @@ register_command(
|
||||
${BUN_BAKE_RUNTIME_OUTPUTS}
|
||||
)
|
||||
|
||||
set(BUN_BINDGEN_SCRIPT ${CWD}/src/codegen/bindgen.ts)
|
||||
|
||||
file(GLOB_RECURSE BUN_BINDGEN_SOURCES ${CONFIGURE_DEPENDS}
|
||||
${CWD}/src/**/*.bind.ts
|
||||
)
|
||||
|
||||
set(BUN_BINDGEN_CPP_OUTPUTS
|
||||
${CODEGEN_PATH}/GeneratedBindings.cpp
|
||||
)
|
||||
|
||||
set(BUN_BINDGEN_ZIG_OUTPUTS
|
||||
${CWD}/src/bun.js/bindings/GeneratedBindings.zig
|
||||
)
|
||||
|
||||
register_command(
|
||||
TARGET
|
||||
bun-binding-generator
|
||||
COMMENT
|
||||
"Processing \".bind.ts\" files"
|
||||
COMMAND
|
||||
${BUN_EXECUTABLE}
|
||||
run
|
||||
${BUN_BINDGEN_SCRIPT}
|
||||
--debug=${DEBUG}
|
||||
--codegen-root=${CODEGEN_PATH}
|
||||
SOURCES
|
||||
${BUN_BINDGEN_SOURCES}
|
||||
${BUN_BINDGEN_SCRIPT}
|
||||
OUTPUTS
|
||||
${BUN_BINDGEN_CPP_OUTPUTS}
|
||||
${BUN_BINDGEN_ZIG_OUTPUTS}
|
||||
)
|
||||
|
||||
set(BUN_JS_SINK_SCRIPT ${CWD}/src/codegen/generate-jssink.ts)
|
||||
|
||||
set(BUN_JS_SINK_SOURCES
|
||||
@@ -418,6 +385,7 @@ set(BUN_OBJECT_LUT_OUTPUTS
|
||||
${CODEGEN_PATH}/NodeModuleModule.lut.h
|
||||
)
|
||||
|
||||
|
||||
macro(WEBKIT_ADD_SOURCE_DEPENDENCIES _source _deps)
|
||||
set(_tmp)
|
||||
get_source_file_property(_tmp ${_source} OBJECT_DEPENDS)
|
||||
@@ -493,7 +461,6 @@ list(APPEND BUN_ZIG_SOURCES
|
||||
${CWD}/build.zig
|
||||
${CWD}/root.zig
|
||||
${CWD}/root_wasm.zig
|
||||
${BUN_BINDGEN_ZIG_OUTPUTS}
|
||||
)
|
||||
|
||||
set(BUN_ZIG_GENERATED_SOURCES
|
||||
@@ -515,6 +482,7 @@ endif()
|
||||
|
||||
set(BUN_ZIG_OUTPUT ${BUILD_PATH}/bun-zig.o)
|
||||
|
||||
|
||||
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
|
||||
if(APPLE)
|
||||
set(ZIG_CPU "apple_m1")
|
||||
@@ -576,7 +544,6 @@ set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS "build.zig")
|
||||
|
||||
set(BUN_USOCKETS_SOURCE ${CWD}/packages/bun-usockets)
|
||||
|
||||
# Hand-written C++ source files. The full list of source code (including codegen) is in BUN_CPP_SOURCES
|
||||
file(GLOB BUN_CXX_SOURCES ${CONFIGURE_DEPENDS}
|
||||
${CWD}/src/io/*.cpp
|
||||
${CWD}/src/bun.js/modules/*.cpp
|
||||
@@ -601,8 +568,6 @@ file(GLOB BUN_C_SOURCES ${CONFIGURE_DEPENDS}
|
||||
|
||||
if(WIN32)
|
||||
list(APPEND BUN_C_SOURCES ${CWD}/src/bun.js/bindings/windows/musl-memmem.c)
|
||||
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle.cpp)
|
||||
list(APPEND BUN_CXX_SOURCES ${CWD}/src/bun.js/bindings/windows/rescle-binding.cpp)
|
||||
endif()
|
||||
|
||||
register_repository(
|
||||
@@ -635,14 +600,12 @@ register_command(
|
||||
list(APPEND BUN_CPP_SOURCES
|
||||
${BUN_C_SOURCES}
|
||||
${BUN_CXX_SOURCES}
|
||||
${BUN_ERROR_CODE_OUTPUTS}
|
||||
${VENDOR_PATH}/picohttpparser/picohttpparser.c
|
||||
${NODEJS_HEADERS_PATH}/include/node/node_version.h
|
||||
${BUN_ZIG_GENERATED_CLASSES_OUTPUTS}
|
||||
${BUN_JS_SINK_OUTPUTS}
|
||||
${BUN_JAVASCRIPT_OUTPUTS}
|
||||
${BUN_OBJECT_LUT_OUTPUTS}
|
||||
${BUN_BINDGEN_CPP_OUTPUTS}
|
||||
)
|
||||
|
||||
if(WIN32)
|
||||
@@ -652,19 +615,11 @@ if(WIN32)
|
||||
set(Bun_VERSION_WITH_TAG ${VERSION})
|
||||
endif()
|
||||
set(BUN_ICO_PATH ${CWD}/src/bun.ico)
|
||||
configure_file(${CWD}/src/bun.ico ${CODEGEN_PATH}/bun.ico COPYONLY)
|
||||
configure_file(
|
||||
${CWD}/src/windows-app-info.rc
|
||||
${CODEGEN_PATH}/windows-app-info.rc
|
||||
@ONLY
|
||||
)
|
||||
add_custom_command(
|
||||
OUTPUT ${CODEGEN_PATH}/windows-app-info.res
|
||||
COMMAND rc.exe /fo ${CODEGEN_PATH}/windows-app-info.res ${CODEGEN_PATH}/windows-app-info.rc
|
||||
DEPENDS ${CODEGEN_PATH}/windows-app-info.rc ${CODEGEN_PATH}/bun.ico
|
||||
COMMENT "Adding Windows resource file ${CODEGEN_PATH}/windows-app-info.res with ico in ${CODEGEN_PATH}/bun.ico"
|
||||
)
|
||||
set(WINDOWS_RESOURCES ${CODEGEN_PATH}/windows-app-info.res)
|
||||
list(APPEND BUN_CPP_SOURCES ${CODEGEN_PATH}/windows-app-info.rc)
|
||||
endif()
|
||||
|
||||
# --- Executable ---
|
||||
@@ -672,7 +627,7 @@ endif()
|
||||
set(BUN_CPP_OUTPUT ${BUILD_PATH}/${CMAKE_STATIC_LIBRARY_PREFIX}${bun}${CMAKE_STATIC_LIBRARY_SUFFIX})
|
||||
|
||||
if(BUN_LINK_ONLY)
|
||||
add_executable(${bun} ${BUN_CPP_OUTPUT} ${BUN_ZIG_OUTPUT} ${WINDOWS_RESOURCES})
|
||||
add_executable(${bun} ${BUN_CPP_OUTPUT} ${BUN_ZIG_OUTPUT})
|
||||
set_target_properties(${bun} PROPERTIES LINKER_LANGUAGE CXX)
|
||||
target_link_libraries(${bun} PRIVATE ${BUN_CPP_OUTPUT})
|
||||
elseif(BUN_CPP_ONLY)
|
||||
@@ -690,7 +645,7 @@ elseif(BUN_CPP_ONLY)
|
||||
${BUN_CPP_OUTPUT}
|
||||
)
|
||||
else()
|
||||
add_executable(${bun} ${BUN_CPP_SOURCES} ${WINDOWS_RESOURCES})
|
||||
add_executable(${bun} ${BUN_CPP_SOURCES})
|
||||
target_link_libraries(${bun} PRIVATE ${BUN_ZIG_OUTPUT})
|
||||
endif()
|
||||
|
||||
@@ -896,28 +851,48 @@ endif()
|
||||
|
||||
if(LINUX)
|
||||
if(NOT ABI STREQUAL "musl")
|
||||
# on arm64
|
||||
if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm|ARM|arm64|ARM64|aarch64|AARCH64")
|
||||
if(ARCH STREQUAL "aarch64")
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,--wrap=fcntl64
|
||||
-Wl,--wrap=statx
|
||||
)
|
||||
endif()
|
||||
|
||||
if(ARCH STREQUAL "x64")
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,--wrap=fcntl
|
||||
-Wl,--wrap=fcntl64
|
||||
-Wl,--wrap=fstat
|
||||
-Wl,--wrap=fstat64
|
||||
-Wl,--wrap=fstatat
|
||||
-Wl,--wrap=fstatat64
|
||||
-Wl,--wrap=lstat
|
||||
-Wl,--wrap=lstat64
|
||||
-Wl,--wrap=mknod
|
||||
-Wl,--wrap=mknodat
|
||||
-Wl,--wrap=stat
|
||||
-Wl,--wrap=stat64
|
||||
-Wl,--wrap=statx
|
||||
)
|
||||
endif()
|
||||
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,--wrap=cosf
|
||||
-Wl,--wrap=exp
|
||||
-Wl,--wrap=expf
|
||||
-Wl,--wrap=fcntl64
|
||||
-Wl,--wrap=fmod
|
||||
-Wl,--wrap=fmodf
|
||||
-Wl,--wrap=log
|
||||
-Wl,--wrap=log10f
|
||||
-Wl,--wrap=log2
|
||||
-Wl,--wrap=log2f
|
||||
-Wl,--wrap=logf
|
||||
-Wl,--wrap=pow
|
||||
-Wl,--wrap=powf
|
||||
-Wl,--wrap=sincosf
|
||||
-Wl,--wrap=sinf
|
||||
-Wl,--wrap=tanf
|
||||
)
|
||||
else()
|
||||
target_link_options(${bun} PUBLIC
|
||||
-Wl,--wrap=exp
|
||||
-Wl,--wrap=expf
|
||||
-Wl,--wrap=log2f
|
||||
-Wl,--wrap=logf
|
||||
-Wl,--wrap=powf
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(NOT ABI STREQUAL "musl")
|
||||
@@ -946,7 +921,7 @@ if(LINUX)
|
||||
-Wl,-z,combreloc
|
||||
-Wl,--no-eh-frame-hdr
|
||||
-Wl,--sort-section=name
|
||||
-Wl,--hash-style=both
|
||||
-Wl,--hash-style=gnu
|
||||
-Wl,--build-id=sha1 # Better for debugging than default
|
||||
-Wl,-Map=${bun}.linker-map
|
||||
)
|
||||
@@ -958,7 +933,6 @@ if(WIN32)
|
||||
set(BUN_SYMBOLS_PATH ${CWD}/src/symbols.def)
|
||||
target_link_options(${bun} PUBLIC /DEF:${BUN_SYMBOLS_PATH})
|
||||
elseif(APPLE)
|
||||
|
||||
set(BUN_SYMBOLS_PATH ${CWD}/src/symbols.txt)
|
||||
target_link_options(${bun} PUBLIC -exported_symbols_list ${BUN_SYMBOLS_PATH})
|
||||
else()
|
||||
|
||||
@@ -4,7 +4,7 @@ register_repository(
|
||||
REPOSITORY
|
||||
c-ares/c-ares
|
||||
COMMIT
|
||||
4f4912bce7374f787b10576851b687935f018e17
|
||||
41ee334af3e3d0027dca5e477855d0244936bd49
|
||||
)
|
||||
|
||||
register_cmake_command(
|
||||
|
||||
@@ -18,7 +18,7 @@ register_cmake_command(
|
||||
-DENABLE_INSTALL=OFF
|
||||
-DENABLE_TEST=OFF
|
||||
-DENABLE_WERROR=OFF
|
||||
-DENABLE_BZip2=OFF
|
||||
-DENABLE_BZIP2=OFF
|
||||
-DENABLE_CAT=OFF
|
||||
-DENABLE_EXPAT=OFF
|
||||
-DENABLE_ICONV=OFF
|
||||
|
||||
@@ -5,11 +5,6 @@ if(NOT ENABLE_CCACHE OR CACHE_STRATEGY STREQUAL "none")
|
||||
return()
|
||||
endif()
|
||||
|
||||
if (CI AND NOT APPLE)
|
||||
setenv(CCACHE_DISABLE 1)
|
||||
return()
|
||||
endif()
|
||||
|
||||
find_command(
|
||||
VARIABLE
|
||||
CCACHE_PROGRAM
|
||||
@@ -43,8 +38,7 @@ setenv(CCACHE_FILECLONE 1)
|
||||
setenv(CCACHE_STATSLOG ${BUILD_PATH}/ccache.log)
|
||||
|
||||
if(CI)
|
||||
# FIXME: Does not work on Ubuntu 18.04
|
||||
# setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,clang_index_store,gcno_cwd,include_file_ctime,include_file_mtime")
|
||||
setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,clang_index_store,gcno_cwd,include_file_ctime,include_file_mtime")
|
||||
else()
|
||||
setenv(CCACHE_SLOPPINESS "pch_defines,time_macros,locale,random_seed,clang_index_store,gcno_cwd")
|
||||
endif()
|
||||
|
||||
@@ -1,18 +1,14 @@
|
||||
|
||||
set(DEFAULT_ENABLE_LLVM ON)
|
||||
|
||||
# if target is bun-zig, set ENABLE_LLVM to OFF
|
||||
if(TARGET bun-zig)
|
||||
set(DEFAULT_ENABLE_LLVM OFF)
|
||||
endif()
|
||||
|
||||
optionx(ENABLE_LLVM BOOL "If LLVM should be used for compilation" DEFAULT ${DEFAULT_ENABLE_LLVM})
|
||||
optionx(ENABLE_LLVM BOOL "If LLVM should be used for compilation" DEFAULT ON)
|
||||
|
||||
if(NOT ENABLE_LLVM)
|
||||
return()
|
||||
endif()
|
||||
|
||||
set(DEFAULT_LLVM_VERSION "18.1.8")
|
||||
if(CMAKE_HOST_WIN32 OR CMAKE_HOST_APPLE OR EXISTS "/etc/alpine-release")
|
||||
set(DEFAULT_LLVM_VERSION "18.1.8")
|
||||
else()
|
||||
set(DEFAULT_LLVM_VERSION "16.0.6")
|
||||
endif()
|
||||
|
||||
optionx(LLVM_VERSION STRING "The version of LLVM to use" DEFAULT ${DEFAULT_LLVM_VERSION})
|
||||
|
||||
@@ -77,7 +73,7 @@ macro(find_llvm_command variable command)
|
||||
VERSION_VARIABLE LLVM_VERSION
|
||||
COMMAND ${commands}
|
||||
PATHS ${LLVM_PATHS}
|
||||
VERSION >=${LLVM_VERSION_MAJOR}.1.0
|
||||
VERSION ${LLVM_VERSION}
|
||||
)
|
||||
list(APPEND CMAKE_ARGS -D${variable}=${${variable}})
|
||||
endmacro()
|
||||
|
||||
@@ -2,7 +2,7 @@ option(WEBKIT_VERSION "The version of WebKit to use")
|
||||
option(WEBKIT_LOCAL "If a local version of WebKit should be used instead of downloading")
|
||||
|
||||
if(NOT WEBKIT_VERSION)
|
||||
set(WEBKIT_VERSION 3845bf370ff4e9a5c0b96036255142c7904be963)
|
||||
set(WEBKIT_VERSION 8f9ae4f01a047c666ef548864294e01df731d4ea)
|
||||
endif()
|
||||
|
||||
if(WEBKIT_LOCAL)
|
||||
|
||||
@@ -234,7 +234,7 @@ To prefetch a DNS entry, you can use the `dns.prefetch` API. This API is useful
|
||||
```ts
|
||||
import { dns } from "bun";
|
||||
|
||||
dns.prefetch("bun.sh");
|
||||
dns.prefetch("bun.sh", 443);
|
||||
```
|
||||
|
||||
#### DNS caching
|
||||
|
||||
@@ -771,28 +771,3 @@ console.log(obj); // => { foo: "bar" }
|
||||
```
|
||||
|
||||
Internally, [`structuredClone`](https://developer.mozilla.org/en-US/docs/Web/API/structuredClone) and [`postMessage`](https://developer.mozilla.org/en-US/docs/Web/API/Window/postMessage) serialize and deserialize the same way. This exposes the underlying [HTML Structured Clone Algorithm](https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm) to JavaScript as an ArrayBuffer.
|
||||
|
||||
## `estimateShallowMemoryUsageOf` in `bun:jsc`
|
||||
|
||||
The `estimateShallowMemoryUsageOf` function returns a best-effort estimate of the memory usage of an object in bytes, excluding the memory usage of properties or other objects it references. For accurate per-object memory usage, use `Bun.generateHeapSnapshot`.
|
||||
|
||||
```js
|
||||
import { estimateShallowMemoryUsageOf } from "bun:jsc";
|
||||
|
||||
const obj = { foo: "bar" };
|
||||
const usage = estimateShallowMemoryUsageOf(obj);
|
||||
console.log(usage); // => 16
|
||||
|
||||
const buffer = Buffer.alloc(1024 * 1024);
|
||||
estimateShallowMemoryUsageOf(buffer);
|
||||
// => 1048624
|
||||
|
||||
const req = new Request("https://bun.sh");
|
||||
estimateShallowMemoryUsageOf(req);
|
||||
// => 167
|
||||
|
||||
const array = Array(1024).fill({ a: 1 });
|
||||
// Arrays are usually not stored contiguously in memory, so this will not return a useful value (which isn't a bug).
|
||||
estimateShallowMemoryUsageOf(array);
|
||||
// => 16
|
||||
```
|
||||
|
||||
@@ -279,19 +279,6 @@ $ bun build --compile --asset-naming="[name].[ext]" ./index.ts
|
||||
|
||||
To trim down the size of the executable a little, pass `--minify` to `bun build --compile`. This uses Bun's minifier to reduce the code size. Overall though, Bun's binary is still way too big and we need to make it smaller.
|
||||
|
||||
## Windows-specific flags
|
||||
|
||||
When compiling a standalone executable on Windows, there are two platform-specific options that can be used to customize metadata on the generated `.exe` file:
|
||||
|
||||
- `--windows-icon=path/to/icon.ico` to customize the executable file icon.
|
||||
- `--windows-hide-console` to disable the background terminal, which can be used for applications that do not need a TTY.
|
||||
|
||||
{% callout %}
|
||||
|
||||
These flags currently cannot be used when cross-compiling because they depend on Windows APIs.
|
||||
|
||||
{% /callout %}
|
||||
|
||||
## Unsupported CLI arguments
|
||||
|
||||
Currently, the `--compile` flag can only accept a single entrypoint at a time and does not support the following flags:
|
||||
|
||||
@@ -546,113 +546,6 @@ export type ImportKind =
|
||||
|
||||
By design, the manifest is a simple JSON object that can easily be serialized or written to disk. It is also compatible with esbuild's [`metafile`](https://esbuild.github.io/api/#metafile) format. -->
|
||||
|
||||
### `env`
|
||||
|
||||
Controls how environment variables are handled during bundling. Internally, this uses `define` to inject environment variables into the bundle, but makes it easier to specify the environment variables to inject.
|
||||
|
||||
#### `env: "inline"`
|
||||
|
||||
Injects environment variables into the bundled output by converting `process.env.FOO` references to string literals containing the actual environment variable values.
|
||||
|
||||
{% codetabs group="a" %}
|
||||
|
||||
```ts#JavaScript
|
||||
await Bun.build({
|
||||
entrypoints: ['./index.tsx'],
|
||||
outdir: './out',
|
||||
env: "inline",
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
$ FOO=bar BAZ=123 bun build ./index.tsx --outdir ./out --env inline
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
For the input below:
|
||||
|
||||
```js#input.js
|
||||
console.log(process.env.FOO);
|
||||
console.log(process.env.BAZ);
|
||||
```
|
||||
|
||||
The generated bundle will contain the following code:
|
||||
|
||||
```js#output.js
|
||||
console.log("bar");
|
||||
console.log("123");
|
||||
```
|
||||
|
||||
#### `env: "PUBLIC_*"` (prefix)
|
||||
|
||||
Inlines environment variables matching the given prefix (the part before the `*` character), replacing `process.env.FOO` with the actual environment variable value. This is useful for selectively inlining environment variables for things like public-facing URLs or client-side tokens, without worrying about injecting private credentials into output bundles.
|
||||
|
||||
{% codetabs group="a" %}
|
||||
|
||||
```ts#JavaScript
|
||||
await Bun.build({
|
||||
entrypoints: ['./index.tsx'],
|
||||
outdir: './out',
|
||||
|
||||
// Inline all env vars that start with "ACME_PUBLIC_"
|
||||
env: "ACME_PUBLIC_*",
|
||||
})
|
||||
```
|
||||
|
||||
```bash#CLI
|
||||
$ FOO=bar BAZ=123 ACME_PUBLIC_URL=https://acme.com bun build ./index.tsx --outdir ./out --env 'ACME_PUBLIC_*'
|
||||
```
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
For example, given the following environment variables:
|
||||
|
||||
```bash
|
||||
$ FOO=bar BAZ=123 ACME_PUBLIC_URL=https://acme.com
|
||||
```
|
||||
|
||||
And source code:
|
||||
|
||||
```ts#index.tsx
|
||||
console.log(process.env.FOO);
|
||||
console.log(process.env.ACME_PUBLIC_URL);
|
||||
console.log(process.env.BAZ);
|
||||
```
|
||||
|
||||
The generated bundle will contain the following code:
|
||||
|
||||
```js
|
||||
console.log(process.env.FOO);
|
||||
console.log("https://acme.com");
|
||||
console.log(process.env.BAZ);
|
||||
```
|
||||
|
||||
#### `env: "disable"`
|
||||
|
||||
Disables environment variable injection entirely.
|
||||
|
||||
For example, given the following environment variables:
|
||||
|
||||
```bash
|
||||
$ FOO=bar BAZ=123 ACME_PUBLIC_URL=https://acme.com
|
||||
```
|
||||
|
||||
And source code:
|
||||
|
||||
```ts#index.tsx
|
||||
console.log(process.env.FOO);
|
||||
console.log(process.env.ACME_PUBLIC_URL);
|
||||
console.log(process.env.BAZ);
|
||||
```
|
||||
|
||||
The generated bundle will contain the following code:
|
||||
|
||||
```js
|
||||
console.log(process.env.FOO);
|
||||
console.log(process.env.BAZ);
|
||||
```
|
||||
|
||||
### `sourcemap`
|
||||
|
||||
Specifies the type of sourcemap to generate.
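For example, a minimal sketch that emits a linked sourcemap file next to the bundle (the entrypoint and output paths are assumed):

```ts
await Bun.build({
  entrypoints: ["./index.tsx"],
  outdir: "./out",
  // "linked" writes a separate .js.map file and links it from the output
  sourcemap: "linked",
});
```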
|
||||
@@ -1259,7 +1152,7 @@ $ bun build ./index.tsx --outdir ./out --drop=console --drop=debugger --drop=any
|
||||
|
||||
### `experimentalCss`
|
||||
|
||||
Whether to enable _experimental_ support for bundling CSS files. Defaults to `false`. In 1.2, this property will be deleted, and CSS bundling will always be enabled.
|
||||
Whether to enable _experimental_ support for bundling CSS files. Defaults to `false`.
|
||||
|
||||
This supports bundling CSS files imported from JS, as well as CSS entrypoints.
|
||||
|
||||
@@ -1275,12 +1168,6 @@ const result = await Bun.build({
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
### `throw`
|
||||
|
||||
If set to `true`, `Bun.build` will throw on build failure. See the section ["Logs and Errors"](#logs-and-errors) for more details on the error message structure.
|
||||
|
||||
In 1.2, this will default to `true`, with the previous behavior available as `throw: false`.
|
||||
|
||||
## Outputs
|
||||
|
||||
The `Bun.build` function returns a `Promise<BuildOutput>`, defined as:
|
||||
@@ -1420,70 +1307,7 @@ Refer to [Bundler > Executables](https://bun.sh/docs/bundler/executables) for co
|
||||
|
||||
## Logs and errors
|
||||
|
||||
<!-- 1.2 documentation -->
|
||||
<!-- On failure, `Bun.build` returns a rejected promise with an `AggregateError`. This can be logged to the console for pretty printing of the error list, or programmatically read with a `try`/`catch` block.
|
||||
|
||||
```ts
|
||||
try {
|
||||
const result = await Bun.build({
|
||||
entrypoints: ["./index.tsx"],
|
||||
outdir: "./out",
|
||||
});
|
||||
} catch (e) {
|
||||
// TypeScript does not allow annotations on the catch clause
|
||||
const error = e as AggregateError;
|
||||
console.error("Build Failed");
|
||||
|
||||
// Example: Using the built-in formatter
|
||||
console.error(error);
|
||||
|
||||
// Example: Serializing the failure as a JSON string.
|
||||
console.error(JSON.stringify(error, null, 2));
|
||||
}
|
||||
```
|
||||
|
||||
{% callout %}
|
||||
|
||||
Most of the time, an explicit `try`/`catch` is not needed, as Bun will neatly print uncaught exceptions. It is enough to just use a top-level `await` on the `Bun.build` call.
|
||||
|
||||
{% /callout %}
|
||||
|
||||
Each item in `error.errors` is an instance of `BuildMessage` or `ResolveMessage` (subclasses of Error), containing detailed information for each error.
|
||||
|
||||
```ts
|
||||
class BuildMessage {
|
||||
name: string;
|
||||
position?: Position;
|
||||
message: string;
|
||||
level: "error" | "warning" | "info" | "debug" | "verbose";
|
||||
}
|
||||
|
||||
class ResolveMessage extends BuildMessage {
|
||||
code: string;
|
||||
referrer: string;
|
||||
specifier: string;
|
||||
importKind: ImportKind;
|
||||
}
|
||||
```
|
||||
|
||||
On build success, the returned object contains a `logs` property, which contains bundler warnings and info messages.
|
||||
|
||||
```ts
|
||||
const result = await Bun.build({
|
||||
entrypoints: ["./index.tsx"],
|
||||
outdir: "./out",
|
||||
});
|
||||
|
||||
if (result.logs.length > 0) {
|
||||
console.warn("Build succeeded with warnings:");
|
||||
for (const message of result.logs) {
|
||||
// Bun will pretty print the message object
|
||||
console.warn(message);
|
||||
}
|
||||
}
|
||||
``` -->
|
||||
|
||||
By default, `Bun.build` only throws if invalid options are provided. Read the `success` property to determine if the build was successful; the `logs` property will contain additional details.
|
||||
`Bun.build` only throws if invalid options are provided. Read the `success` property to determine if the build was successful; the `logs` property will contain additional details.
|
||||
|
||||
```ts
|
||||
const result = await Bun.build({
|
||||
@@ -1526,27 +1350,6 @@ if (!result.success) {
|
||||
}
|
||||
```
|
||||
|
||||
In Bun 1.2, throwing an aggregate error like this will become the default behavior. You can opt into it early using the `throw: true` option.
|
||||
|
||||
```ts
|
||||
try {
|
||||
const result = await Bun.build({
|
||||
entrypoints: ["./index.tsx"],
|
||||
outdir: "./out",
|
||||
});
|
||||
} catch (e) {
|
||||
// TypeScript does not allow annotations on the catch clause
|
||||
const error = e as AggregateError;
|
||||
console.error("Build Failed");
|
||||
|
||||
// Example: Using the built-in formatter
|
||||
console.error(error);
|
||||
|
||||
// Example: Serializing the failure as a JSON string.
|
||||
console.error(JSON.stringify(error, null, 2));
|
||||
}
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
```ts
|
||||
@@ -1568,23 +1371,39 @@ interface BuildConfig {
|
||||
*
|
||||
* @default "esm"
|
||||
*/
|
||||
format?: "esm" | "cjs" | "iife";
|
||||
format?: /**
|
||||
|
||||
* ECMAScript Module format
|
||||
*/
|
||||
| "esm"
|
||||
/**
|
||||
* CommonJS format
|
||||
* **Experimental**
|
||||
*/
|
||||
| "cjs"
|
||||
/**
|
||||
* IIFE format
|
||||
* **Experimental**
|
||||
*/
|
||||
| "iife";
|
||||
naming?:
|
||||
| string
|
||||
| {
|
||||
chunk?: string;
|
||||
entry?: string;
|
||||
asset?: string;
|
||||
};
|
||||
}; // | string;
|
||||
root?: string; // project root
|
||||
splitting?: boolean; // default true, enable code splitting
|
||||
plugins?: BunPlugin[];
|
||||
// manifest?: boolean; // whether to return manifest
|
||||
external?: string[];
|
||||
packages?: "bundle" | "external";
|
||||
publicPath?: string;
|
||||
define?: Record<string, string>;
|
||||
// origin?: string; // e.g. http://mydomain.com
|
||||
loader?: { [k in string]: Loader };
|
||||
sourcemap?: "none" | "linked" | "inline" | "external" | "linked" | boolean; // default: "none", true -> "inline"
|
||||
sourcemap?: "none" | "linked" | "inline" | "external" | "linked"; // default: "none", true -> "inline"
|
||||
/**
|
||||
* package.json `exports` conditions used when resolving imports
|
||||
*
|
||||
@@ -1593,18 +1412,6 @@ interface BuildConfig {
|
||||
* https://nodejs.org/api/packages.html#exports
|
||||
*/
|
||||
conditions?: Array<string> | string;
|
||||
|
||||
/**
|
||||
* Controls how environment variables are handled during bundling.
|
||||
*
|
||||
* Can be one of:
|
||||
* - `"inline"`: Injects environment variables into the bundled output by converting `process.env.FOO`
|
||||
* references to string literals containing the actual environment variable values
|
||||
* - `"disable"`: Disables environment variable injection entirely
|
||||
* - A string ending in `*`: Inlines environment variables that match the given prefix.
|
||||
* For example, `"MY_PUBLIC_*"` will only include env vars starting with "MY_PUBLIC_"
|
||||
*/
|
||||
env?: "inline" | "disable" | `${string}*`;
|
||||
minify?:
|
||||
| boolean
|
||||
| {
|
||||
@@ -1622,6 +1429,20 @@ interface BuildConfig {
|
||||
* Force emitting @__PURE__ annotations even if minify.whitespace is true.
|
||||
*/
|
||||
emitDCEAnnotations?: boolean;
|
||||
// treeshaking?: boolean;
|
||||
|
||||
// jsx?:
|
||||
// | "automatic"
|
||||
// | "classic"
|
||||
// | /* later: "preserve" */ {
|
||||
// runtime?: "automatic" | "classic"; // later: "preserve"
|
||||
// /** Only works when runtime=classic */
|
||||
// factory?: string; // default: "React.createElement"
|
||||
// /** Only works when runtime=classic */
|
||||
// fragment?: string; // default: "React.Fragment"
|
||||
// /** Only works when runtime=automatic */
|
||||
// importSource?: string; // default: "react"
|
||||
// };
|
||||
|
||||
/**
|
||||
* Generate bytecode for the output. This can dramatically improve cold
|
||||
@@ -1634,37 +1455,6 @@ interface BuildConfig {
|
||||
* @default false
|
||||
*/
|
||||
bytecode?: boolean;
|
||||
/**
|
||||
* Add a banner to the bundled code such as "use client";
|
||||
*/
|
||||
banner?: string;
|
||||
/**
|
||||
* Add a footer to the bundled code such as a comment block like
|
||||
*
|
||||
* `// made with bun!`
|
||||
*/
|
||||
footer?: string;
|
||||
|
||||
/**
|
||||
* **Experimental**
|
||||
*
|
||||
* Enable CSS support.
|
||||
*/
|
||||
experimentalCss?: boolean;
|
||||
|
||||
/**
|
||||
* Drop function calls to matching property accesses.
|
||||
*/
|
||||
drop?: string[];
|
||||
|
||||
/**
|
||||
* When set to `true`, the returned promise rejects with an AggregateError when a build failure happens.
|
||||
* When set to `false`, the `success` property of the returned object will be `false` when a build failure happens.
|
||||
*
|
||||
* This defaults to `false` in Bun 1.1 and will change to `true` in Bun 1.2
|
||||
* as most usage of `Bun.build` forgets to check for errors.
|
||||
*/
|
||||
throw?: boolean;
|
||||
}
|
||||
|
||||
interface BuildOutput {
|
||||
@@ -1722,3 +1512,32 @@ declare class ResolveMessage {
|
||||
toString(): string;
|
||||
}
|
||||
```
|
||||
|
||||
<!--
|
||||
interface BuildManifest {
|
||||
inputs: {
|
||||
[path: string]: {
|
||||
output: {
|
||||
path: string;
|
||||
};
|
||||
imports: {
|
||||
path: string;
|
||||
kind: ImportKind;
|
||||
external?: boolean;
|
||||
asset?: boolean; // whether the import defaulted to "file" loader
|
||||
}[];
|
||||
};
|
||||
};
|
||||
outputs: {
|
||||
[path: string]: {
|
||||
type: "chunk" | "entrypoint" | "asset";
|
||||
inputs: { path: string }[];
|
||||
imports: {
|
||||
path: string;
|
||||
kind: ImportKind;
|
||||
external?: boolean;
|
||||
}[];
|
||||
exports: string[];
|
||||
};
|
||||
};
|
||||
} -->
|
||||
|
||||
@@ -2,47 +2,11 @@ Bun provides a universal plugin API that can be used to extend both the _runtime
|
||||
|
||||
Plugins intercept imports and perform custom loading logic: reading files, transpiling code, etc. They can be used to add support for additional file types, like `.scss` or `.yaml`. In the context of Bun's bundler, plugins can be used to implement framework-level features like CSS extraction, macros, and client-server code co-location.
|
||||
|
||||
## Lifecycle hooks
|
||||
|
||||
Plugins can register callbacks to be run at various points in the lifecycle of a bundle:
|
||||
|
||||
- [`onStart()`](#onstart): Run once the bundler has started a bundle
|
||||
- [`onResolve()`](#onresolve): Run before a module is resolved
|
||||
- [`onLoad()`](#onload): Run before a module is loaded.
|
||||
- [`onBeforeParse()`](#onbeforeparse): Run zero-copy native addons in the parser thread before a file is parsed.
|
||||
|
||||
### Reference
|
||||
|
||||
A rough overview of the types (please refer to Bun's `bun.d.ts` for the full type definitions):
|
||||
|
||||
```ts
|
||||
type PluginBuilder = {
|
||||
onStart(callback: () => void): void;
|
||||
onResolve: (
|
||||
args: { filter: RegExp; namespace?: string },
|
||||
callback: (args: { path: string; importer: string }) => {
|
||||
path: string;
|
||||
namespace?: string;
|
||||
} | void,
|
||||
) => void;
|
||||
onLoad: (
|
||||
args: { filter: RegExp; namespace?: string },
|
||||
callback: (args: { path: string; defer: () => Promise<void> }) => {
|
||||
loader?: Loader;
|
||||
contents?: string;
|
||||
exports?: Record<string, any>;
|
||||
},
|
||||
) => void;
|
||||
config: BuildConfig;
|
||||
};
|
||||
|
||||
type Loader = "js" | "jsx" | "ts" | "tsx" | "css" | "json" | "toml";
|
||||
```
|
||||
For more complete documentation of the Plugin API, see [Runtime > Plugins](https://bun.sh/docs/runtime/plugins).
|
||||
|
||||
## Usage
|
||||
|
||||
A plugin is defined as a simple JavaScript object containing a `name` property and a `setup` function.
A plugin is defined as a simple JavaScript object containing a `name` property and a `setup` function. Register a plugin with Bun using the `plugin` function.
|
||||
|
||||
```tsx#myPlugin.ts
|
||||
import type { BunPlugin } from "bun";
|
||||
@@ -58,343 +22,9 @@ const myPlugin: BunPlugin = {
|
||||
This plugin can be passed into the `plugins` array when calling `Bun.build`.
|
||||
|
||||
```ts
|
||||
await Bun.build({
|
||||
Bun.build({
|
||||
entrypoints: ["./app.ts"],
|
||||
outdir: "./out",
|
||||
plugins: [myPlugin],
|
||||
});
|
||||
```
|
||||
|
||||
## Plugin lifecycle
|
||||
|
||||
### Namespaces
|
||||
|
||||
`onLoad` and `onResolve` accept an optional `namespace` string. What is a namespace?
|
||||
|
||||
Every module has a namespace. Namespaces are used to prefix the import in transpiled code; for instance, a loader with a `filter: /\.yaml$/` and `namespace: "yaml:"` will transform an import from `./myfile.yaml` into `yaml:./myfile.yaml`.
|
||||
|
||||
The default namespace is `"file"` and it is not necessary to specify it, for instance: `import myModule from "./my-module.ts"` is the same as `import myModule from "file:./my-module.ts"`.
|
||||
|
||||
Other common namespaces are:
|
||||
|
||||
- `"bun"`: for Bun-specific modules (e.g. `"bun:test"`, `"bun:sqlite"`)
|
||||
- `"node"`: for Node.js modules (e.g. `"node:fs"`, `"node:path"`)
|
||||
|
||||
### `onStart`
|
||||
|
||||
```ts
|
||||
onStart(callback: () => void): Promise<void> | void;
|
||||
```
|
||||
|
||||
Registers a callback to be run when the bundler starts a new bundle.
|
||||
|
||||
```ts
|
||||
import { plugin } from "bun";
|
||||
|
||||
plugin({
|
||||
name: "onStart example",
|
||||
|
||||
setup(build) {
|
||||
build.onStart(() => {
|
||||
console.log("Bundle started!");
|
||||
});
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
The callback can return a `Promise`. After the bundle process has initialized, the bundler waits until all `onStart()` callbacks have completed before continuing.
|
||||
|
||||
For example:
|
||||
|
||||
```ts
|
||||
const result = await Bun.build({
|
||||
entrypoints: ["./app.ts"],
|
||||
outdir: "./dist",
|
||||
sourcemap: "external",
|
||||
plugins: [
|
||||
{
|
||||
name: "Sleep for 10 seconds",
|
||||
setup(build) {
|
||||
build.onStart(async () => {
|
||||
await Bun.sleep(10_000);
|
||||
});
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Log bundle time to a file",
|
||||
setup(build) {
|
||||
build.onStart(async () => {
|
||||
const now = Date.now();
|
||||
await Bun.$`echo ${now} > bundle-time.txt`;
|
||||
});
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
```
|
||||
|
||||
In the above example, Bun will wait until the first `onStart()` (sleeping for 10 seconds) has completed, _as well as_ the second `onStart()` (writing the bundle time to a file).
|
||||
|
||||
Note that `onStart()` callbacks (like every other lifecycle callback) do not have the ability to modify the `build.config` object. If you want to mutate `build.config`, you must do so directly in the `setup()` function.
|
||||
|
||||
### `onResolve`
|
||||
|
||||
```ts
|
||||
onResolve(
|
||||
args: { filter: RegExp; namespace?: string },
|
||||
callback: (args: { path: string; importer: string }) => {
|
||||
path: string;
|
||||
namespace?: string;
|
||||
} | void,
|
||||
): void;
|
||||
```
|
||||
|
||||
To bundle your project, Bun walks down the dependency tree of all modules in your project. For each imported module, Bun actually has to find and read that module. The "finding" part is known as "resolving" a module.
|
||||
|
||||
The `onResolve()` plugin lifecycle callback allows you to configure how a module is resolved.
|
||||
|
||||
The first argument to `onResolve()` is an object with a `filter` and [`namespace`](#what-is-a-namespace) property. The filter is a regular expression which is run on the import string. Effectively, these allow you to filter which modules your custom resolution logic will apply to.
|
||||
|
||||
The second argument to `onResolve()` is a callback which is run for each module import Bun finds that matches the `filter` and `namespace` defined in the first argument.
|
||||
|
||||
The callback receives as input the _path_ to the matching module. The callback can return a _new path_ for the module. Bun will read the contents of the _new path_ and parse it as a module.
|
||||
|
||||
For example, redirecting all imports to `images/` to `./public/images/`:
|
||||
|
||||
```ts
|
||||
import { plugin } from "bun";
|
||||
|
||||
plugin({
|
||||
name: "onResolve example",
|
||||
setup(build) {
|
||||
build.onResolve({ filter: /.*/, namespace: "file" }, args => {
|
||||
if (args.path.startsWith("images/")) {
|
||||
return {
|
||||
path: args.path.replace("images/", "./public/images/"),
|
||||
};
|
||||
}
|
||||
});
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
### `onLoad`
|
||||
|
||||
```ts
|
||||
onLoad(
|
||||
args: { filter: RegExp; namespace?: string },
|
||||
callback: (args: { path: string, importer: string, namespace: string, kind: ImportKind, defer: () => Promise<void> }) => {
|
||||
loader?: Loader;
|
||||
contents?: string;
|
||||
exports?: Record<string, any>;
|
||||
},
|
||||
): void;
|
||||
```
|
||||
|
||||
After Bun's bundler has resolved a module, it needs to read the contents of the module and parse it.
|
||||
|
||||
The `onLoad()` plugin lifecycle callback allows you to modify the _contents_ of a module before it is read and parsed by Bun.
|
||||
|
||||
Like `onResolve()`, the first argument to `onLoad()` allows you to filter which modules this invocation of `onLoad()` will apply to.
|
||||
|
||||
The second argument to `onLoad()` is a callback which is run for each matching module _before_ Bun loads the contents of the module into memory.
|
||||
|
||||
This callback receives as input the _path_ to the matching module, the _importer_ (the module that imported it), the _namespace_ of the module, and the _kind_ of the module.
|
||||
|
||||
The callback can return a new `contents` string for the module as well as a new `loader`.
|
||||
|
||||
For example:
|
||||
|
||||
```ts
|
||||
import { plugin } from "bun";
|
||||
|
||||
const envPlugin: BunPlugin = {
|
||||
name: "env plugin",
|
||||
setup(build) {
|
||||
build.onLoad({ filter: /env/, namespace: "file" }, args => {
|
||||
return {
|
||||
contents: `export default ${JSON.stringify(process.env)}`,
|
||||
loader: "js",
|
||||
};
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
Bun.build({
|
||||
entrypoints: ["./app.ts"],
|
||||
outdir: "./dist",
|
||||
plugins: [envPlugin],
|
||||
});
|
||||
|
||||
// import env from "env"
|
||||
// env.FOO === "bar"
|
||||
```
|
||||
|
||||
This plugin will transform all imports of the form `import env from "env"` into a JavaScript module that exports the current environment variables.
|
||||
|
||||
#### `.defer()`
|
||||
|
||||
One of the arguments passed to the `onLoad` callback is a `defer` function. This function returns a `Promise` that is resolved when all _other_ modules have been loaded.
|
||||
|
||||
This allows you to delay execution of the `onLoad` callback until all other modules have been loaded.
|
||||
|
||||
This is useful for returning the contents of a module that depends on other modules.
|
||||
|
||||
##### Example: tracking and reporting unused exports
|
||||
|
||||
```ts
|
||||
import { plugin } from "bun";
|
||||
|
||||
plugin({
|
||||
name: "track imports",
|
||||
setup(build) {
|
||||
const transpiler = new Bun.Transpiler();
|
||||
|
||||
let trackedImports: Record<string, number> = {};
|
||||
|
||||
// Each module that goes through this onLoad callback
|
||||
// will record its imports in `trackedImports`
|
||||
build.onLoad({ filter: /\.ts/ }, async ({ path }) => {
|
||||
const contents = await Bun.file(path).arrayBuffer();
|
||||
|
||||
const imports = transpiler.scanImports(contents);
|
||||
|
||||
for (const i of imports) {
|
||||
trackedImports[i.path] = (trackedImports[i.path] || 0) + 1;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
});
|
||||
|
||||
build.onLoad({ filter: /stats\.json/ }, async ({ defer }) => {
|
||||
// Wait for all files to be loaded, ensuring
|
||||
// that every file goes through the above `onLoad()` function
|
||||
// and their imports tracked
|
||||
await defer();
|
||||
|
||||
// Emit JSON containing the stats of each import
|
||||
return {
|
||||
contents: `export default ${JSON.stringify(trackedImports)}`,
|
||||
loader: "json",
|
||||
};
|
||||
});
|
||||
},
|
||||
});
|
||||
```
|
||||
|
||||
Note that the `.defer()` function currently has the limitation that it can only be called once per `onLoad` callback.
|
||||
|
||||
## Native plugins
|
||||
|
||||
One of the reasons why Bun's bundler is so fast is that it is written in native code and leverages multi-threading to load and parse modules in parallel.
|
||||
|
||||
However, one limitation of plugins written in JavaScript is that JavaScript itself is single-threaded.
|
||||
|
||||
Native plugins are written as [NAPI](/docs/node-api) modules and can be run on multiple threads. This allows native plugins to run much faster than JavaScript plugins.
|
||||
|
||||
In addition, native plugins can skip unnecessary work such as the UTF-8 -> UTF-16 conversion needed to pass strings to JavaScript.
|
||||
|
||||
The following lifecycle hooks are available to native plugins:
|
||||
|
||||
- [`onBeforeParse()`](#onbeforeparse): Called on any thread before a file is parsed by Bun's bundler.
|
||||
|
||||
### Creating a native plugin in Rust
|
||||
|
||||
Native plugins are NAPI modules which expose lifecycle hooks as C ABI functions.
|
||||
|
||||
To create a native plugin, you must export a C ABI function which matches the signature of the native lifecycle hook you want to implement.
|
||||
|
||||
```bash
|
||||
bun add -g @napi-rs/cli
|
||||
napi new
|
||||
```
|
||||
|
||||
Then install this crate:
|
||||
|
||||
```bash
|
||||
cargo add bun-native-plugin
|
||||
```
|
||||
|
||||
Now, inside the `lib.rs` file, we'll use the `bun_native_plugin::bun` proc macro to define a function which
|
||||
will implement our native plugin.
|
||||
|
||||
Here's an example implementing the `onBeforeParse` hook:
|
||||
|
||||
```rs
|
||||
use bun_native_plugin::{define_bun_plugin, OnBeforeParse, bun, Result, anyhow, BunLoader};
|
||||
use napi_derive::napi;
|
||||
|
||||
/// Define the plugin and its name
|
||||
define_bun_plugin!("replace-foo-with-bar");
|
||||
|
||||
/// Here we'll implement `onBeforeParse` with code that replaces all occurrences of
|
||||
/// `foo` with `bar`.
|
||||
///
|
||||
/// We use the #[bun] macro to generate some of the boilerplate code.
|
||||
///
|
||||
/// The argument of the function (`handle: &mut OnBeforeParse`) tells
|
||||
/// the macro that this function implements the `onBeforeParse` hook.
|
||||
#[bun]
|
||||
pub fn replace_foo_with_bar(handle: &mut OnBeforeParse) -> Result<()> {
|
||||
// Fetch the input source code.
|
||||
let input_source_code = handle.input_source_code()?;
|
||||
|
||||
// Get the Loader for the file
|
||||
let loader = handle.output_loader();
|
||||
|
||||
|
||||
let output_source_code = input_source_code.replace("foo", "bar");
|
||||
|
||||
handle.set_output_source_code(output_source_code, BunLoader::BUN_LOADER_JSX);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
```
|
||||
|
||||
And to use it in Bun.build():
|
||||
|
||||
```typescript
|
||||
import myNativeAddon from "./my-native-addon";
|
||||
Bun.build({
|
||||
entrypoints: ["./app.tsx"],
|
||||
plugins: [
|
||||
{
|
||||
name: "my-plugin",
|
||||
|
||||
setup(build) {
|
||||
build.onBeforeParse(
|
||||
{
|
||||
namespace: "file",
|
||||
filter: "**/*.tsx",
|
||||
},
|
||||
{
|
||||
napiModule: myNativeAddon,
|
||||
symbol: "replace_foo_with_bar",
|
||||
// external: myNativeAddon.getSharedState()
|
||||
},
|
||||
);
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
```
|
||||
|
||||
### `onBeforeParse`
|
||||
|
||||
```ts
|
||||
onBeforeParse(
|
||||
args: { filter: RegExp; namespace?: string },
|
||||
callback: { napiModule: NapiModule; symbol: string; external?: unknown },
|
||||
): void;
|
||||
```
|
||||
|
||||
This lifecycle callback is run immediately before a file is parsed by Bun's bundler.
|
||||
|
||||
As input, it receives the file's contents and can optionally return new source code.
|
||||
|
||||
This callback can be called from any thread, so the NAPI module implementation must be thread-safe.
|
||||
|
||||
@@ -695,7 +695,7 @@ In Bun's CLI, simple boolean flags like `--minify` do not accept an argument. Ot
|
||||
- In Bun, `minify` can be a boolean or an object.
|
||||
|
||||
```ts
|
||||
await Bun.build({
|
||||
Bun.build({
|
||||
entrypoints: ['./index.tsx'],
|
||||
// enable all minification
|
||||
minify: true
|
||||
|
||||
@@ -47,9 +47,6 @@ registry = "https://registry.yarnpkg.com/"
|
||||
# Install for production? This is the equivalent to the "--production" CLI argument
|
||||
production = false
|
||||
|
||||
# Save a text-based lockfile? This is equivalent to the "--save-text-lockfile" CLI argument
|
||||
saveTextLockfile = false
|
||||
|
||||
# Disallow changes to lockfile? This is the equivalent to the "--frozen-lockfile" CLI argument
|
||||
frozenLockfile = false
|
||||
|
||||
@@ -57,15 +54,12 @@ frozenLockfile = false
|
||||
dryRun = true
|
||||
|
||||
# Install optionalDependencies (default: true)
|
||||
# Setting this to false is equivalent to the `--omit=optional` CLI argument
|
||||
optional = true
|
||||
|
||||
# Install local devDependencies (default: true)
|
||||
# Setting this to false is equivalent to the `--omit=dev` CLI argument
|
||||
dev = true
|
||||
|
||||
# Install peerDependencies (default: true)
|
||||
# Setting this to false is equivalent to the `--omit=peer` CLI argument
|
||||
peer = true
|
||||
|
||||
# Max number of concurrent lifecycle scripts (default: (cpu count or GOMAXPROCS) x2)
|
||||
@@ -114,7 +108,6 @@ export interface Install {
|
||||
scopes: Scopes;
|
||||
registry: Registry;
|
||||
production: boolean;
|
||||
saveTextLockfile: boolean;
|
||||
frozenLockfile: boolean;
|
||||
dryRun: boolean;
|
||||
optional: boolean;
|
||||
|
||||
@@ -130,20 +130,6 @@ $ bun install --frozen-lockfile
|
||||
|
||||
For more information on Bun's binary lockfile `bun.lockb`, refer to [Package manager > Lockfile](https://bun.sh/docs/install/lockfile).
|
||||
|
||||
## Omitting dependencies
|
||||
|
||||
To omit dev, peer, or optional dependencies, use the `--omit` flag.
|
||||
|
||||
```bash
|
||||
# Exclude "devDependencies" from the installation. This will apply to the
|
||||
# root package and workspaces if they exist. Transitive dependencies will
|
||||
# not have "devDependencies".
|
||||
$ bun install --omit dev
|
||||
|
||||
# Install only dependencies from "dependencies"
|
||||
$ bun install --omit=dev --omit=peer --omit=optional
|
||||
```
|
||||
|
||||
## Dry run
|
||||
|
||||
To perform a dry run (i.e. don't actually install anything):
|
||||
@@ -163,8 +149,7 @@ Bun supports installing dependencies from Git, GitHub, and local or remotely-hos
|
||||
"lodash": "git+ssh://github.com/lodash/lodash.git#4.17.21",
|
||||
"moment": "git@github.com:moment/moment.git",
|
||||
"zod": "github:colinhacks/zod",
|
||||
"react": "https://registry.npmjs.org/react/-/react-18.2.0.tgz",
|
||||
"bun-types": "npm:@types/bun"
|
||||
"react": "https://registry.npmjs.org/react/-/react-18.2.0.tgz"
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -188,9 +173,6 @@ peer = true
|
||||
# equivalent to `--production` flag
|
||||
production = false
|
||||
|
||||
# equivalent to `--save-text-lockfile` flag
|
||||
saveTextLockfile = false
|
||||
|
||||
# equivalent to `--frozen-lockfile` flag
|
||||
frozenLockfile = false
|
||||
|
||||
|
||||
@@ -2,9 +2,7 @@
|
||||
name: Configure a private registry for an organization scope with bun install
|
||||
---
|
||||
|
||||
Private registries can be configured using either [`.npmrc`](https://bun.sh/docs/install/npmrc) or [`bunfig.toml`](https://bun.sh/docs/runtime/bunfig#install-registry). While both are supported, we recommend using **bunfig.toml** for enhanced flexibility and Bun-specific options.
|
||||
|
||||
To configure a registry for a particular npm scope:
|
||||
Bun does not read `.npmrc` files; instead private registries are configured via `bunfig.toml`. To configure a registry for a particular npm scope:
|
||||
|
||||
```toml#bunfig.toml
|
||||
[install.scopes]
|
||||
|
||||
@@ -1,120 +0,0 @@
|
||||
---
|
||||
name: "import, require, and test Svelte components with bun test"
|
||||
---
|
||||
|
||||
Bun's [Plugin API](/docs/runtime/plugins) lets you add custom loaders to your project. The `test.preload` option in `bunfig.toml` lets you configure your loader to start before your tests run.
|
||||
|
||||
Firstly, install `@testing-library/svelte`, `svelte`, and `@happy-dom/global-registrator`.
|
||||
|
||||
```bash
|
||||
$ bun add @testing-library/svelte svelte@4 @happy-dom/global-registrator
|
||||
```
|
||||
|
||||
Then, save this plugin in your project.
|
||||
|
||||
```ts#svelte-loader.js
|
||||
import { plugin } from "bun";
|
||||
import { compile } from "svelte/compiler";
|
||||
import { readFileSync } from "fs";
|
||||
import { beforeEach, afterEach } from "bun:test";
|
||||
import { GlobalRegistrator } from "@happy-dom/global-registrator";
|
||||
|
||||
beforeEach(async () => {
|
||||
await GlobalRegistrator.register();
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await GlobalRegistrator.unregister();
|
||||
});
|
||||
|
||||
plugin({
|
||||
name: "svelte loader",
|
||||
setup(builder) {
|
||||
builder.onLoad({ filter: /\.svelte(\?[^.]+)?$/ }, ({ path }) => {
|
||||
try {
|
||||
const source = readFileSync(
|
||||
path.substring(
|
||||
0,
|
||||
path.includes("?") ? path.indexOf("?") : path.length
|
||||
),
|
||||
"utf-8"
|
||||
);
|
||||
|
||||
const result = compile(source, {
|
||||
filename: path,
|
||||
generate: "client",
|
||||
dev: false,
|
||||
});
|
||||
|
||||
return {
|
||||
contents: result.js.code,
|
||||
loader: "js",
|
||||
};
|
||||
} catch (err) {
|
||||
throw new Error(`Failed to compile Svelte component: ${err.message}`);
|
||||
}
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Add this to `bunfig.toml` to tell Bun to preload the plugin, so it loads before your tests run.
|
||||
|
||||
```toml#bunfig.toml
|
||||
[test]
|
||||
# Tell Bun to load this plugin before your tests run
|
||||
preload = ["./svelte-loader.js"]
|
||||
|
||||
# This also works:
|
||||
# test.preload = ["./svelte-loader.js"]
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Add an example `.svelte` file in your project.
|
||||
|
||||
```html#Counter.svelte
|
||||
<script>
|
||||
export let initialCount = 0;
|
||||
let count = initialCount;
|
||||
</script>
|
||||
|
||||
<button on:click={() => (count += 1)}>+1</button>
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Now you can `import` or `require` `*.svelte` files in your tests, and it will load the Svelte component as a JavaScript module.
|
||||
|
||||
```ts#hello-svelte.test.ts
|
||||
import { test, expect } from "bun:test";
|
||||
import { render, fireEvent } from "@testing-library/svelte";
|
||||
import Counter from "./Counter.svelte";
|
||||
|
||||
test("Counter increments when clicked", async () => {
|
||||
const { getByText, component } = render(Counter);
|
||||
const button = getByText("+1");
|
||||
|
||||
// Initial state
|
||||
expect(component.$$.ctx[0]).toBe(0); // initialCount is the first prop
|
||||
|
||||
// Click the increment button
|
||||
await fireEvent.click(button);
|
||||
|
||||
// Check the new state
|
||||
expect(component.$$.ctx[0]).toBe(1);
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
Use `bun test` to run your tests.
|
||||
|
||||
```bash
|
||||
$ bun test
|
||||
```
|
||||
|
||||
---
|
||||
@@ -55,13 +55,6 @@ To install dependencies without allowing changes to lockfile (useful on CI):
|
||||
$ bun install --frozen-lockfile
|
||||
```
|
||||
|
||||
To exclude dependency types from installing, use `--omit` with `dev`, `optional`, or `peer`:
|
||||
|
||||
```bash
|
||||
# Disable devDependencies and optionalDependencies
|
||||
$ bun install --omit=dev --omit=optional
|
||||
```
|
||||
|
||||
To perform a dry run (i.e. don't actually install anything):
|
||||
|
||||
```bash
|
||||
@@ -93,9 +86,6 @@ peer = true
|
||||
# equivalent to `--production` flag
|
||||
production = false
|
||||
|
||||
# equivalent to `--save-text-lockfile` flag
|
||||
saveTextLockfile = false
|
||||
|
||||
# equivalent to `--frozen-lockfile` flag
|
||||
frozenLockfile = false
|
||||
|
||||
|
||||
@@ -72,24 +72,6 @@ $ bun install --yarn
|
||||
print = "yarn"
|
||||
```
|
||||
|
||||
### Text-based lockfile
|
||||
|
||||
Bun v1.1.39 introduced `bun.lock`, a JSONC formatted lockfile. `bun.lock` is human-readable and git-diffable without configuration, at [no cost to performance](https://bun.sh/blog/bun-lock-text-lockfile#cached-bun-install-gets-30-faster).
|
||||
|
||||
To generate the lockfile, use `--save-text-lockfile` with `bun install`. You can do this for new projects and existing projects already using `bun.lockb` (resolutions will be preserved).
|
||||
|
||||
```bash
|
||||
$ bun install --save-text-lockfile
|
||||
$ head -n3 bun.lock
|
||||
{
|
||||
"lockfileVersion": 0,
|
||||
"workspaces": {
|
||||
```
|
||||
|
||||
Once `bun.lock` is generated, Bun will use it for all subsequent installs and updates through commands that read and modify the lockfile. If both lockfiles exist, `bun.lock` will be chosen over `bun.lockb`.
|
||||
|
||||
Bun v1.2.0 will switch the default lockfile format to `bun.lock`.
|
||||
|
||||
{% /codetabs %}
|
||||
|
||||
{% details summary="Configuring lockfile" %}
|
||||
|
||||
@@ -6,7 +6,7 @@ Bun supports loading configuration options from [`.npmrc`](https://docs.npmjs.co
|
||||
|
||||
{% /callout %}
|
||||
|
||||
## Supported options
|
||||
# Supported options
|
||||
|
||||
### `registry`: Set the default registry
|
||||
|
||||
|
||||
@@ -402,9 +402,6 @@ export default {
|
||||
page("project/building-windows", "Building Windows", {
|
||||
description: "Learn how to setup a development environment for contributing to the Windows build of Bun.",
|
||||
}),
|
||||
page("project/bindgen", "Bindgen", {
|
||||
description: "About the bindgen code generator",
|
||||
}),
|
||||
page("project/licensing", "License", {
|
||||
description: `Bun is a MIT-licensed project with a large number of statically-linked dependencies with various licenses.`,
|
||||
}),
|
||||
|
||||
@@ -1,225 +0,0 @@
|
||||
{% callout %}
|
||||
|
||||
This document is for maintainers and contributors to Bun, and describes internal implementation details.
|
||||
|
||||
{% /callout %}
|
||||
|
||||
The new bindings generator, introduced to the codebase in Dec 2024, scans for
|
||||
`*.bind.ts` files to find function and class definitions, and generates glue code to
|
||||
interop between JavaScript and native code.
|
||||
|
||||
There are currently other code generators and systems that achieve similar
|
||||
purposes. The following will all eventually be completely phased out in favor of
|
||||
this one:
|
||||
|
||||
- "Classes generator", converting `*.classes.ts` for custom classes.
|
||||
- "JS2Native", allowing ad-hoc calls from `src/js` to native code.
|
||||
|
||||
## Creating JS Functions in Zig
|
||||
|
||||
Given a file implementing a simple function, such as `add`
|
||||
|
||||
```zig#src/bun.js/math.zig
|
||||
pub fn add(global: *JSC.JSGlobalObject, a: i32, b: i32) !i32 {
|
||||
return std.math.add(i32, a, b) catch {
|
||||
// Binding functions can return `error.OutOfMemory` and `error.JSError`.
|
||||
// Others like `error.Overflow` from `std.math.add` must be converted.
|
||||
// Remember to be descriptive.
|
||||
return global.throwPretty("Integer overflow while adding", .{});
|
||||
};
|
||||
}
|
||||
|
||||
const gen = bun.gen.math; // "math" being this file's basename
|
||||
|
||||
const std = @import("std");
|
||||
const bun = @import("root").bun;
|
||||
const JSC = bun.JSC;
|
||||
```
|
||||
|
||||
Then describe the API schema using a `.bind.ts` function. The binding file goes next to the Zig file.
|
||||
|
||||
```ts#src/bun.js/math.bind.ts
|
||||
import { t, fn } from 'bindgen';
|
||||
|
||||
export const add = fn({
|
||||
args: {
|
||||
global: t.globalObject,
|
||||
a: t.i32,
|
||||
b: t.i32.default(1),
|
||||
},
|
||||
ret: t.i32,
|
||||
});
|
||||
```
|
||||
|
||||
This function declaration is equivalent to:
|
||||
|
||||
```ts
|
||||
/**
|
||||
* Throws if zero arguments are provided.
|
||||
* Wraps out of range numbers using modulo.
|
||||
*/
|
||||
declare function add(a: number, b: number = 1): number;
|
||||
```
|
||||
|
||||
The code generator will provide `bun.gen.math.jsAdd`, which is the native
|
||||
function implementation. To pass to JavaScript, use
|
||||
`bun.gen.math.createAddCallback(global)`. JS files in `src/js/` may use
|
||||
`$bindgenFn("math.bind.ts", "add")` to get a handle to the implementation.
|
||||
|
||||
## Strings
|
||||
|
||||
The type for receiving strings is one of [`t.DOMString`](https://webidl.spec.whatwg.org/#idl-DOMString), [`t.ByteString`](https://webidl.spec.whatwg.org/#idl-ByteString), and [`t.USVString`](https://webidl.spec.whatwg.org/#idl-USVString). These map directly to their WebIDL counterparts, and have slightly different conversion logic. Bindgen will pass BunString to native code in all cases.
|
||||
|
||||
When in doubt, use DOMString.
|
||||
|
||||
`t.UTF8String` can be used in place of `t.DOMString`, but will call `bun.String.toUTF8`. The native callback receives `[]const u8` (WTF-8 data), which is freed after the function returns.
|
||||
|
||||
TLDRs from WebIDL spec:
|
||||
|
||||
- ByteString can only contain valid latin1 characters. It is not safe to assume bun.String is already in 8-bit format, but it is extremely likely.
|
||||
- USVString will not contain invalid surrogate pairs, i.e. it is text that can be represented correctly in UTF-8.
|
||||
- DOMString is the loosest but also most recommended strategy.
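A sketch contrasting two of the string types in a schema (the function and argument names are illustrative):

```ts
import { t, fn } from "bindgen";

export const greet = fn({
  args: {
    global: t.globalObject,
    // Native code receives a BunString
    name: t.DOMString,
    // Native code receives []const u8 (WTF-8), freed after the call returns
    banner: t.UTF8String,
  },
  ret: t.DOMString,
});
```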
|
||||
|
||||
## Function Variants
|
||||
|
||||
A function can specify multiple `variants` (also known as overloads).
|
||||
|
||||
```ts#src/bun.js/math.bind.ts
|
||||
import { t, fn } from 'bindgen';
|
||||
|
||||
export const action = fn({
|
||||
variants: [
|
||||
{
|
||||
args: {
|
||||
a: t.i32,
|
||||
},
|
||||
ret: t.i32,
|
||||
},
|
||||
{
|
||||
args: {
|
||||
a: t.DOMString,
|
||||
},
|
||||
ret: t.DOMString,
|
||||
},
|
||||
]
|
||||
});
|
||||
```
|
||||
|
||||
In Zig, each variant gets a number, based on the order the schema defines.
|
||||
|
||||
```zig
|
||||
fn action1(a: i32) i32 {
|
||||
return a;
|
||||
}
|
||||
|
||||
fn action2(a: bun.String) bun.String {
|
||||
return a;
|
||||
}
|
||||
```
|
||||
|
||||
## `t.dictionary`
|
||||
|
||||
A `dictionary` is a definition for a JavaScript object, typically used as a function input. For function outputs, it is usually a smarter idea to declare a class type, which allows adding functions and destructuring.
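A minimal sketch of what a dictionary might look like in a schema (the exact `t.dictionary` call signature and the field names here are assumptions, not confirmed API):

```ts
import { t } from "bindgen";

// Hypothetical options object: { name: string; limit?: number }
export const Options = t.dictionary({
  name: t.DOMString.required,
  limit: t.u32.default(10),
});
```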
|
||||
|
||||
## Enumerations
|
||||
|
||||
To use [WebIDL's enumeration](https://webidl.spec.whatwg.org/#idl-enums) type, use either:
|
||||
|
||||
- `t.stringEnum`: Create and codegen a new enum type.
|
||||
- `t.zigEnum`: Derive a bindgen type off of an existing enum in the codebase.
|
||||
|
||||
An example of `stringEnum` as used in `fmt.zig` / `bun:internal-for-testing`:
|
||||
|
||||
```ts
|
||||
export const Formatter = t.stringEnum(
|
||||
"highlight-javascript",
|
||||
"escape-powershell",
|
||||
);
|
||||
|
||||
export const fmtString = fn({
|
||||
args: {
|
||||
global: t.globalObject,
|
||||
code: t.UTF8String,
|
||||
formatter: Formatter,
|
||||
},
|
||||
ret: t.DOMString,
|
||||
});
|
||||
```
|
||||
|
||||
WebIDL strongly encourages using kebab case for enumeration values, to be consistent with existing Web APIs.
|
||||
|
||||
### Deriving enums from Zig code
|
||||
|
||||
TODO: zigEnum
|
||||
|
||||
## `t.oneOf`
|
||||
|
||||
A `oneOf` is a union between two or more types. It is represented by `union(enum)` in Zig.
|
||||
|
||||
TODO:
|
||||
|
||||
## Attributes
|
||||
|
||||
There are set of attributes that can be chained onto `t.*` types. On all types there are:
|
||||
|
||||
- `.required`, in dictionary parameters only
|
||||
- `.optional`, in function arguments only
|
||||
- `.default(T)`
|
||||
|
||||
When a value is optional, it is lowered to a Zig optional.
|
||||
|
||||
Depending on the type, there are more attributes available. See the type definitions in auto-complete for more details. Note that only one of the above three can be applied, and it must be applied at the end.
|
||||
|
||||
### Integer Attributes
|
||||
|
||||
Integer types allow customizing the overflow behavior with `clamp` or `enforceRange`
|
||||
|
||||
```ts
|
||||
import { t, fn } from "bindgen";
|
||||
|
||||
export const add = fn({
|
||||
args: {
|
||||
global: t.globalObject,
|
||||
// enforce in i32 range
|
||||
a: t.i32.enforceRange(),
|
||||
// clamp to u16 range
|
||||
b: t.u16,
|
||||
// enforce in arbitrary range, with a default if not provided
|
||||
c: t.i32.enforceRange(0, 1000).default(5),
|
||||
// clamp to arbitrary range, or null
|
||||
d: t.u16.clamp(0, 10).optional,
|
||||
},
|
||||
ret: t.i32,
|
||||
});
|
||||
```
|
||||
|
||||
Various Node.js validator functions such as `validateInteger`, `validateNumber`, and more are available. Use these when implementing Node.js APIs, so the error messages match 1:1 what Node would do.
|
||||
|
||||
Unlike `enforceRange`, which is taken from WebIDL, the `validate*` functions are much stricter about the input they accept. For example, Node's numerical validators check `typeof value === 'number'`, while WebIDL uses `ToNumber` for lossy conversion.
|
||||
|
||||
```ts
|
||||
import { t, fn } from "bindgen";
|
||||
|
||||
export const add = fn({
|
||||
args: {
|
||||
global: t.globalObject,
|
||||
// throw if not given a number
|
||||
a: t.f64.validateNumber(),
|
||||
// validate in i32 range
b: t.i32.validateInt32(),
|
||||
// f64 within safe integer range
|
||||
c: t.f64.validateInteger(),
|
||||
// f64 in given range
|
||||
d: t.f64.validateNumber(-10000, 10000),
|
||||
},
|
||||
ret: t.i32,
|
||||
});
|
||||
```
|
||||
|
||||
## Callbacks
|
||||
|
||||
TODO
|
||||
|
||||
## Classes
|
||||
|
||||
TODO
|
||||
@@ -238,17 +238,6 @@ By default Bun uses caret ranges; if the `latest` version of a package is `2.4.1`

exact = false
```

### `install.saveTextLockfile`

Generate `bun.lock`, a human-readable text-based lockfile. Once generated, Bun will use this file instead of `bun.lockb`, choosing it over the binary lockfile if both are present.

Default `false`. In Bun v1.2.0 the default lockfile format will change to `bun.lock`.

```toml
[install]
saveTextLockfile = true
```

<!--
### `install.prefer`

@@ -259,7 +259,6 @@ await Bun.build({
  conditions: ["react-server"],
  target: "bun",
  entryPoints: ["./app/foo/route.js"],
  throw: true,
});
```

@@ -307,7 +307,7 @@ await import("my-object-virtual-module"); // { baz: "quix" }

Plugins can read and write to the [build config](https://bun.sh/docs/bundler#api) with `build.config`.

```ts
await Bun.build({
Bun.build({
  entrypoints: ["./app.ts"],
  outdir: "./dist",
  sourcemap: "external",
@@ -324,7 +324,6 @@ await Bun.build({
      },
    },
  ],
  throw: true,
});
```

@@ -333,7 +332,7 @@ await Bun.build({

**NOTE**: Plugin lifecycle callbacks (`onStart()`, `onResolve()`, etc.) do not have the ability to modify the `build.config` object in the `setup()` function. If you want to mutate `build.config`, you must do so directly in the `setup()` function:

```ts
await Bun.build({
Bun.build({
  entrypoints: ["./app.ts"],
  outdir: "./dist",
  sourcemap: "external",
@@ -351,7 +350,6 @@ await Bun.build({
      },
    },
  ],
  throw: true,
});
```

@@ -555,3 +553,150 @@ plugin({

```

This plugin will transform all imports of the form `import env from "env"` into a JavaScript module that exports the current environment variables.

#### `.defer()`

One of the arguments passed to the `onLoad` callback is a `defer` function. This function returns a `Promise` that is resolved when all _other_ modules have been loaded.

This allows you to delay execution of the `onLoad` callback until all other modules have been loaded.

This is useful for returning the contents of a module that depends on the contents of other modules.

##### Example: tracking and reporting unused exports

```ts
import { plugin } from "bun";

plugin({
  name: "track imports",
  setup(build) {
    const transpiler = new Bun.Transpiler();

    let trackedImports: Record<string, number> = {};

    // Each module that goes through this onLoad callback
    // will record its imports in `trackedImports`
    build.onLoad({ filter: /\.ts/ }, async ({ path }) => {
      const contents = await Bun.file(path).arrayBuffer();

      const imports = transpiler.scanImports(contents);

      for (const i of imports) {
        trackedImports[i.path] = (trackedImports[i.path] || 0) + 1;
      }

      return undefined;
    });

    build.onLoad({ filter: /stats\.json/ }, async ({ defer }) => {
      // Wait for all files to be loaded, ensuring
      // that every file goes through the above `onLoad()` function
      // and has its imports tracked
      await defer();

      // Emit JSON containing the stats of each import
      return {
        contents: `export default ${JSON.stringify(trackedImports)}`,
        loader: "json",
      };
    });
  },
});
```

Note that the `.defer()` function currently has the limitation that it can only be called once per `onLoad` callback.
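
Once the plugin is registered (for example via a preload script), importing the stats module pulls in the JSON that is generated after `defer()` resolves. A sketch, with hypothetical paths and counts:

```ts
// Any import path matching /stats\.json/ is served by the plugin above.
import stats from "./stats.json";

console.log(stats); // e.g. { "react": 2, "./utils.ts": 5 }
```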
## Native plugins

{% callout %}
**NOTE** — This is an advanced and experimental API recommended for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
{% /callout %}

One of the reasons why Bun's bundler is so fast is that it is written in native code and leverages multi-threading to load and parse modules in parallel.

However, one limitation of plugins written in JavaScript is that JavaScript itself is single-threaded.

Native plugins are written as [NAPI](/docs/node-api) modules and can be run on multiple threads. This allows native plugins to run much faster than JavaScript plugins.

In addition, native plugins can skip unnecessary work, such as the UTF-8 -> UTF-16 conversion needed to pass strings to JavaScript.

The following lifecycle hooks are available to native plugins:

- [`onBeforeParse()`](#onbeforeparse): Called on any thread before a file is parsed by Bun's bundler.

### Creating a native plugin

Native plugins are NAPI modules which expose lifecycle hooks as C ABI functions.

To create a native plugin, you must export a C ABI function which matches the signature of the native lifecycle hook you want to implement.

#### Example: Rust with napi-rs

First initialize a napi project (see [here](https://napi.rs/docs/introduction/getting-started) for a more comprehensive guide).

Then install Bun's official safe plugin wrapper crate:

```bash
cargo add bun-native-plugin
```

Now you can export an `extern "C" fn` which is the implementation of your plugin:

```rust
#[no_mangle]
extern "C" fn on_before_parse_impl(
    args: *const bun_native_plugin::sys::OnBeforeParseArguments,
    result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
    let args = unsafe { &*args };
    let result = unsafe { &mut *result };

    let mut handle = match bun_native_plugin::OnBeforeParse::from_raw(args, result) {
        Ok(handle) => handle,
        Err(_) => {
            return;
        }
    };

    let source_code = match handle.input_source_code() {
        Ok(source_code) => source_code,
        Err(_) => {
            handle.log_error("Fetching source code failed!");
            return;
        }
    };

    let loader = handle.output_loader();
    handle.set_output_source_code(source_code.replace("foo", "bar"), loader);
}
```

Use napi-rs to compile the plugin to a `.node` file, then you can `require()` it from JS and use it:

```js
await Bun.build({
  entrypoints: ["index.ts"],
  plugins: [
    {
      name: "my-native-plugin",
      setup(build) {
        const myNativePlugin = require("./path/to/plugin.node");

        build.onBeforeParse(
          { filter: /\.ts/ },
          { napiModule: myNativePlugin, symbol: "on_before_parse_impl" },
        );
      },
    },
  ],
});
```

### `onBeforeParse`

```ts
onBeforeParse(
  args: { filter: RegExp; namespace?: string },
  callback: { napiModule: NapiModule; symbol: string; external?: unknown },
): void;
```

This lifecycle callback is run immediately before a file is parsed by Bun's bundler.

As input, it receives the file's contents and can optionally return new source code.

This callback can be called from any thread, so the napi module implementation must be thread-safe.

@@ -14,7 +14,5 @@
    <true/>
    <key>com.apple.security.get-task-allow</key>
    <true/>
    <key>com.apple.security.cs.debugger</key>
    <true/>
  </dict>
</plist>
</plist>

@@ -1,7 +0,0 @@
process handle -p true -s false -n false SIGUSR1

command script import misctools/lldb/lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std

command script import misctools/lldb/lldb_webkit.py
@@ -1,733 +0,0 @@
# https://github.com/ziglang/zig/blob/master/tools/lldb_pretty_printers.py

# pretty printing for the zig language, zig standard library, and zig stage 2 compiler.
# put commands in ~/.lldbinit to run them automatically when starting lldb
# `command script import /path/to/zig/tools/lldb_pretty_printers.py` to import this file
# `type category enable zig.lang` to enable pretty printing for the zig language
# `type category enable zig.std` to enable pretty printing for the zig standard library
# `type category enable zig.stage2` to enable pretty printing for the zig stage 2 compiler
import lldb
import re

page_size = 1 << 12

def log2_int(i): return i.bit_length() - 1

# Define Zig Language

zig_keywords = {
    'addrspace',
    'align',
    'allowzero',
    'and',
    'anyframe',
    'anytype',
    'asm',
    'async',
    'await',
    'break',
    'callconv',
    'catch',
    'comptime',
    'const',
    'continue',
    'defer',
    'else',
    'enum',
    'errdefer',
    'error',
    'export',
    'extern',
    'fn',
    'for',
    'if',
    'inline',
    'noalias',
    'noinline',
    'nosuspend',
    'opaque',
    'or',
    'orelse',
    'packed',
    'pub',
    'resume',
    'return',
    'linksection',
    'struct',
    'suspend',
    'switch',
    'test',
    'threadlocal',
    'try',
    'union',
    'unreachable',
    'usingnamespace',
    'var',
    'volatile',
    'while',
}
zig_primitives = {
    'anyerror',
    'anyframe',
    'anyopaque',
    'bool',
    'c_int',
    'c_long',
    'c_longdouble',
    'c_longlong',
    'c_short',
    'c_uint',
    'c_ulong',
    'c_ulonglong',
    'c_ushort',
    'comptime_float',
    'comptime_int',
    'f128',
    'f16',
    'f32',
    'f64',
    'f80',
    'false',
    'isize',
    'noreturn',
    'null',
    'true',
    'type',
    'undefined',
    'usize',
    'void',
}
zig_integer_type = re.compile('[iu][1-9][0-9]+')
zig_identifier_regex = re.compile('[A-Z_a-z][0-9A-Z_a-z]*')
def zig_IsVariableName(string): return string != '_' and string not in zig_keywords and string not in zig_primitives and not zig_integer_type.fullmatch(string) and zig_identifier_regex.fullmatch(string)
def zig_IsFieldName(string): return string not in zig_keywords and zig_identifier_regex.fullmatch(string)

class zig_Slice_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.ptr = self.value.GetChildMemberWithName('ptr')
            self.len = self.value.GetChildMemberWithName('len').unsigned if self.ptr.unsigned > page_size else 0
            self.elem_type = self.ptr.type.GetPointeeType()
            self.elem_size = self.elem_type.size
        except: pass
    def has_children(self): return True
    def num_children(self): return self.len or 0
    def get_child_index(self, name):
        try: return int(name.removeprefix('[').removesuffix(']'))
        except: return -1
    def get_child_at_index(self, index):
        if index not in range(self.len): return None
        try: return self.ptr.CreateChildAtOffset('[%d]' % index, index * self.elem_size, self.elem_type)
        except: return None

def zig_String_decode(value, offset=0, length=None):
    try:
        value = value.GetNonSyntheticValue()
        data = value.GetChildMemberWithName('ptr').GetPointeeData(offset, length if length is not None else value.GetChildMemberWithName('len').unsigned)
        b = bytes(data.uint8)
        b = b.replace(b'\\', b'\\\\')
        b = b.replace(b'\n', b'\\n')
        b = b.replace(b'\r', b'\\r')
        b = b.replace(b'\t', b'\\t')
        b = b.replace(b'"', b'\\"')
        b = b.replace(b'\'', b'\\\'')
        s = b.decode(encoding='ascii', errors='backslashreplace')
        return s if s.isprintable() else ''.join((c if c.isprintable() else '\\x%02x' % ord(c) for c in s))
    except: return None
def zig_String_SummaryProvider(value, _=None): return '"%s"' % zig_String_decode(value)
def zig_String_AsIdentifier(value, pred):
    string = zig_String_decode(value)
    return string if pred(string) else '@"%s"' % string

class zig_Optional_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.child = self.value.GetChildMemberWithName('some').unsigned == 1 and self.value.GetChildMemberWithName('data').Clone('child')
        except: pass
    def has_children(self): return bool(self.child)
    def num_children(self): return int(self.child)
    def get_child_index(self, name): return 0 if self.child and (name == 'child' or name == '?') else -1
    def get_child_at_index(self, index): return self.child if self.child and index == 0 else None
def zig_Optional_SummaryProvider(value, _=None):
    child = value.GetChildMemberWithName('child')
    return child or 'null'

class zig_ErrorUnion_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.error_set = self.value.GetChildMemberWithName('tag').Clone('error_set')
            self.payload = self.value.GetChildMemberWithName('value').Clone('payload') if self.error_set.unsigned == 0 else None
        except: pass
    def has_children(self): return True
    def num_children(self): return 1
    def get_child_index(self, name): return 0 if name == ('payload' if self.payload else 'error_set') else -1
    def get_child_at_index(self, index): return self.payload or self.error_set if index == 0 else None

class zig_TaggedUnion_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.tag = self.value.GetChildMemberWithName('tag')
            self.payload = self.value.GetChildMemberWithName('payload').GetChildMemberWithName(self.tag.value)
        except: pass
    def has_children(self): return True
    def num_children(self): return 1 + (self.payload is not None)
    def get_child_index(self, name):
        try: return ('tag', 'payload').index(name)
        except: return -1
    def get_child_at_index(self, index): return (self.tag, self.payload)[index] if index in range(2) else None

# Define Zig Standard Library

class std_SegmentedList_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.prealloc_segment = self.value.GetChildMemberWithName('prealloc_segment')
            self.dynamic_segments = zig_Slice_SynthProvider(self.value.GetChildMemberWithName('dynamic_segments'))
            self.dynamic_segments.update()
            self.len = self.value.GetChildMemberWithName('len').unsigned
        except: pass
    def has_children(self): return True
    def num_children(self): return self.len
    def get_child_index(self, name):
        try: return int(name.removeprefix('[').removesuffix(']'))
        except: return -1
    def get_child_at_index(self, index):
        try:
            if index not in range(self.len): return None
            prealloc_item_count = len(self.prealloc_segment)
            if index < prealloc_item_count: return self.prealloc_segment.child[index]
            prealloc_exp = prealloc_item_count.bit_length() - 1
            shelf_index = log2_int(index + 1) if prealloc_item_count == 0 else log2_int(index + prealloc_item_count) - prealloc_exp - 1
            shelf = self.dynamic_segments.get_child_at_index(shelf_index)
            box_index = (index + 1) - (1 << shelf_index) if prealloc_item_count == 0 else index + prealloc_item_count - (1 << ((prealloc_exp + 1) + shelf_index))
            elem_type = shelf.type.GetPointeeType()
            return shelf.CreateChildAtOffset('[%d]' % index, box_index * elem_type.size, elem_type)
        except: return None

class std_MultiArrayList_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.len = 0

            value_type = self.value.type
            for helper in self.value.target.FindFunctions('%s.dbHelper' % value_type.name, lldb.eFunctionNameTypeFull):
                ptr_self_type, ptr_child_type, ptr_field_type, ptr_entry_type = helper.function.type.GetFunctionArgumentTypes()
                if ptr_self_type.GetPointeeType() == value_type: break
            else: return

            self.entry_type = ptr_entry_type.GetPointeeType()
            self.bytes = self.value.GetChildMemberWithName('bytes')
            self.len = self.value.GetChildMemberWithName('len').unsigned
            self.capacity = self.value.GetChildMemberWithName('capacity').unsigned
        except: pass
    def has_children(self): return True
    def num_children(self): return self.len
    def get_child_index(self, name):
        try: return int(name.removeprefix('[').removesuffix(']'))
        except: return -1
    def get_child_at_index(self, index):
        try:
            if index not in range(self.len): return None
            offset = 0
            data = lldb.SBData()
            for field in self.entry_type.fields:
                field_type = field.type.GetPointeeType()
                field_size = field_type.size
                data.Append(self.bytes.CreateChildAtOffset(field.name, offset + index * field_size, field_type).address_of.data)
                offset += self.capacity * field_size
            return self.bytes.CreateValueFromData('[%d]' % index, data, self.entry_type)
        except: return None

class std_MultiArrayList_Slice_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.len = 0

            value_type = self.value.type
            for helper in self.value.target.FindFunctions('%s.dbHelper' % value_type.name, lldb.eFunctionNameTypeFull):
                ptr_self_type, ptr_child_type, ptr_field_type, ptr_entry_type = helper.function.type.GetFunctionArgumentTypes()
                if ptr_self_type.GetPointeeType() == value_type: break
            else: return

            self.fields = {member.name: index for index, member in enumerate(ptr_field_type.GetPointeeType().enum_members)}
            self.entry_type = ptr_entry_type.GetPointeeType()
            self.ptrs = self.value.GetChildMemberWithName('ptrs')
            self.len = self.value.GetChildMemberWithName('len').unsigned
            self.capacity = self.value.GetChildMemberWithName('capacity').unsigned
        except: pass
    def has_children(self): return True
    def num_children(self): return self.len
    def get_child_index(self, name):
        try: return int(name.removeprefix('[').removesuffix(']'))
        except: return -1
    def get_child_at_index(self, index):
        try:
            if index not in range(self.len): return None
            data = lldb.SBData()
            for field in self.entry_type.fields:
                field_type = field.type.GetPointeeType()
                data.Append(self.ptrs.child[self.fields[field.name.removesuffix('_ptr')]].CreateChildAtOffset(field.name, index * field_type.size, field_type).address_of.data)
            return self.ptrs.CreateValueFromData('[%d]' % index, data, self.entry_type)
        except: return None

class std_HashMapUnmanaged_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.capacity = 0
            self.indices = tuple()

            self.metadata = self.value.GetChildMemberWithName('metadata')
            if not self.metadata.unsigned: return

            value_type = self.value.type
            for helper in self.value.target.FindFunctions('%s.dbHelper' % value_type.name, lldb.eFunctionNameTypeFull):
                ptr_self_type, ptr_hdr_type, ptr_entry_type = helper.function.type.GetFunctionArgumentTypes()
                if ptr_self_type.GetPointeeType() == value_type: break
            else: return
            self.entry_type = ptr_entry_type.GetPointeeType()

            hdr_type = ptr_hdr_type.GetPointeeType()
            hdr = self.metadata.CreateValueFromAddress('header', self.metadata.deref.load_addr - hdr_type.size, hdr_type)
            self.values = hdr.GetChildMemberWithName('values')
            self.keys = hdr.GetChildMemberWithName('keys')
            self.capacity = hdr.GetChildMemberWithName('capacity').unsigned

            self.indices = tuple(i for i, value in enumerate(self.metadata.GetPointeeData(0, self.capacity).sint8) if value < 0)
        except: pass
    def has_children(self): return True
    def num_children(self): return len(self.indices)
    def get_capacity(self): return self.capacity
    def get_child_index(self, name):
        try: return int(name.removeprefix('[').removesuffix(']'))
        except: return -1
    def get_child_at_index(self, index):
        try:
            fields = {name: base.CreateChildAtOffset(name, self.indices[index] * pointee_type.size, pointee_type).address_of.data for name, base, pointee_type in ((name, base, base.type.GetPointeeType()) for name, base in (('key_ptr', self.keys), ('value_ptr', self.values)))}
            data = lldb.SBData()
            for field in self.entry_type.fields: data.Append(fields[field.name])
            return self.metadata.CreateValueFromData('[%d]' % index, data, self.entry_type)
        except: return None
def std_HashMapUnmanaged_SummaryProvider(value, _=None):
    synth = std_HashMapUnmanaged_SynthProvider(value.GetNonSyntheticValue(), _)
    synth.update()
    return 'len=%d capacity=%d' % (synth.num_children(), synth.get_capacity())

# formats a struct of fields of the form `name_ptr: *Type` by auto dereferencing its fields
class std_Entry_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.children = tuple(child.Clone(child.name.removesuffix('_ptr')) for child in self.value.children if child.type.GetPointeeType().size != 0)
            self.indices = {child.name: i for i, child in enumerate(self.children)}
        except: pass
    def has_children(self): return self.num_children() != 0
    def num_children(self): return len(self.children)
    def get_child_index(self, name): return self.indices.get(name)
    def get_child_at_index(self, index): return self.children[index].deref if index in range(len(self.children)) else None

# Define Zig Stage2 Compiler

class TagAndPayload_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            self.tag = self.value.GetChildMemberWithName('tag') or self.value.GetChildMemberWithName('tag_ptr').deref.Clone('tag')
            data = self.value.GetChildMemberWithName('data_ptr') or self.value.GetChildMemberWithName('data')
            self.payload = data.GetChildMemberWithName('payload').GetChildMemberWithName(data.GetChildMemberWithName('tag').value)
        except: pass
    def has_children(self): return True
    def num_children(self): return 2
    def get_child_index(self, name):
        try: return ('tag', 'payload').index(name)
        except: return -1
    def get_child_at_index(self, index): return (self.tag, self.payload)[index] if index in range(2) else None

def InstRef_SummaryProvider(value, _=None):
    return value if any(value.unsigned == member.unsigned for member in value.type.enum_members) else (
        'InternPool.Index(%d)' % value.unsigned if value.unsigned < 0x80000000 else 'instructions[%d]' % (value.unsigned - 0x80000000))

def InstIndex_SummaryProvider(value, _=None):
    return 'instructions[%d]' % value.unsigned

class zig_DeclIndex_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            ip = InternPool_Find(self.value.thread)
            if not ip: return
            self.ptr = ip.GetChildMemberWithName('allocated_decls').GetChildAtIndex(self.value.unsigned).address_of.Clone('decl')
        except: pass
    def has_children(self): return True
    def num_children(self): return 1
    def get_child_index(self, name): return 0 if name == 'decl' else -1
    def get_child_at_index(self, index): return self.ptr if index == 0 else None

class Module_Namespace__Module_Namespace_Index_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            ip = InternPool_Find(self.value.thread)
            if not ip: return
            self.ptr = ip.GetChildMemberWithName('allocated_namespaces').GetChildAtIndex(self.value.unsigned).address_of.Clone('namespace')
        except: pass
    def has_children(self): return True
    def num_children(self): return 1
    def get_child_index(self, name): return 0 if name == 'namespace' else -1
    def get_child_at_index(self, index): return self.ptr if index == 0 else None

class TagOrPayloadPtr_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            value_type = self.value.type
            for helper in self.value.target.FindFunctions('%s.dbHelper' % value_type.name, lldb.eFunctionNameTypeFull):
                ptr_self_type, ptr_tag_to_payload_map_type = helper.function.type.GetFunctionArgumentTypes()
                self_type = ptr_self_type.GetPointeeType()
                if self_type == value_type: break
            else: return
            tag_to_payload_map = {field.name: field.type for field in ptr_tag_to_payload_map_type.GetPointeeType().fields}

            tag = self.value.GetChildMemberWithName('tag_if_small_enough')
            if tag.unsigned < page_size:
                self.tag = tag.Clone('tag')
                self.payload = None
            else:
                ptr_otherwise = self.value.GetChildMemberWithName('ptr_otherwise')
                self.tag = ptr_otherwise.GetChildMemberWithName('tag')
                self.payload = ptr_otherwise.Cast(tag_to_payload_map[self.tag.value]).GetChildMemberWithName('data').Clone('payload')
        except: pass
    def has_children(self): return True
    def num_children(self): return 1 + (self.payload is not None)
    def get_child_index(self, name):
        try: return ('tag', 'payload').index(name)
        except: return -1
    def get_child_at_index(self, index): return (self.tag, self.payload)[index] if index in range(2) else None

def Module_Decl_name(decl):
    error = lldb.SBError()
    return decl.process.ReadCStringFromMemory(decl.GetChildMemberWithName('name').deref.load_addr, 256, error)

def Module_Namespace_RenderFullyQualifiedName(namespace):
    parent = namespace.GetChildMemberWithName('parent')
    if parent.unsigned < page_size: return zig_String_decode(namespace.GetChildMemberWithName('file_scope').GetChildMemberWithName('sub_file_path')).removesuffix('.zig').replace('/', '.')
    return '.'.join((Module_Namespace_RenderFullyQualifiedName(parent), Module_Decl_name(namespace.GetChildMemberWithName('ty').GetChildMemberWithName('payload').GetChildMemberWithName('owner_decl').GetChildMemberWithName('decl'))))

def Module_Decl_RenderFullyQualifiedName(decl): return '.'.join((Module_Namespace_RenderFullyQualifiedName(decl.GetChildMemberWithName('src_namespace')), Module_Decl_name(decl)))

def OwnerDecl_RenderFullyQualifiedName(payload): return Module_Decl_RenderFullyQualifiedName(payload.GetChildMemberWithName('owner_decl').GetChildMemberWithName('decl'))

def InternPool_Find(thread):
    for frame in thread:
        ip = frame.FindVariable('ip') or frame.FindVariable('intern_pool')
        if ip: return ip
        mod = frame.FindVariable('zcu') or frame.FindVariable('mod') or frame.FindVariable('module')
        if mod:
            ip = mod.GetChildMemberWithName('intern_pool')
            if ip: return ip

class InternPool_Index_SynthProvider:
    def __init__(self, value, _=None): self.value = value
    def update(self):
        try:
            index_type = self.value.type
            for helper in self.value.target.FindFunctions('%s.dbHelper' % index_type.name, lldb.eFunctionNameTypeFull):
                ptr_self_type, ptr_tag_to_encoding_map_type = helper.function.type.GetFunctionArgumentTypes()
                if ptr_self_type.GetPointeeType() == index_type: break
            else: return
            tag_to_encoding_map = {field.name: field.type for field in ptr_tag_to_encoding_map_type.GetPointeeType().fields}

            ip = InternPool_Find(self.value.thread)
            if not ip: return
            self.item = ip.GetChildMemberWithName('items').GetChildAtIndex(self.value.unsigned)
            extra = ip.GetChildMemberWithName('extra').GetChildMemberWithName('items')
            self.tag = self.item.GetChildMemberWithName('tag').Clone('tag')
            self.data = None
            self.trailing = None
            data = self.item.GetChildMemberWithName('data')
            encoding_type = tag_to_encoding_map[self.tag.value]
            dynamic_values = {}
            for encoding_field in encoding_type.fields:
                if encoding_field.name == 'data':
                    if encoding_field.type.IsPointerType():
                        extra_index = data.unsigned
                        self.data = extra.GetChildAtIndex(extra_index).address_of.Cast(encoding_field.type).deref.Clone('data')
                        extra_index += encoding_field.type.GetPointeeType().num_fields
                    else:
                        self.data = data.Cast(encoding_field.type).Clone('data')
                elif encoding_field.name == 'trailing':
                    trailing_data = lldb.SBData()
                    for trailing_field in encoding_field.type.fields:
                        trailing_data.Append(extra.GetChildAtIndex(extra_index).address_of.data)
                        trailing_len = dynamic_values['trailing.%s.len' % trailing_field.name].unsigned
                        trailing_data.Append(lldb.SBData.CreateDataFromInt(trailing_len, trailing_data.GetAddressByteSize()))
                        extra_index += trailing_len
                    self.trailing = self.data.CreateValueFromData('trailing', trailing_data, encoding_field.type)
                else:
                    for path in encoding_field.type.GetPointeeType().name.removeprefix('%s::' % encoding_type.name).removeprefix('%s.' % encoding_type.name).partition('__')[0].split(' orelse '):
                        if path.startswith('data.'):
                            root = self.data
                            path = path[len('data'):]
                        else: return
                        dynamic_value = root.GetValueForExpressionPath(path)
                        if dynamic_value:
                            dynamic_values[encoding_field.name] = dynamic_value
                            break
        except: pass
    def has_children(self): return True
    def num_children(self): return 2 + (self.trailing is not None)
    def get_child_index(self, name):
        try: return ('tag', 'data', 'trailing').index(name)
        except: return -1
    def get_child_at_index(self, index): return (self.tag, self.data, self.trailing)[index] if index in range(3) else None

def InternPool_NullTerminatedString_SummaryProvider(value, _=None):
    try:
        ip = InternPool_Find(value.thread)
        if not ip: return
        items = ip.GetChildMemberWithName('string_bytes').GetChildMemberWithName('items')
        b = bytearray()
        i = 0
        while True:
            x = items.GetChildAtIndex(value.unsigned + i).GetValueAsUnsigned()
            if x == 0: break
            b.append(x)
            i += 1
        s = b.decode(encoding='utf8', errors='backslashreplace')
        s1 = s if s.isprintable() else ''.join((c if c.isprintable() else '\\x%02x' % ord(c) for c in s))
        return '"%s"' % s1
    except:
        pass

def type_Type_pointer(payload):
    pointee_type = payload.GetChildMemberWithName('pointee_type')
    sentinel = payload.GetChildMemberWithName('sentinel').GetChildMemberWithName('child')
    align = payload.GetChildMemberWithName('align').unsigned
    addrspace = payload.GetChildMemberWithName('addrspace').value
    bit_offset = payload.GetChildMemberWithName('bit_offset').unsigned
    host_size = payload.GetChildMemberWithName('host_size').unsigned
    vector_index = payload.GetChildMemberWithName('vector_index')
    allowzero = payload.GetChildMemberWithName('allowzero').unsigned
    const = not payload.GetChildMemberWithName('mutable').unsigned
    volatile = payload.GetChildMemberWithName('volatile').unsigned
    size = payload.GetChildMemberWithName('size').value

    if size == 'One': summary = '*'
    elif size == 'Many': summary = '[*'
    elif size == 'Slice': summary = '['
    elif size == 'C': summary = '[*c'
    if sentinel: summary += ':%s' % value_Value_SummaryProvider(sentinel)
    if size != 'One': summary += ']'
    if allowzero: summary += 'allowzero '
    if align != 0 or host_size != 0 or vector_index.value != 'none': summary += 'align(%d%s%s) ' % (align, ':%d:%d' % (bit_offset, host_size) if bit_offset != 0 or host_size != 0 else '', ':?' if vector_index.value == 'runtime' else ':%d' % vector_index.unsigned if vector_index.value != 'none' else '')
    if addrspace != 'generic': summary += 'addrspace(.%s) ' % addrspace
    if const: summary += 'const '
    if volatile: summary += 'volatile '
    summary += type_Type_SummaryProvider(pointee_type)
    return summary

def type_Type_function(payload):
    param_types = payload.GetChildMemberWithName('param_types').children
    comptime_params = payload.GetChildMemberWithName('comptime_params').GetPointeeData(0, len(param_types)).uint8
    return_type = payload.GetChildMemberWithName('return_type')
    alignment = payload.GetChildMemberWithName('alignment').unsigned
    noalias_bits = payload.GetChildMemberWithName('noalias_bits').unsigned
    cc = payload.GetChildMemberWithName('cc').value
    is_var_args = payload.GetChildMemberWithName('is_var_args').unsigned

    return 'fn(%s)%s%s %s' % (', '.join(tuple(''.join(('comptime ' if comptime_param else '', 'noalias ' if noalias_bits & 1 << i else '', type_Type_SummaryProvider(param_type))) for i, (comptime_param, param_type) in enumerate(zip(comptime_params, param_types))) + (('...',) if is_var_args else ())), ' align(%d)' % alignment if alignment != 0 else '', ' callconv(.%s)' % cc if cc != 'Unspecified' else '', type_Type_SummaryProvider(return_type))

def type_Type_SummaryProvider(value, _=None):
    tag = value.GetChildMemberWithName('tag').value
    return type_tag_handlers.get(tag, lambda payload: tag)(value.GetChildMemberWithName('payload'))

type_tag_handlers = {
    'atomic_order': lambda payload: 'std.builtin.AtomicOrder',
    'atomic_rmw_op': lambda payload: 'std.builtin.AtomicRmwOp',
    'calling_convention': lambda payload: 'std.builtin.CallingConvention',
    'address_space': lambda payload: 'std.builtin.AddressSpace',
    'float_mode': lambda payload: 'std.builtin.FloatMode',
    'reduce_op': lambda payload: 'std.builtin.ReduceOp',
    'modifier': lambda payload: 'std.builtin.CallModifier',
    'prefetch_options': lambda payload: 'std.builtin.PrefetchOptions',
    'export_options': lambda payload: 'std.builtin.ExportOptions',
    'extern_options': lambda payload: 'std.builtin.ExternOptions',
    'type_info': lambda payload: 'std.builtin.Type',

    'enum_literal': lambda payload: '@TypeOf(.enum_literal)',
    'null': lambda payload: '@TypeOf(null)',
    'undefined': lambda payload: '@TypeOf(undefined)',
    'empty_struct_literal': lambda payload: '@TypeOf(.{})',

    'anyerror_void_error_union': lambda payload: 'anyerror!void',
    'slice_const_u8': lambda payload: '[]const u8',
    'slice_const_u8_sentinel_0': lambda payload: '[:0]const u8',
    'fn_noreturn_no_args': lambda payload: 'fn() noreturn',
    'fn_void_no_args': lambda payload: 'fn() void',
    'fn_naked_noreturn_no_args': lambda payload: 'fn() callconv(.Naked) noreturn',
    'fn_ccc_void_no_args': lambda payload: 'fn() callconv(.C) void',
    'single_const_pointer_to_comptime_int': lambda payload: '*const comptime_int',
    'manyptr_u8': lambda payload: '[*]u8',
    'manyptr_const_u8': lambda payload: '[*]const u8',
    'manyptr_const_u8_sentinel_0': lambda payload: '[*:0]const u8',

    'function': type_Type_function,
    'error_union': lambda payload: '%s!%s' % (type_Type_SummaryProvider(payload.GetChildMemberWithName('error_set')), type_Type_SummaryProvider(payload.GetChildMemberWithName('payload'))),
    'array_u8': lambda payload: '[%d]u8' % payload.unsigned,
    'array_u8_sentinel_0': lambda payload: '[%d:0]u8' % payload.unsigned,
    'vector': lambda payload: '@Vector(%d, %s)' % (payload.GetChildMemberWithName('len').unsigned, type_Type_SummaryProvider(payload.GetChildMemberWithName('elem_type'))),
    'array': lambda payload: '[%d]%s' % (payload.GetChildMemberWithName('len').unsigned, type_Type_SummaryProvider(payload.GetChildMemberWithName('elem_type'))),
    'array_sentinel': lambda payload: '[%d:%s]%s' % (payload.GetChildMemberWithName('len').unsigned, value_Value_SummaryProvider(payload.GetChildMemberWithName('sentinel')), type_Type_SummaryProvider(payload.GetChildMemberWithName('elem_type'))),
    'tuple': lambda payload: 'tuple{%s}' % ', '.join(('comptime %%s = %s' % value_Value_SummaryProvider(value) if value.GetChildMemberWithName('tag').value != 'unreachable_value' else '%s') % type_Type_SummaryProvider(type) for type, value in zip(payload.GetChildMemberWithName('types').children, payload.GetChildMemberWithName('values').children)),
    'anon_struct': lambda payload: 'struct{%s}' % ', '.join(('comptime %%s: %%s = %s' % value_Value_SummaryProvider(value) if value.GetChildMemberWithName('tag').value != 'unreachable_value' else '%s: %s') % (zig_String_AsIdentifier(name, zig_IsFieldName), type_Type_SummaryProvider(type)) for name, type, value in zip(payload.GetChildMemberWithName('names').children, payload.GetChildMemberWithName('types').children, payload.GetChildMemberWithName('values').children)),
    'pointer': type_Type_pointer,
    'single_const_pointer': lambda payload: '*const %s' % type_Type_SummaryProvider(payload),
    'single_mut_pointer': lambda payload: '*%s' % type_Type_SummaryProvider(payload),
    'many_const_pointer': lambda payload: '[*]const %s' % type_Type_SummaryProvider(payload),
    'many_mut_pointer': lambda payload: '[*]%s' % type_Type_SummaryProvider(payload),
    'c_const_pointer': lambda payload: '[*c]const %s' % type_Type_SummaryProvider(payload),
    'c_mut_pointer': lambda payload: '[*c]%s' % type_Type_SummaryProvider(payload),
    'slice_const': lambda payload: '[]const %s' % type_Type_SummaryProvider(payload),
    'mut_slice': lambda payload: '[]%s' % type_Type_SummaryProvider(payload),
    'int_signed': lambda payload: 'i%d' % payload.unsigned,
    'int_unsigned': lambda payload: 'u%d' % payload.unsigned,
    'optional': lambda payload: '?%s' % type_Type_SummaryProvider(payload),
    'optional_single_mut_pointer': lambda payload: '?*%s' % type_Type_SummaryProvider(payload),
    'optional_single_const_pointer': lambda payload: '?*const %s' % type_Type_SummaryProvider(payload),
    'anyframe_T': lambda payload: 'anyframe->%s' % type_Type_SummaryProvider(payload),
    'error_set': lambda payload: type_tag_handlers['error_set_merged'](payload.GetChildMemberWithName('names')),
    'error_set_single': lambda payload: 'error{%s}' % zig_String_AsIdentifier(payload, zig_IsFieldName),
    'error_set_merged': lambda payload: 'error{%s}' % ','.join(zig_String_AsIdentifier(child.GetChildMemberWithName('key'), zig_IsFieldName) for child in payload.GetChildMemberWithName('entries').children),
    'error_set_inferred': lambda payload: '@typeInfo(@typeInfo(@TypeOf(%s)).@"fn".return_type.?).error_union.error_set' % OwnerDecl_RenderFullyQualifiedName(payload.GetChildMemberWithName('func')),

    'enum_full': OwnerDecl_RenderFullyQualifiedName,
    'enum_nonexhaustive': OwnerDecl_RenderFullyQualifiedName,
    'enum_numbered': OwnerDecl_RenderFullyQualifiedName,
    'enum_simple': OwnerDecl_RenderFullyQualifiedName,
    'struct': OwnerDecl_RenderFullyQualifiedName,
    'union': OwnerDecl_RenderFullyQualifiedName,
    'union_safety_tagged': OwnerDecl_RenderFullyQualifiedName,
    'union_tagged': OwnerDecl_RenderFullyQualifiedName,
    'opaque': OwnerDecl_RenderFullyQualifiedName,
}

def value_Value_str_lit(payload):
    for frame in payload.thread:
        mod = frame.FindVariable('zcu') or frame.FindVariable('mod') or frame.FindVariable('module')
        if mod: break
    else: return
    return '"%s"' % zig_String_decode(mod.GetChildMemberWithName('string_literal_bytes').GetChildMemberWithName('items'), payload.GetChildMemberWithName('index').unsigned, payload.GetChildMemberWithName('len').unsigned)

def value_Value_SummaryProvider(value, _=None):
    tag = value.GetChildMemberWithName('tag').value
    return value_tag_handlers.get(tag, lambda payload: tag.removesuffix('_type'))(value.GetChildMemberWithName('payload'))

value_tag_handlers = {
    'undef': lambda payload: 'undefined',
    'zero': lambda payload: '0',
    'one': lambda payload: '1',
    'void_value': lambda payload: '{}',
    'unreachable_value': lambda payload: 'unreachable',
    'null_value': lambda payload: 'null',
    'bool_true': lambda payload: 'true',
    'bool_false': lambda payload: 'false',

    'empty_struct_value': lambda payload: '.{}',
    'empty_array': lambda payload: '.{}',

    'ty': type_Type_SummaryProvider,
    'int_type': lambda payload: '%c%d' % (payload.GetChildMemberWithName('bits').unsigned, 's' if payload.GetChildMemberWithName('signed').unsigned == 1 else 'u'),
    'int_u64': lambda payload: '%d' % payload.unsigned,
    'int_i64': lambda payload: '%d' % payload.signed,
    'int_big_positive': lambda payload: sum(child.unsigned << i * child.type.size * 8 for i, child in enumerate(payload.children)),
    'int_big_negative': lambda payload: '-%s' % value_tag_handlers['int_big_positive'](payload),
    'function': OwnerDecl_RenderFullyQualifiedName,
    'extern_fn': OwnerDecl_RenderFullyQualifiedName,
    'variable': lambda payload: value_Value_SummaryProvider(payload.GetChildMemberWithName('decl').GetChildMemberWithName('val')),
    'runtime_value': value_Value_SummaryProvider,
    'decl_ref': lambda payload: value_Value_SummaryProvider(payload.GetChildMemberWithName('decl').GetChildMemberWithName('val')),
    'decl_ref_mut': lambda payload: value_Value_SummaryProvider(payload.GetChildMemberWithName('decl_index').GetChildMemberWithName('decl').GetChildMemberWithName('val')),
    'comptime_field_ptr': lambda payload: '&%s' % value_Value_SummaryProvider(payload.GetChildMemberWithName('field_val')),
    'elem_ptr': lambda payload: '(%s)[%d]' % (value_Value_SummaryProvider(payload.GetChildMemberWithName('array_ptr')), payload.GetChildMemberWithName('index').unsigned),
    'field_ptr': lambda payload: '(%s).field[%d]' % (value_Value_SummaryProvider(payload.GetChildMemberWithName('container_ptr')), payload.GetChildMemberWithName('field_index').unsigned),
    'bytes': lambda payload: '"%s"' % zig_String_decode(payload),
    'str_lit': value_Value_str_lit,
    'repeated': lambda payload: '.{%s} ** _' % value_Value_SummaryProvider(payload),
    'empty_array_sentinel': lambda payload: '.{%s}' % value_Value_SummaryProvider(payload),
    'slice': lambda payload: '(%s)[0..%s]' % tuple(value_Value_SummaryProvider(payload.GetChildMemberWithName(name)) for name in ('ptr', 'len')),
    'float_16': lambda payload: payload.value,
    'float_32': lambda payload: payload.value,
    'float_64': lambda payload: payload.value,
    'float_80': lambda payload: payload.value,
    'float_128': lambda payload: payload.value,
    'enum_literal': lambda payload: '.%s' % zig_String_AsIdentifier(payload, zig_IsFieldName),
    'enum_field_index': lambda payload: 'field[%d]' % payload.unsigned,
    'error': lambda payload: 'error.%s' % zig_String_AsIdentifier(payload.GetChildMemberWithName('name'), zig_IsFieldName),
    'eu_payload': value_Value_SummaryProvider,
    'eu_payload_ptr': lambda payload: '&((%s).* catch unreachable)' % value_Value_SummaryProvider(payload.GetChildMemberWithName('container_ptr')),
    'opt_payload': value_Value_SummaryProvider,
    'opt_payload_ptr': lambda payload: '&(%s).*.?' % value_Value_SummaryProvider(payload.GetChildMemberWithName('container_ptr')),
    'aggregate': lambda payload: '.{%s}' % ', '.join(map(value_Value_SummaryProvider, payload.children)),
    'union': lambda payload: '.{.%s = %s}' % tuple(value_Value_SummaryProvider(payload.GetChildMemberWithName(name)) for name in ('tag', 'val')),

    'lazy_align': lambda payload: '@alignOf(%s)' % type_Type_SummaryProvider(payload),
    'lazy_size': lambda payload: '@sizeOf(%s)' % type_Type_SummaryProvider(payload),
}

# Initialize

def add(debugger, *, category, regex=False, type, identifier=None, synth=False, inline_children=False, expand=False, summary=False):
    prefix = '.'.join((__name__, (identifier or type).replace('.', '_').replace(':', '_')))
    if summary: debugger.HandleCommand('type summary add --category %s%s%s "%s"' % (category, ' --inline-children' if inline_children else ''.join((' --expand' if expand else '', ' --python-function %s_SummaryProvider' % prefix if summary == True else ' --summary-string "%s"' % summary)), ' --regex' if regex else '', type))
    if synth: debugger.HandleCommand('type synthetic add --category %s%s --python-class %s_SynthProvider "%s"' % (category, ' --regex' if regex else '', prefix, type))

def MultiArrayList_Entry(type): return '^multi_array_list\\.MultiArrayList\\(%s\\)\\.Entry__struct_[1-9][0-9]*$' % type

def __lldb_init_module(debugger, _=None):
    # Initialize Zig Categories
    debugger.HandleCommand('type category define --language c99 zig.lang zig.std')

    # Initialize Zig Language
    add(debugger, category='zig.lang', regex=True, type='^\\[\\]', identifier='zig_Slice', synth=True, expand=True, summary='len=${svar%#}')
    add(debugger, category='zig.lang', type='[]u8', identifier='zig_String', summary=True)
    add(debugger, category='zig.lang', regex=True, type='^\\?', identifier='zig_Optional', synth=True, summary=True)
    add(debugger, category='zig.lang', regex=True, type='^(error{.*}|anyerror)!', identifier='zig_ErrorUnion', synth=True, inline_children=True, summary=True)

    # Initialize Zig Standard Library
    add(debugger, category='zig.std', type='mem.Allocator', summary='${var.ptr}')
    add(debugger, category='zig.std', regex=True, type='^segmented_list\\.SegmentedList\\(.*\\)$', identifier='std_SegmentedList', synth=True, expand=True, summary='len=${var.len}')
    add(debugger, category='zig.std', regex=True, type='^multi_array_list\\.MultiArrayList\\(.*\\)$', identifier='std_MultiArrayList', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
    add(debugger, category='zig.std', regex=True, type='^multi_array_list\\.MultiArrayList\\(.*\\)\\.Slice$', identifier='std_MultiArrayList_Slice', synth=True, expand=True, summary='len=${var.len} capacity=${var.capacity}')
    add(debugger, category='zig.std', regex=True, type=MultiArrayList_Entry('.*'), identifier='std_Entry', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.std', regex=True, type='^hash_map\\.HashMapUnmanaged\\(.*\\)$', identifier='std_HashMapUnmanaged', synth=True, expand=True, summary=True)
    add(debugger, category='zig.std', regex=True, type='^hash_map\\.HashMapUnmanaged\\(.*\\)\\.Entry$', identifier='std_Entry', synth=True, inline_children=True, summary=True)

    # Initialize Zig Stage2 Compiler
    add(debugger, category='zig.stage2', type='Zir.Inst', identifier='TagAndPayload', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.stage2', regex=True, type=MultiArrayList_Entry('Zir\\.Inst'), identifier='TagAndPayload', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.stage2', regex=True, type='^Zir\\.Inst\\.Data\\.Data__struct_[1-9][0-9]*$', inline_children=True, summary=True)
    add(debugger, category='zig.stage2', type='Zir.Inst::Zir.Inst.Ref', identifier='InstRef', summary=True)
    add(debugger, category='zig.stage2', type='Zir.Inst::Zir.Inst.Index', identifier='InstIndex', summary=True)
    add(debugger, category='zig.stage2', type='Air.Inst', identifier='TagAndPayload', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.stage2', type='Air.Inst::Air.Inst.Ref', identifier='InstRef', summary=True)
    add(debugger, category='zig.stage2', type='Air.Inst::Air.Inst.Index', identifier='InstIndex', summary=True)
    add(debugger, category='zig.stage2', regex=True, type=MultiArrayList_Entry('Air\\.Inst'), identifier='TagAndPayload', synth=True, inline_children=True, summary=True)
    add(debugger, category='zig.stage2', regex=True, type='^Air\\.Inst\\.Data\\.Data__struct_[1-9][0-9]*$', inline_children=True, summary=True)
    add(debugger, category='zig.stage2', type='zig.DeclIndex', synth=True)
    add(debugger, category='zig.stage2', type='Module.Namespace::Module.Namespace.Index', synth=True)
    add(debugger, category='zig.stage2', type='Module.LazySrcLoc', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Index', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.NullTerminatedString', summary=True)
    add(debugger, category='zig.stage2', type='InternPool.Key', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.Int.Storage', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.ErrorUnion.Value', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.Float.Storage', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.Ptr.Addr', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='InternPool.Key.Aggregate.Storage', identifier='zig_TaggedUnion', synth=True)
    add(debugger, category='zig.stage2', type='arch.x86_64.CodeGen.MCValue', identifier='zig_TaggedUnion', synth=True, inline_children=True, summary=True)
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
{
  "private": true,
  "name": "bun",
  "version": "1.1.42",
  "version": "1.1.39",
  "workspaces": [
    "./packages/bun-types"
  ],
@@ -21,7 +21,7 @@
  "react": "^18.3.1",
  "react-dom": "^18.3.1",
  "source-map-js": "^1.2.0",
  "typescript": "^5.7.2",
  "typescript": "^5.4.5",
  "caniuse-lite": "^1.0.30001620",
  "autoprefixer": "^10.4.19",
  "@mdn/browser-compat-data": "~5.5.28"

@@ -1,25 +1,55 @@
use bun_native_plugin::{anyhow, bun, define_bun_plugin, BunLoader, Result};
use bun_native_plugin::{define_bun_plugin, BunLoader, OnBeforeParse};
use mdxjs::{compile, Options as CompileOptions};
use napi_derive::napi;

#[macro_use]
extern crate napi;

define_bun_plugin!("bun-mdx-rs");

#[bun]
pub fn bun_mdx_rs(handle: &mut OnBeforeParse) -> Result<()> {
    let source_str = handle.input_source_code()?;
#[no_mangle]
pub extern "C" fn bun_mdx_rs(
    args: *const bun_native_plugin::sys::OnBeforeParseArguments,
    result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
    let args = unsafe { &*args };

    let mut handle = match OnBeforeParse::from_raw(args, result) {
        Ok(handle) => handle,
        Err(_) => {
            return;
        }
    };

    let source_str = match handle.input_source_code() {
        Ok(source_str) => source_str,
        Err(_) => {
            handle.log_error("Failed to fetch source code");
            return;
        }
    };

    let mut options = CompileOptions::gfm();

    // Leave it as JSX for Bun to handle
    options.jsx = true;

    let path = handle.path()?;
    let path = match handle.path() {
        Ok(path) => path,
        Err(e) => {
            handle.log_error(&format!("Failed to get path: {:?}", e));
            return;
        }
    };
    options.filepath = Some(path.to_string());

    let jsx = compile(&source_str, &options)
        .map_err(|e| anyhow::anyhow!("Failed to compile MDX: {:?}", e))?;

    handle.set_output_source_code(jsx, BunLoader::BUN_LOADER_JSX);

    Ok(())
    match compile(&source_str, &options) {
        Ok(compiled) => {
            handle.set_output_source_code(compiled, BunLoader::BUN_LOADER_JSX);
        }
        Err(_) => {
            handle.log_error("Failed to compile MDX");
            return;
        }
    }
}

@@ -116,7 +116,6 @@ try {
  entrypoints: [join(import.meta.dir, "out/manifest.js")],
  outdir: "out",
  minify: true,
  throw: true,
});
const jsFilename = "manifest-" + jsBundle.outputs[0].hash + ".js";
// const cssBundle = await build({

packages/bun-native-plugin-rs/Cargo.lock (generated)
@@ -11,12 +11,6 @@ dependencies = [
 "memchr",
]

[[package]]
name = "anyhow"
version = "1.0.94"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7"

[[package]]
name = "bindgen"
version = "0.70.1"
@@ -43,24 +37,11 @@ version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"

[[package]]
name = "bun-macro"
version = "0.1.0"
dependencies = [
 "anyhow",
 "napi",
 "quote",
 "syn",
]

[[package]]
name = "bun-native-plugin"
version = "0.1.0"
dependencies = [
 "anyhow",
 "bindgen",
 "bun-macro",
 "napi",
]

[[package]]
@@ -89,25 +70,6 @@ dependencies = [
 "libloading",
]

[[package]]
name = "convert_case"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca"
dependencies = [
 "unicode-segmentation",
]

[[package]]
name = "ctor"
version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501"
dependencies = [
 "quote",
 "syn",
]

[[package]]
name = "either"
version = "1.13.0"
@@ -163,55 +125,6 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"

[[package]]
name = "napi"
version = "2.16.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "214f07a80874bb96a8433b3cdfc84980d56c7b02e1a0d7ba4ba0db5cef785e2b"
dependencies = [
 "bitflags",
 "ctor",
 "napi-derive",
 "napi-sys",
 "once_cell",
]

[[package]]
name = "napi-derive"
version = "2.16.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cbe2585d8ac223f7d34f13701434b9d5f4eb9c332cccce8dee57ea18ab8ab0c"
dependencies = [
 "cfg-if",
 "convert_case",
 "napi-derive-backend",
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "napi-derive-backend"
version = "1.0.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1639aaa9eeb76e91c6ae66da8ce3e89e921cd3885e99ec85f4abacae72fc91bf"
dependencies = [
 "convert_case",
 "once_cell",
 "proc-macro2",
 "quote",
 "syn",
]

[[package]]
name = "napi-sys"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "427802e8ec3a734331fec1035594a210ce1ff4dc5bc1950530920ab717964ea3"
dependencies = [
 "libloading",
]

[[package]]
name = "nom"
version = "7.1.3"
@@ -222,12 +135,6 @@ dependencies = [
 "minimal-lexical",
]

[[package]]
name = "once_cell"
version = "1.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"

[[package]]
name = "prettyplease"
version = "0.2.25"
@@ -314,12 +221,6 @@ version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83"

[[package]]
name = "unicode-segmentation"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"

[[package]]
name = "windows-targets"
version = "0.52.6"
@@ -5,13 +5,3 @@ edition = "2021"

[build-dependencies]
bindgen = "0.70.1"

[dependencies]
anyhow = "1.0.94"
bun-macro = { path = "./bun-macro" }
napi = { version = "2.14.1", default-features = false, features = ["napi4"] }

[features]
default = ["napi"]
napi = []
@@ -1,10 +1,10 @@
> ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
> ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems proramming and the C ABI. Use with caution.

# Bun Native Plugins

This crate provides a Rustified wrapper over the Bun's native bundler plugin C API.

Some advantages to _native_ bundler plugins as opposed to regular ones implemented in JS are:
Some advantages to _native_ bundler plugins as opposed to regular ones implemented in JS:

- Native plugins take full advantage of Bun's parallelized bundler pipeline and run on multiple threads at the same time
- Unlike JS, native plugins don't need to do the UTF-8 <-> UTF-16 source code string conversions
@@ -30,84 +30,61 @@ Then install this crate:
cargo add bun-native-plugin
```

Now, inside the `lib.rs` file, we'll use the `bun_native_plugin::bun` proc macro to define a function which
will implement our native plugin.
Now, inside the `lib.rs` file, expose a C ABI function which has the same function signature as the plugin lifecycle hook that you want to implement.

Here's an example implementing the `onBeforeParse` hook:
For example, implementing `onBeforeParse`:

```rs
use bun_native_plugin::{define_bun_plugin, OnBeforeParse, bun, Result, anyhow, BunLoader};
use bun_native_plugin::{define_bun_plugin, OnBeforeParse};
use napi_derive::napi;

/// Define the plugin and its name
/// Define with the name of the plugin
define_bun_plugin!("replace-foo-with-bar");

/// Here we'll implement `onBeforeParse` with code that replaces all occurrences of
/// `foo` with `bar`.
/// This is necessary for napi-rs to compile this into a proper NAPI module
#[napi]
pub fn register_bun_plugin() {}

/// Use `no_mangle` so that we can reference this symbol by name later
/// when registering this native plugin in JS.
///
/// We use the #[bun] macro to generate some of the boilerplate code.
///
/// The argument of the function (`handle: &mut OnBeforeParse`) tells
/// the macro that this function implements the `onBeforeParse` hook.
#[bun]
pub fn replace_foo_with_bar(handle: &mut OnBeforeParse) -> Result<()> {
  // Fetch the input source code.
  let input_source_code = handle.input_source_code()?;

  // Get the Loader for the file
  let loader = handle.output_loader();

  let output_source_code = input_source_code.replace("foo", "bar");

  handle.set_output_source_code(output_source_code, BunLoader::BUN_LOADER_JSX);

  Ok(())
}
```

Internally, the `#[bun]` macro wraps your code and declares a C ABI function which implements
the function signature of `onBeforeParse` plugins in Bun's C API for bundler plugins.

Then it calls your code. The wrapper looks _roughly_ like this:

```rs
pub extern "C" fn replace_foo_with_bar(
/// Here we'll create a dummy plugin which replaces all occurences of
/// `foo` with `bar`
#[no_mangle]
pub extern "C" fn on_before_parse_plugin_impl(
  args: *const bun_native_plugin::sys::OnBeforeParseArguments,
  result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
  // The actual code you wrote is inlined here
  fn __replace_foo_with_bar(handle: &mut OnBeforeParse) -> Result<()> {
    // Fetch the input source code.
    let input_source_code = handle.input_source_code()?;

    // Get the Loader for the file
    let loader = handle.output_loader();

    let output_source_code = input_source_code.replace("foo", "bar");

    handle.set_output_source_code(output_source_code, BunLoader::BUN_LOADER_JSX);

    Ok(())
  }

  let args = unsafe { &*args };

  // This returns a handle which is a safe wrapper over the raw
  // C API.
  let mut handle = match OnBeforeParse::from_raw(args, result) {
    Ok(handle) => handle,
    Err(_) => {
      // `OnBeforeParse::from_raw` handles error logging
      // so it's fine to return here.
      return;
    }
  };

  if let Err(e) = __replace_foo_with_bar(&handle) {
    handle.log_error(&e.to_string());
  }
  let input_source_code = match handle.input_source_code() {
    Ok(source_str) => source_str,
    Err(_) => {
      // If we encounter an error, we must log it so that
      // Bun knows this plugin failed.
      handle.log_error("Failed to fetch source code!");
      return;
    }
  };

  let loader = handle.output_loader();
  let output_source_code = source_str.replace("foo", "bar");
  handle.set_output_source_code(output_source_code, loader);
}
```

Now, let's compile this NAPI module. If you're using napi-rs, the `package.json` should have a `build` script you can run:
Then compile this NAPI module. If you're using napi-rs, the `package.json` should have a `build` script you can run:

```bash
bun run build
@@ -130,7 +107,7 @@ const result = await Bun.build({
// We tell it to use the function we implemented inside of our `lib.rs` code.
build.onBeforeParse(
  { filter: /\.ts/ },
  { napiModule, symbol: "replace_foo_with_bar" },
  { napiModule, symbol: "on_before_parse_plugin_impl" },
);
},
},
@@ -142,14 +119,19 @@ const result = await Bun.build({

### Error handling and panics

In the case that the value of the `Result` your plugin function returns is an `Err(...)`, the error will be logged to Bun's bundler.
It is highly recommended to avoid panicking as this will crash the runtime. Instead, you must handle errors and log them:

It is highly advised that you return all errors and avoid `.unwrap()`'ing or `.expect()`'ing results.

The `#[bun]` wrapper macro actually runs your code inside of a [`panic::catch_unwind`](https://doc.rust-lang.org/std/panic/fn.catch_unwind.html),
which may catch _some_ panics but is **not guaranteed to catch all panics**.

Therefore, it is recommended to **avoid panics at all costs**.
```rs
let input_source_code = match handle.input_source_code() {
  Ok(source_str) => source_str,
  Err(_) => {
    // If we encounter an error, we must log it so that
    // Bun knows this plugin failed.
    handle.log_error("Failed to fetch source code!");
    return;
  }
};
```

### Passing state to and from JS: `External`

@@ -217,16 +199,41 @@ console.log("Total `foo`s encountered: ", pluginState.getFooCount());
Finally, from the native implementation of your plugin, you can extract the external:

```rs
#[bun]
pub fn on_before_parse_plugin_impl(handle: &mut OnBeforeParse) {
pub extern "C" fn on_before_parse_plugin_impl(
  args: *const bun_native_plugin::sys::OnBeforeParseArguments,
  result: *mut bun_native_plugin::sys::OnBeforeParseResult,
) {
  let args = unsafe { &*args };

  let mut handle = match OnBeforeParse::from_raw(args, result) {
    Ok(handle) => handle,
    Err(_) => {
      // `OnBeforeParse::from_raw` handles error logging
      // so it's fine to return here.
      return;
    }
  };

  let plugin_state: &PluginState =
  // This operation is only safe if you pass in an external when registering the plugin.
  // If you don't, this could lead to a segfault or access of undefined memory.
  let plugin_state: &PluginState =
    unsafe { handle.external().and_then(|state| state.ok_or(Error::Unknown))? };
    match unsafe { handle.external().and_then(|state| state.ok_or(Error::Unknown)) } {
      Ok(state) => state,
      Err(_) => {
        handle.log_error("Failed to get external!");
        return;
      }
    };

  // Fetch our source code again
  let input_source_code = handle.input_source_code()?;
  let input_source_code = match handle.input_source_code() {
    Ok(source_str) => source_str,
    Err(_) => {
      handle.log_error("Failed to fetch source code!");
      return;
    }
  };

  // Count the number of `foo`s and add it to our state
  let foo_count = source_code.matches("foo").count() as u32;
@@ -236,6 +243,6 @@ pub fn on_before_parse_plugin_impl(handle: &mut OnBeforeParse) {

### Concurrency

Your plugin function can be called _on any thread_ at _any time_ and possibly _multiple times at once_.
Your `extern "C"` plugin function can be called _on any thread_ at _any time_ and _multiple times at once_.

Therefore, you must design any state management to be threadsafe.
Therefore, you must design any state management to be threadsafe
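To make that requirement concrete, here is a minimal sketch, not taken from the crate itself, of state that stays correct when the hook runs on several bundler threads at once; the `PluginState` name and `foo_count` field are illustrative:

```rs
use std::sync::atomic::{AtomicU32, Ordering};

/// Illustrative state shared across concurrent plugin invocations.
/// An atomic (or a `Mutex`) keeps updates race-free, since Bun may
/// invoke the hook from multiple bundler threads simultaneously.
pub struct PluginState {
    pub foo_count: AtomicU32,
}

impl PluginState {
    /// Record `n` new occurrences; safe to call from any thread.
    pub fn add_foos(&self, n: u32) {
        self.foo_count.fetch_add(n, Ordering::Relaxed);
    }
}
```

Handing a value like this to Bun as an `External` means every invocation can observe and update it without any extra locking.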
@@ -1,14 +0,0 @@

[package]
name = "bun-macro"
version = "0.1.0"
edition = "2021"

[lib]
proc-macro = true

[dependencies]
syn = { version = "2.0", features = ["full"] }
quote = "1.0"
napi = "2.16.13"
anyhow = "1.0.94"
@@ -1,54 +0,0 @@
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, Ident, ItemFn};

#[proc_macro_attribute]
pub fn bun(_attr: TokenStream, item: TokenStream) -> TokenStream {
    // Parse the input function
    let input_fn = parse_macro_input!(item as ItemFn);
    let fn_name = &input_fn.sig.ident;
    let inner_fn_name = Ident::new(&format!("__{}", fn_name), fn_name.span());
    let fn_block = &input_fn.block;

    // Generate the wrapped function
    let output = quote! {
        #[no_mangle]
        pub unsafe extern "C" fn #fn_name(
            args_raw: *mut bun_native_plugin::sys::OnBeforeParseArguments,
            result: *mut bun_native_plugin::sys::OnBeforeParseResult,
        ) {
            fn #inner_fn_name(handle: &mut bun_native_plugin::OnBeforeParse) -> Result<()> {
                #fn_block
            }

            let args_path = unsafe { (*args_raw).path_ptr };
            let args_path_len = unsafe { (*args_raw).path_len };
            let result_pointer = result;

            let result = std::panic::catch_unwind(|| {
                let mut handle = match bun_native_plugin::OnBeforeParse::from_raw(args_raw, result) {
                    Ok(handle) => handle,
                    Err(_) => return,
                };
                if let Err(e) = #inner_fn_name(&mut handle) {
                    handle.log_error(&format!("{:?}", e));
                }
            });

            if let Err(e) = result {
                let msg_string = format!("Plugin crashed: {:?}", e);
                let mut log_options = bun_native_plugin::log_from_message_and_level(
                    &msg_string,
                    bun_native_plugin::sys::BunLogLevel::BUN_LOG_LEVEL_ERROR,
                    args_path,
                    args_path_len,
                );
                unsafe {
                    ((*result_pointer).log.unwrap())(args_raw, &mut log_options);
                }
            }
        }
    };

    output.into()
}
@@ -1,4 +1,4 @@
//! > ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems programming and the C ABI. Use with caution.
//! > ⚠️ Note: This is an advanced and experimental API recommended only for plugin developers who are familiar with systems proramming and the C ABI. Use with caution.
//!
//! # Bun Native Plugins
//!
@@ -44,7 +44,7 @@
//! /// Use `no_mangle` so that we can reference this symbol by name later
//! /// when registering this native plugin in JS.
//! ///
//! /// Here we'll create a dummy plugin which replaces all occurrences of
//! /// Here we'll create a dummy plugin which replaces all occurences of
//! /// `foo` with `bar`
//! #[no_mangle]
//! pub extern "C" fn on_before_parse_plugin_impl(
@@ -244,11 +244,10 @@
//! Your `extern "C"` plugin function can be called _on any thread_ at _any time_ and _multiple times at once_.
//!
//! Therefore, you must design any state management to be threadsafe

#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
pub use anyhow;
pub use bun_macro::bun;

#[repr(transparent)]
pub struct BunPluginName(*const c_char);
@@ -262,7 +261,7 @@ impl BunPluginName {
#[macro_export]
macro_rules! define_bun_plugin {
    ($name:expr) => {
        pub static BUN_PLUGIN_NAME_STRING: &str = concat!($name, "\0");
        pub static BUN_PLUGIN_NAME_STRING: &str = $name;

        #[no_mangle]
        pub static BUN_PLUGIN_NAME: bun_native_plugin::BunPluginName =
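The `concat!($name, "\0")` side of this hunk matters because `BunPluginName` wraps a raw `*const c_char`, and C consumers read until a NUL terminator that Rust string literals do not carry. A minimal standalone sketch of that detail (illustrative only, not the crate's code):

```rs
use std::ffi::{c_char, CStr};

// A plain &str is not NUL-terminated; appending "\0" at compile time
// makes its pointer usable as a C string across the FFI boundary.
static NAME: &str = concat!("replace-foo-with-bar", "\0");

fn main() {
    let ptr = NAME.as_ptr() as *const c_char;
    // Sound only because we appended the terminator ourselves.
    let round_trip = unsafe { CStr::from_ptr(ptr) };
    assert_eq!(round_trip.to_str().unwrap(), "replace-foo-with-bar");
}
```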
@@ -280,9 +279,7 @@ use std::{
    borrow::Cow,
    cell::UnsafeCell,
    ffi::{c_char, c_void},
    marker::PhantomData,
    str::Utf8Error,
    sync::PoisonError,
};

pub mod sys {
@@ -326,7 +323,7 @@ impl Drop for SourceCodeContext {
pub type BunLogLevel = sys::BunLogLevel;
pub type BunLoader = sys::BunLoader;

fn get_from_raw_str<'a>(ptr: *const u8, len: usize) -> PluginResult<Cow<'a, str>> {
fn get_from_raw_str<'a>(ptr: *const u8, len: usize) -> Result<Cow<'a, str>> {
    let slice: &'a [u8] = unsafe { std::slice::from_raw_parts(ptr, len) };

    // Windows allows invalid UTF-16 strings in the filesystem. These get converted to WTF-8 in Zig.
@@ -354,31 +351,9 @@ pub enum Error {
    IncompatiblePluginVersion,
    ExternalTypeMismatch,
    Unknown,
    LockPoisoned,
}

pub type PluginResult<T> = std::result::Result<T, Error>;
pub type Result<T> = anyhow::Result<T>;

impl std::fmt::Display for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:?}", self)
    }
}

impl std::error::Error for Error {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        None
    }

    fn description(&self) -> &str {
        "description() is deprecated; use Display"
    }

    fn cause(&self) -> Option<&dyn std::error::Error> {
        self.source()
    }
}
pub type Result<T> = std::result::Result<T, Error>;

impl From<Utf8Error> for Error {
    fn from(value: Utf8Error) -> Self {
@@ -386,12 +361,6 @@ impl From<Utf8Error> for Error {
    }
}

impl<Guard> From<PoisonError<Guard>> for Error {
    fn from(_: PoisonError<Guard>) -> Self {
        Self::LockPoisoned
    }
}

/// A safe handle for the arguments + result struct for the
/// `OnBeforeParse` bundler lifecycle hook.
///
@@ -401,10 +370,9 @@ impl<Guard> From<PoisonError<Guard>> for Error {
///
/// To initialize this struct, see the `from_raw` method.
pub struct OnBeforeParse<'a> {
    pub args_raw: *mut sys::OnBeforeParseArguments,
    args_raw: &'a sys::OnBeforeParseArguments,
    result_raw: *mut sys::OnBeforeParseResult,
    compilation_context: *mut SourceCodeContext,
    __phantom: PhantomData<&'a ()>,
}

impl<'a> OnBeforeParse<'a> {
@@ -426,10 +394,10 @@ impl<'a> OnBeforeParse<'a> {
    /// }
    /// ```
    pub fn from_raw(
        args: *mut sys::OnBeforeParseArguments,
        args: &'a sys::OnBeforeParseArguments,
        result: *mut sys::OnBeforeParseResult,
    ) -> PluginResult<Self> {
        if unsafe { (*args).__struct_size } < std::mem::size_of::<sys::OnBeforeParseArguments>()
    ) -> Result<Self> {
        if args.__struct_size < std::mem::size_of::<sys::OnBeforeParseArguments>()
            || unsafe { (*result).__struct_size } < std::mem::size_of::<sys::OnBeforeParseResult>()
        {
            let message = "This plugin is not compatible with the current version of Bun.";
@@ -437,8 +405,8 @@ impl<'a> OnBeforeParse<'a> {
                __struct_size: std::mem::size_of::<sys::BunLogOptions>(),
                message_ptr: message.as_ptr(),
                message_len: message.len(),
                path_ptr: unsafe { (*args).path_ptr },
                path_len: unsafe { (*args).path_len },
                path_ptr: args.path_ptr,
                path_len: args.path_len,
                source_line_text_ptr: std::ptr::null(),
                source_line_text_len: 0,
                level: BunLogLevel::BUN_LOG_LEVEL_ERROR as i8,
@@ -458,21 +426,15 @@ impl<'a> OnBeforeParse<'a> {
            args_raw: args,
            result_raw: result,
            compilation_context: std::ptr::null_mut() as *mut _,
            __phantom: Default::default(),
        })
    }

    pub fn path(&self) -> PluginResult<Cow<'_, str>> {
        unsafe { get_from_raw_str((*self.args_raw).path_ptr, (*self.args_raw).path_len) }
    pub fn path(&self) -> Result<Cow<'_, str>> {
        get_from_raw_str(self.args_raw.path_ptr, self.args_raw.path_len)
    }

    pub fn namespace(&self) -> PluginResult<Cow<'_, str>> {
        unsafe {
            get_from_raw_str(
                (*self.args_raw).namespace_ptr,
                (*self.args_raw).namespace_len,
            )
        }
    pub fn namespace(&self) -> Result<Cow<'_, str>> {
        get_from_raw_str(self.args_raw.namespace_ptr, self.args_raw.namespace_len)
    }

    /// Get the external object from the `OnBeforeParse` arguments.
@@ -523,13 +485,12 @@ impl<'a> OnBeforeParse<'a> {
    ///     },
    /// };
    /// ```
    pub unsafe fn external<T: 'static + Sync>(&self) -> PluginResult<Option<&'static T>> {
        if unsafe { (*self.args_raw).external.is_null() } {
    pub unsafe fn external<T: 'static + Sync>(&self) -> Result<Option<&'static T>> {
        if self.args_raw.external.is_null() {
            return Ok(None);
        }

        let external: *mut TaggedObject<T> =
            unsafe { (*self.args_raw).external as *mut TaggedObject<T> };
        let external: *mut TaggedObject<T> = self.args_raw.external as *mut TaggedObject<T>;

        unsafe {
            if (*external).type_id != TypeId::of::<T>() {
@@ -544,13 +505,12 @@ impl<'a> OnBeforeParse<'a> {
    ///
    /// This is unsafe as you must ensure that no other invocation of the plugin
    /// simultaneously holds a mutable reference to the external.
    pub unsafe fn external_mut<T: 'static + Sync>(&mut self) -> PluginResult<Option<&mut T>> {
        if unsafe { (*self.args_raw).external.is_null() } {
    pub unsafe fn external_mut<T: 'static + Sync>(&mut self) -> Result<Option<&mut T>> {
        if self.args_raw.external.is_null() {
            return Ok(None);
        }

        let external: *mut TaggedObject<T> =
            unsafe { (*self.args_raw).external as *mut TaggedObject<T> };
        let external: *mut TaggedObject<T> = self.args_raw.external as *mut TaggedObject<T>;

        unsafe {
            if (*external).type_id != TypeId::of::<T>() {
@@ -565,12 +525,9 @@ impl<'a> OnBeforeParse<'a> {
    ///
    /// On Windows, this function may return an `Err(Error::Utf8(...))` if the
    /// source code contains invalid UTF-8.
    pub fn input_source_code(&self) -> PluginResult<Cow<'_, str>> {
    pub fn input_source_code(&self) -> Result<Cow<'_, str>> {
        let fetch_result = unsafe {
            ((*self.result_raw).fetchSourceCode.unwrap())(
                self.args_raw as *const _,
                self.result_raw,
            )
            ((*self.result_raw).fetchSourceCode.unwrap())(self.args_raw, self.result_raw)
        };

        if fetch_result != 0 {
@@ -630,7 +587,7 @@ impl<'a> OnBeforeParse<'a> {
    }

    /// Set the output loader for the current file.
    pub fn set_output_loader(&self, loader: BunLoader) {
    pub fn set_output_loader(&self, loader: BunLogLevel) {
        // SAFETY: We don't hand out mutable references to `result_raw` so dereferencing it is safe.
        unsafe {
            (*self.result_raw).loader = loader as u8;
@@ -649,36 +606,22 @@ impl<'a> OnBeforeParse<'a> {

    /// Log a message with the given level.
    pub fn log(&self, message: &str, level: BunLogLevel) {
        let mut log_options = log_from_message_and_level(
            message,
            level,
            unsafe { (*self.args_raw).path_ptr },
            unsafe { (*self.args_raw).path_len },
        );
        let mut log_options = sys::BunLogOptions {
            __struct_size: std::mem::size_of::<sys::BunLogOptions>(),
            message_ptr: message.as_ptr(),
            message_len: message.len(),
            path_ptr: self.args_raw.path_ptr,
            path_len: self.args_raw.path_len,
            source_line_text_ptr: std::ptr::null(),
            source_line_text_len: 0,
            level: level as i8,
            line: 0,
            lineEnd: 0,
            column: 0,
            columnEnd: 0,
        };
        unsafe {
            ((*self.result_raw).log.unwrap())(self.args_raw, &mut log_options);
        }
    }
}

pub fn log_from_message_and_level(
    message: &str,
    level: BunLogLevel,
    path: *const u8,
    path_len: usize,
) -> sys::BunLogOptions {
    sys::BunLogOptions {
        __struct_size: std::mem::size_of::<sys::BunLogOptions>(),
        message_ptr: message.as_ptr(),
        message_len: message.len(),
        path_ptr: path as *const _,
        path_len,
        source_line_text_ptr: std::ptr::null(),
        source_line_text_len: 0,
        level: level as i8,
        line: 0,
        lineEnd: 0,
        column: 0,
        columnEnd: 0,
    }
}
@@ -15,7 +15,7 @@ This plugin can be used to support `.yaml` loaders in Bun's bundler by passing i
```ts
import yamlPlugin from "bun-plugin-yaml";

await Bun.build({
Bun.build({
  entrypoints: ["./index.tsx"],
  // other config
9 packages/bun-types/ambient.d.ts vendored
@@ -1,9 +0,0 @@
declare module "*.txt" {
  var text: string;
  export = text;
}

declare module "*.toml" {
  var contents: any;
  export = contents;
}
35 packages/bun-types/bun.d.ts vendored
@@ -1544,7 +1544,7 @@ declare module "bun" {
    define?: Record<string, string>;
    // origin?: string; // e.g. http://mydomain.com
    loader?: { [k in string]: Loader };
    sourcemap?: "none" | "linked" | "inline" | "external" | "linked" | boolean; // default: "none", true -> "inline"
    sourcemap?: "none" | "linked" | "inline" | "external" | "linked"; // default: "none", true -> "inline"
    /**
     * package.json `exports` conditions used when resolving imports
     *
@@ -1553,26 +1553,6 @@ declare module "bun" {
     * https://nodejs.org/api/packages.html#exports
     */
    conditions?: Array<string> | string;

    /**
     * Controls how environment variables are handled during bundling.
     *
     * Can be one of:
     * - `"inline"`: Injects environment variables into the bundled output by converting `process.env.FOO`
     *   references to string literals containing the actual environment variable values
     * - `"disable"`: Disables environment variable injection entirely
     * - A string ending in `*`: Inlines environment variables that match the given prefix.
     *   For example, `"MY_PUBLIC_*"` will only include env vars starting with "MY_PUBLIC_"
     *
     * @example
     * ```ts
     * Bun.build({
     *   env: "MY_PUBLIC_*",
     *   entrypoints: ["src/index.ts"],
     * })
     * ```
     */
    env?: "inline" | "disable" | `${string}*`;
    minify?:
      | boolean
      | {
@@ -1638,15 +1618,6 @@ declare module "bun" {
     * Drop function calls to matching property accesses.
     */
    drop?: string[];

    /**
     * When set to `true`, the returned promise rejects with an AggregateError when a build failure happens.
     * When set to `false`, the `success` property of the returned object will be `false` when a build failure happens.
     *
     * This defaults to `false` in Bun 1.1 and will change to `true` in Bun 1.2
     * as most usage of `Bun.build` forgets to check for errors.
     */
    throw?: boolean;
  }

  namespace Password {
@@ -2160,8 +2131,6 @@ declare module "bun" {
   * });
   */
  data: T;

  getBufferedAmount(): number;
}

/**
@@ -3930,7 +3899,7 @@ declare module "bun" {
   * The namespace of the importer.
   */
  namespace: string;
  /**
  /**
   * The directory to perform file-based resolutions in.
   */
  resolveDir: string;
16 packages/bun-types/globals.d.ts vendored
@@ -1,3 +1,5 @@
export {};

type _ReadableStream<T> = typeof globalThis extends {
  onerror: any;
  ReadableStream: infer T;
@@ -139,6 +141,16 @@ import type { TextDecoder as NodeTextDecoder, TextEncoder as NodeTextEncoder } f
import type { MessagePort } from "worker_threads";
import type { WebSocket as _WebSocket } from "ws";

declare module "*.txt" {
  var text: string;
  export = text;
}

declare module "*.toml" {
  var contents: any;
  export = contents;
}

declare global {
  var Bun: typeof import("bun");

@@ -1823,10 +1835,10 @@ declare global {
  readonly main: boolean;

  /** Alias of `import.meta.dir`. Exists for Node.js compatibility */
  dirname: string;
  readonly dirname: string;

  /** Alias of `import.meta.path`. Exists for Node.js compatibility */
  filename: string;
  readonly filename: string;
}

/**
1 packages/bun-types/index.d.ts vendored
@@ -20,4 +20,3 @@
/// <reference path="./sqlite.d.ts" />
/// <reference path="./wasm.d.ts" />
/// <reference path="./deprecated.d.ts" />
/// <reference path="./ambient.d.ts" />
12 packages/bun-types/jsc.d.ts vendored
@@ -214,16 +214,4 @@ declare module "bun:jsc" {
   * Run JavaScriptCore's sampling profiler
   */
  function startSamplingProfiler(optionalDirectory?: string): void;

  /**
   * Non-recursively estimate the memory usage of an object, excluding the memory usage of
   * properties or other objects it references. For more accurate per-object
   * memory usage, use {@link Bun.generateHeapSnapshot}.
   *
   * This is a best-effort estimate. It may not be 100% accurate. When it's
   * wrong, it may mean the memory is non-contiguous (such as a large array).
   *
   * Passing a primitive type that isn't heap allocated returns 0.
   */
  function estimateShallowMemoryUsageOf(value: object | CallableFunction | bigint | symbol | string): number;
}
2 packages/bun-types/sqlite.d.ts vendored
@@ -1127,7 +1127,7 @@ declare module "bun:sqlite" {
   *
   * @since Bun v1.1.14
   */
  export interface Changes {
  interface Changes {
    /**
     * The number of rows changed by the last `run` or `exec` call.
     */
@@ -1,4 +1,4 @@
import { CString, dlopen, FFIType, JSCallback, Pointer, read, suffix } from "bun:ffi";
import { CString, dlopen, FFIType, Pointer, read, suffix } from "bun:ffi";
import * as tsd from "./utilities.test";

// `suffix` is either "dylib", "so", or "dll" depending on the platform
@@ -62,14 +62,12 @@ const lib = dlopen(
  },
);

declare const ptr: Pointer;

tsd.expectType<CString>(lib.symbols.sqlite3_libversion());
tsd.expectType<number>(lib.symbols.add(1, 2));

tsd.expectType<Pointer | null>(lib.symbols.ptr_type(ptr));
tsd.expectType<Pointer | null>(lib.symbols.ptr_type(0));

tsd.expectType<Pointer | null>(lib.symbols.fn_type(new JSCallback(() => {}, {})));
tsd.expectType<Pointer | null>(lib.symbols.fn_type(0));

function _arg(
  ...params: [
@@ -168,16 +166,16 @@ tsd.expectType<void>(lib2.symbols.multi_args(1, 2));
tsd.expectTypeEquals<ReturnType<(typeof lib2)["symbols"]["no_returns"]>, undefined>(true);
tsd.expectTypeEquals<Parameters<(typeof lib2)["symbols"]["no_args"]>, []>(true);

tsd.expectType<number>(read.u8(ptr));
tsd.expectType<number>(read.u8(ptr, 0));
tsd.expectType<number>(read.i8(ptr, 0));
tsd.expectType<number>(read.u16(ptr, 0));
tsd.expectType<number>(read.i16(ptr, 0));
tsd.expectType<number>(read.u32(ptr, 0));
tsd.expectType<number>(read.i32(ptr, 0));
tsd.expectType<bigint>(read.u64(ptr, 0));
tsd.expectType<bigint>(read.i64(ptr, 0));
tsd.expectType<number>(read.f32(ptr, 0));
tsd.expectType<number>(read.f64(ptr, 0));
tsd.expectType<number>(read.ptr(ptr, 0));
tsd.expectType<number>(read.intptr(ptr, 0));
tsd.expectType<number>(read.u8(0));
tsd.expectType<number>(read.u8(0, 0));
tsd.expectType<number>(read.i8(0, 0));
tsd.expectType<number>(read.u16(0, 0));
tsd.expectType<number>(read.i16(0, 0));
tsd.expectType<number>(read.u32(0, 0));
tsd.expectType<number>(read.i32(0, 0));
tsd.expectType<bigint>(read.u64(0, 0));
tsd.expectType<bigint>(read.i64(0, 0));
tsd.expectType<number>(read.f32(0, 0));
tsd.expectType<number>(read.f64(0, 0));
tsd.expectType<number>(read.ptr(0, 0));
tsd.expectType<number>(read.intptr(0, 0));
@@ -1,4 +1,4 @@
import { Changes, Database } from "bun:sqlite";
import { Database } from "bun:sqlite";
import { expectType } from "./utilities.test";

const db = new Database(":memory:");
@@ -22,7 +22,7 @@ expectType<Array<{ name: string; dob: number }>>(allResults);
expectType<{ name: string; dob: number } | null>(getResults);
// tslint:disable-next-line:invalid-void
// eslint-disable-next-line @typescript-eslint/no-invalid-void-type
expectType<Changes>(runResults);
expectType<void>(runResults);

const query3 = db.prepare<
  { name: string; dob: number }, // return type first
@@ -1,14 +1,16 @@
{
  "extends": "../../tsconfig.base.json",
  "compilerOptions": {
    "lib": ["ESNext"],
    "skipLibCheck": false,

    "declaration": true,
    "emitDeclarationOnly": true,
    "noEmit": false,
    "declarationDir": "out"
    "strict": true,
    "target": "esnext",
    "module": "esnext",
    "moduleResolution": "node",
    "allowSyntheticDefaultImports": true,
    "disableSolutionSearching": true,
    "noUnusedLocals": true,
    "noEmit": true,
    "resolveJsonModule": true
  },
  "files": ["ambient.d.ts"], // ambient defines .txt and .toml loaders
  "include": ["**/*.ts"],
  "exclude": ["dist", "node_modules"]
}
@@ -304,21 +304,13 @@ static LIBUS_SOCKET_DESCRIPTOR win32_set_nonblocking(LIBUS_SOCKET_DESCRIPTOR fd)
}

LIBUS_SOCKET_DESCRIPTOR bsd_set_nonblocking(LIBUS_SOCKET_DESCRIPTOR fd) {
/* Libuv will set windows sockets as non-blocking */
#ifndef _WIN32
    if (LIKELY(fd != LIBUS_SOCKET_ERROR)) {
        int flags = fcntl(fd, F_GETFL, 0);

        // F_GETFL supports O_NONBLOCK
        fcntl(fd, F_SETFL, flags | O_NONBLOCK);

        flags = fcntl(fd, F_GETFD, 0);

        // F_GETFD supports FD_CLOEXEC
        fcntl(fd, F_SETFD, flags | FD_CLOEXEC);
    }
#ifdef _WIN32
/* Libuv will set windows sockets as non-blocking */
#elif defined(__APPLE__)
    fcntl(fd, F_SETFL, fcntl(fd, F_GETFL, 0) | O_NONBLOCK | O_CLOEXEC);
#else
    fcntl(fd, F_SETFL, fcntl(fd, F_GETFL, 0) | O_NONBLOCK);
#endif

    return fd;
}

@@ -403,27 +395,12 @@ void bsd_socket_flush(LIBUS_SOCKET_DESCRIPTOR fd) {
}

LIBUS_SOCKET_DESCRIPTOR bsd_create_socket(int domain, int type, int protocol) {
    LIBUS_SOCKET_DESCRIPTOR created_fd;
#if defined(SOCK_CLOEXEC) && defined(SOCK_NONBLOCK)
    const int flags = SOCK_CLOEXEC | SOCK_NONBLOCK;
    do {
        created_fd = socket(domain, type | flags, protocol);
    } while (IS_EINTR(created_fd));

    if (UNLIKELY(created_fd == -1)) {
        return LIBUS_SOCKET_ERROR;
    }

    int flags = SOCK_CLOEXEC | SOCK_NONBLOCK;
    LIBUS_SOCKET_DESCRIPTOR created_fd = socket(domain, type | flags, protocol);
    return apple_no_sigpipe(created_fd);
#else
    do {
        created_fd = socket(domain, type, protocol);
    } while (IS_EINTR(created_fd));

    if (UNLIKELY(created_fd == -1)) {
        return LIBUS_SOCKET_ERROR;
    }

    LIBUS_SOCKET_DESCRIPTOR created_fd = socket(domain, type, protocol);
    return bsd_set_nonblocking(apple_no_sigpipe(created_fd));
#endif
}
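The `do { … } while (IS_EINTR(…))` loops added in this hunk retry `socket()` when a signal interrupts the call before a descriptor is returned. For readers following along from the Rust side of this tree, a minimal sketch of the same retry-on-EINTR idiom using the `libc` crate (a parallel illustration, not Bun's code):

```rs
/// Retry a socket(2) call that reports failure as -1 with errno == EINTR.
fn socket_retrying(domain: i32, ty: i32, protocol: i32) -> Option<i32> {
    loop {
        let fd = unsafe { libc::socket(domain, ty, protocol) };
        if fd != -1 {
            return Some(fd);
        }
        // last_os_error() reads errno; anything but EINTR is a real failure.
        if std::io::Error::last_os_error().raw_os_error() != Some(libc::EINTR) {
            return None;
        }
        // EINTR: interrupted by a signal before completing; try again.
    }
}
```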
|
||||
@@ -73,10 +73,6 @@ public:
|
||||
DROPPED
|
||||
};
|
||||
|
||||
size_t memoryCost() {
|
||||
return getBufferedAmount() + sizeof(WebSocket);
|
||||
}
|
||||
|
||||
/* Sending fragmented messages puts a bit of effort on the user; you must not interleave regular sends
|
||||
* with fragmented sends and you must sendFirstFragment, [sendFragment], then finally sendLastFragment. */
|
||||
SendStatus sendFirstFragment(std::string_view message, OpCode opCode = OpCode::BINARY, bool compress = false) {
|
||||
|
||||
@@ -31,7 +31,7 @@ At its core is the _Bun runtime_, a fast JavaScript runtime designed as a drop-i

When running programs with Bun from a Visual Studio Code terminal, Bun will connect to the extension and report errors as they happen, at the exact location they happened. We recommend using this feature with `bun --watch` so you can see errors on every save.




<div align="center">
<sup>In the example above VSCode is saving on every keypress. Under normal configuration you'd only see errors on every save.</sup>
@@ -1,6 +0,0 @@
{
  "$schema": "http://json-schema.org/draft-04/schema#",
  "title": "JSON schema for bun.lock files.",
  "allowTrailingCommas": true,
  "type": "object"
}
@@ -1,208 +0,0 @@
{
  "lockfileVersion": 0,
  "workspaces": {
    "": {
      "dependencies": {
        "axios": "^1.7.7",
        "elysia": "^0.6.3",
        "express": "^4.18.2",
        "mime": "^3.0.0",
        "mime-db": "^1.52.0",
        "react": "^0.0.0-experimental-380f5d67-20241113",
        "react-dom": "^0.0.0-experimental-380f5d67-20241113",
        "react-refresh": "^0.0.0-experimental-380f5d67-20241113",
        "react-server-dom-bun": "^0.0.0-experimental-603e6108-20241029",
        "react-server-dom-webpack": "^0.0.0-experimental-380f5d67-20241113",
      },
      "devDependencies": {
        "@types/bun": "latest",
      },
      "peerDependencies": {
        "typescript": "^5.0.0",
      },
    },
  },
  "trustedDependencies": [
    "mime",
  ],
  "packages": {
    "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.8", "", { "dependencies": { "@jridgewell/set-array": "^1.2.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA=="],
    "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="],
    "@jridgewell/set-array": ["@jridgewell/set-array@1.2.1", "", {}, "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A=="],
    "@jridgewell/source-map": ["@jridgewell/source-map@0.3.6", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25" } }, "sha512-1ZJTZebgqllO79ue2bm3rIGud/bOe0pP5BjSRCRxxYkEZS8STV7zN84UBbiYu7jy+eCKSnVIUgoWWE/tt+shMQ=="],
    "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.0", "", {}, "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ=="],
    "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="],
    "@sinclair/typebox": ["@sinclair/typebox@0.30.4", "", {}, "sha512-wFuuDR+O1OAE2GL0q68h1Ty00RE6Ihcixr55A6TU5RCvOUHnwJw9LGuDVg9NxDiAp7m/YJpa+UaOuLAz0ziyOQ=="],
    "@types/bun": ["@types/bun@1.1.14", "", { "dependencies": { "bun-types": "1.1.37" } }, "sha512-opVYiFGtO2af0dnWBdZWlioLBoxSdDO5qokaazLhq8XQtGZbY4pY3/JxY8Zdf/hEwGubbp7ErZXoN1+h2yesxA=="],
    "@types/eslint": ["@types/eslint@9.6.1", "", { "dependencies": { "@types/estree": "*", "@types/json-schema": "*" } }, "sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag=="],
    "@types/eslint-scope": ["@types/eslint-scope@3.7.7", "", { "dependencies": { "@types/eslint": "*", "@types/estree": "*" } }, "sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg=="],
    "@types/estree": ["@types/estree@1.0.6", "", {}, "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw=="],
    "@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="],
    "@types/node": ["@types/node@20.12.14", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-scnD59RpYD91xngrQQLGkE+6UrHUPzeKZWhhjBSa3HSkwjbQc38+q3RoIVEwxQGRw3M+j5hpNAM+lgV3cVormg=="],
    "@types/ws": ["@types/ws@8.5.13", "", { "dependencies": { "@types/node": "*" } }, "sha512-osM/gWBTPKgHV8XkTunnegTRIsvF6owmf5w+JtAfOw472dptdm0dlGv4xCt6GwQRcC2XVOvvRE/0bAoQcL2QkA=="],
    "@webassemblyjs/ast": ["@webassemblyjs/ast@1.14.1", "", { "dependencies": { "@webassemblyjs/helper-numbers": "1.13.2", "@webassemblyjs/helper-wasm-bytecode": "1.13.2" } }, "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ=="],
    "@webassemblyjs/floating-point-hex-parser": ["@webassemblyjs/floating-point-hex-parser@1.13.2", "", {}, "sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA=="],
    "@webassemblyjs/helper-api-error": ["@webassemblyjs/helper-api-error@1.13.2", "", {}, "sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ=="],
    "@webassemblyjs/helper-buffer": ["@webassemblyjs/helper-buffer@1.14.1", "", {}, "sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA=="],
    "@webassemblyjs/helper-numbers": ["@webassemblyjs/helper-numbers@1.13.2", "", { "dependencies": { "@webassemblyjs/floating-point-hex-parser": "1.13.2", "@webassemblyjs/helper-api-error": "1.13.2", "@xtuc/long": "4.2.2" } }, "sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA=="],
    "@webassemblyjs/helper-wasm-bytecode": ["@webassemblyjs/helper-wasm-bytecode@1.13.2", "", {}, "sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA=="],
    "@webassemblyjs/helper-wasm-section": ["@webassemblyjs/helper-wasm-section@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-buffer": "1.14.1", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/wasm-gen": "1.14.1" } }, "sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw=="],
    "@webassemblyjs/ieee754": ["@webassemblyjs/ieee754@1.13.2", "", { "dependencies": { "@xtuc/ieee754": "^1.2.0" } }, "sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw=="],
    "@webassemblyjs/leb128": ["@webassemblyjs/leb128@1.13.2", "", { "dependencies": { "@xtuc/long": "4.2.2" } }, "sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw=="],
    "@webassemblyjs/utf8": ["@webassemblyjs/utf8@1.13.2", "", {}, "sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ=="],
    "@webassemblyjs/wasm-edit": ["@webassemblyjs/wasm-edit@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-buffer": "1.14.1", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/helper-wasm-section": "1.14.1", "@webassemblyjs/wasm-gen": "1.14.1", "@webassemblyjs/wasm-opt": "1.14.1", "@webassemblyjs/wasm-parser": "1.14.1", "@webassemblyjs/wast-printer": "1.14.1" } }, "sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ=="],
    "@webassemblyjs/wasm-gen": ["@webassemblyjs/wasm-gen@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/ieee754": "1.13.2", "@webassemblyjs/leb128": "1.13.2", "@webassemblyjs/utf8": "1.13.2" } }, "sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg=="],
    "@webassemblyjs/wasm-opt": ["@webassemblyjs/wasm-opt@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-buffer": "1.14.1", "@webassemblyjs/wasm-gen": "1.14.1", "@webassemblyjs/wasm-parser": "1.14.1" } }, "sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw=="],
    "@webassemblyjs/wasm-parser": ["@webassemblyjs/wasm-parser@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@webassemblyjs/helper-api-error": "1.13.2", "@webassemblyjs/helper-wasm-bytecode": "1.13.2", "@webassemblyjs/ieee754": "1.13.2", "@webassemblyjs/leb128": "1.13.2", "@webassemblyjs/utf8": "1.13.2" } }, "sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ=="],
    "@webassemblyjs/wast-printer": ["@webassemblyjs/wast-printer@1.14.1", "", { "dependencies": { "@webassemblyjs/ast": "1.14.1", "@xtuc/long": "4.2.2" } }, "sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw=="],
    "@xtuc/ieee754": ["@xtuc/ieee754@1.2.0", "", {}, "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA=="],
    "@xtuc/long": ["@xtuc/long@4.2.2", "", {}, "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ=="],
    "accepts": ["accepts@1.3.8", "", { "dependencies": { "mime-types": "~2.1.34", "negotiator": "0.6.3" } }, "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw=="],
    "acorn": ["acorn@8.14.0", "", {}, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="],
    "acorn-loose": ["acorn-loose@8.4.0", "", { "dependencies": { "acorn": "^8.11.0" } }, "sha512-M0EUka6rb+QC4l9Z3T0nJEzNOO7JcoJlYMrBlyBCiFSXRyxjLKayd4TbQs2FDRWQU1h9FR7QVNHt+PEaoNL5rQ=="],
    "ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="],
    "ajv-keywords": ["ajv-keywords@3.5.2", "", { "peerDependencies": { "ajv": "^6.9.1" } }, "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ=="],
    "array-flatten": ["array-flatten@1.1.1", "", {}, "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg=="],
    "asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
    "axios": ["axios@1.7.9", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } }, "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw=="],
    "body-parser": ["body-parser@1.20.3", "", { "dependencies": { "bytes": "3.1.2", "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.13.0", "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" } }, "sha512-7rAxByjUMqQ3/bHJy7D6OGXvx/MMc4IqBn/X0fcM1QUcAItpZrBEYhWGem+tzXH90c+G01ypMcYJBO9Y30203g=="],
    "browserslist": ["browserslist@4.24.2", "", { "dependencies": { "caniuse-lite": "^1.0.30001669", "electron-to-chromium": "^1.5.41", "node-releases": "^2.0.18", "update-browserslist-db": "^1.1.1" } }, "sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg=="],
    "buffer-from": ["buffer-from@1.1.2", "", {}, "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="],
    "bun-types": ["bun-types@1.1.37", "", { "dependencies": { "@types/node": "~20.12.8", "@types/ws": "~8.5.10" } }, "sha512-C65lv6eBr3LPJWFZ2gswyrGZ82ljnH8flVE03xeXxKhi2ZGtFiO4isRKTKnitbSqtRAcaqYSR6djt1whI66AbA=="],
    "bytes": ["bytes@3.1.2", "", {}, "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg=="],
    "call-bind": ["call-bind@1.0.8", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-define-property": "^1.0.0", "get-intrinsic": "^1.2.4", "set-function-length": "^1.2.2" } }, "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww=="],
    "call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.1", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g=="],
    "call-bound": ["call-bound@1.0.2", "", { "dependencies": { "call-bind": "^1.0.8", "get-intrinsic": "^1.2.5" } }, "sha512-0lk0PHFe/uz0vl527fG9CgdE9WdafjDbCXvBbs+LUv000TVt2Jjhqbs4Jwm8gz070w8xXyEAxrPOMullsxXeGg=="],
    "caniuse-lite": ["caniuse-lite@1.0.30001688", "", {}, "sha512-Nmqpru91cuABu/DTCXbM2NSRHzM2uVHfPnhJ/1zEAJx/ILBRVmz3pzH4N7DZqbdG0gWClsCC05Oj0mJ/1AWMbA=="],
    "chrome-trace-event": ["chrome-trace-event@1.0.4", "", {}, "sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ=="],
    "combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="],
    "commander": ["commander@2.20.3", "", {}, "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="],
    "content-disposition": ["content-disposition@0.5.4", "", { "dependencies": { "safe-buffer": "5.2.1" } }, "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ=="],
    "content-type": ["content-type@1.0.5", "", {}, "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="],
    "cookie": ["cookie@0.7.1", "", {}, "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w=="],
    "cookie-signature": ["cookie-signature@1.0.6", "", {}, "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ=="],
    "debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA=="],
    "define-data-property": ["define-data-property@1.1.4", "", { "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", "gopd": "^1.0.1" } }, "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A=="],
    "delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
    "depd": ["depd@2.0.0", "", {}, "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw=="],
    "destroy": ["destroy@1.2.0", "", {}, "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg=="],
    "dunder-proto": ["dunder-proto@1.0.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.0", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-9+Sj30DIu+4KvHqMfLUGLFYL2PkURSYMVXJyXe92nFRvlYq5hBjLEhblKB+vkd/WVlUYMWigiY07T91Fkk0+4A=="],
    "ee-first": ["ee-first@1.1.1", "", {}, "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow=="],
    "electron-to-chromium": ["electron-to-chromium@1.5.73", "", {}, "sha512-8wGNxG9tAG5KhGd3eeA0o6ixhiNdgr0DcHWm85XPCphwZgD1lIEoi6t3VERayWao7SF7AAZTw6oARGJeVjH8Kg=="],
    "elysia": ["elysia@0.6.24", "", { "dependencies": { "@sinclair/typebox": "^0.30.4", "fast-querystring": "^1.1.2", "memoirist": "0.1.4", "mergician": "^1.1.0", "openapi-types": "^12.1.3" }, "peerDependencies": { "typescript": ">= 5.0.0" }, "optionalPeerDependencies": ["typescript"] }, "sha512-qaN8b816tSecNIsgNwFCMOMlayOaChme9i/VHxCRZyPTgtdAAnrYDZaUQfatyt1jcHUdkf3IT4ny5GuS7NB26w=="],
    "encodeurl": ["encodeurl@2.0.0", "", {}, "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg=="],
    "enhanced-resolve": ["enhanced-resolve@5.17.1", "", { "dependencies": { "graceful-fs": "^4.2.4", "tapable": "^2.2.0" } }, "sha512-LMHl3dXhTcfv8gM4kEzIUeTQ+7fpdA0l2tUf34BddXPkz2A5xJ5L/Pchd5BL6rdccM9QGvu0sWZzK1Z1t4wwyg=="],
    "es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="],
    "es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="],
    "es-module-lexer": ["es-module-lexer@1.5.4", "", {}, "sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw=="],
    "es-object-atoms": ["es-object-atoms@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw=="],
    "escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="],
    "escape-html": ["escape-html@1.0.3", "", {}, "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow=="],
    "eslint-scope": ["eslint-scope@5.1.1", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^4.1.1" } }, "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw=="],
    "esrecurse": ["esrecurse@4.3.0", "", { "dependencies": { "estraverse": "^5.2.0" } }, "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="],
    "estraverse": ["estraverse@4.3.0", "", {}, "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw=="],
    "etag": ["etag@1.8.1", "", {}, "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="],
    "events": ["events@3.3.0", "", {}, "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q=="],
    "express": ["express@4.21.2", "", { "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "etag": "~1.8.1", "finalhandler": "1.3.1", "fresh": "0.5.2", "http-errors": "2.0.0", "merge-descriptors": "1.0.3", "methods": "~1.1.2", "on-finished": "2.4.1", "parseurl": "~1.3.3", "path-to-regexp": "0.1.12", "proxy-addr": "~2.0.7", "qs": "6.13.0", "range-parser": "~1.2.1", "safe-buffer": "5.2.1", "send": "0.19.0", "serve-static": "1.16.2", "setprototypeof": "1.2.0", "statuses": "2.0.1", "type-is": "~1.6.18", "utils-merge": "1.0.1", "vary": "~1.1.2" } }, "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA=="],
    "fast-decode-uri-component": ["fast-decode-uri-component@1.0.1", "", {}, "sha512-WKgKWg5eUxvRZGwW8FvfbaH7AXSh2cL+3j5fMGzUMCxWBJ3dV3a7Wz8y2f/uQ0e3B6WmodD3oS54jTQ9HVTIIg=="],
    "fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
    "fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="],
    "fast-querystring": ["fast-querystring@1.1.2", "", { "dependencies": { "fast-decode-uri-component": "^1.0.1" } }, "sha512-g6KuKWmFXc0fID8WWH0jit4g0AGBoJhCkJMb1RmbsSEUNvQ+ZC8D6CUZ+GtF8nMzSPXnhiePyyqqipzNNEnHjg=="],
    "finalhandler": ["finalhandler@1.3.1", "", { "dependencies": { "debug": "2.6.9", "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "on-finished": "2.4.1", "parseurl": "~1.3.3", "statuses": "2.0.1", "unpipe": "~1.0.0" } }, "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ=="],
    "follow-redirects": ["follow-redirects@1.15.9", "", {}, "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ=="],
    "form-data": ["form-data@4.0.1", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "mime-types": "^2.1.12" } }, "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw=="],
    "forwarded": ["forwarded@0.2.0", "", {}, "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow=="],
    "fresh": ["fresh@0.5.2", "", {}, "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q=="],
    "function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
    "get-intrinsic": ["get-intrinsic@1.2.6", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "dunder-proto": "^1.0.0", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.0.0", "function-bind": "^1.1.2", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.0.0" } }, "sha512-qxsEs+9A+u85HhllWJJFicJfPDhRmjzoYdl64aMWW9yRIJmSyxdn8IEkuIM530/7T+lv0TIHd8L6Q/ra0tEoeA=="],
    "glob-to-regexp": ["glob-to-regexp@0.4.1", "", {}, "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw=="],
    "gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="],
    "graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
    "has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="],
    "has-property-descriptors": ["has-property-descriptors@1.0.2", "", { "dependencies": { "es-define-property": "^1.0.0" } }, "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg=="],
    "has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="],
    "hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
    "http-errors": ["http-errors@2.0.0", "", { "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ=="],
    "iconv-lite": ["iconv-lite@0.4.24", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3" } }, "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA=="],
    "inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="],
    "ipaddr.js": ["ipaddr.js@1.9.1", "", {}, "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="],
    "jest-worker": ["jest-worker@27.5.1", "", { "dependencies": { "@types/node": "*", "merge-stream": "^2.0.0", "supports-color": "^8.0.0" } }, "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg=="],
    "js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
    "json-parse-even-better-errors": ["json-parse-even-better-errors@2.3.1", "", {}, "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w=="],
    "json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="],
    "loader-runner": ["loader-runner@4.3.0", "", {}, "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg=="],
    "loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
    "math-intrinsics": ["math-intrinsics@1.0.0", "", {}, "sha512-4MqMiKP90ybymYvsut0CH2g4XWbfLtmlCkXmtmdcDCxNB+mQcu1w/1+L/VD7vi/PSv7X2JYV7SCcR+jiPXnQtA=="],
    "media-typer": ["media-typer@0.3.0", "", {}, "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ=="],
    "memoirist": ["memoirist@0.1.4", "", {}, "sha512-D6GbPSqO2nUVOmm7VZjJc5tC60pkOVUPzLwkKl1vCiYP+2b1cG8N9q1O3P0JmNM68u8vsgefPbxRUCSGxSXD+g=="],
    "merge-descriptors": ["merge-descriptors@1.0.3", "", {}, "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ=="],
||||
"merge-descriptors": ["merge-descriptors@1.0.3", "", {}, "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ=="],
|
||||
"merge-stream": ["merge-stream@2.0.0", "", {}, "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="],
|
||||
"mergician": ["mergician@1.1.0", "", {}, "sha512-FXbxzU6BBhGkV8XtUr8Sk015ZRaAALviit8Lle6OEgd1udX8wlu6tBeUMLGQGdz1MfHpAVNNQkXowyDnJuhXpA=="],
|
||||
"methods": ["methods@1.1.2", "", {}, "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w=="],
|
||||
"mime": ["mime@3.0.0", "", {}, "sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A=="],
|
||||
"mime-db": ["mime-db@1.53.0", "", {}, "sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg=="],
|
||||
"mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
|
||||
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||
"negotiator": ["negotiator@0.6.3", "", {}, "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="],
|
||||
"neo-async": ["neo-async@2.6.2", "", {}, "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw=="],
|
||||
"node-releases": ["node-releases@2.0.19", "", {}, "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw=="],
|
||||
"object-assign": ["object-assign@4.1.1", "", {}, "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg=="],
|
||||
"object-inspect": ["object-inspect@1.13.3", "", {}, "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA=="],
|
||||
"on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg=="],
|
||||
"openapi-types": ["openapi-types@12.1.3", "", {}, "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw=="],
|
||||
"parseurl": ["parseurl@1.3.3", "", {}, "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="],
|
||||
"path-to-regexp": ["path-to-regexp@0.1.12", "", {}, "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ=="],
|
||||
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
|
||||
"prop-types": ["prop-types@15.8.1", "", { "dependencies": { "loose-envify": "^1.4.0", "object-assign": "^4.1.1", "react-is": "^16.13.1" } }, "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg=="],
|
||||
"proxy-addr": ["proxy-addr@2.0.7", "", { "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" } }, "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg=="],
|
||||
"proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="],
|
||||
"punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="],
|
||||
"qs": ["qs@6.13.0", "", { "dependencies": { "side-channel": "^1.0.6" } }, "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg=="],
|
||||
"randombytes": ["randombytes@2.1.0", "", { "dependencies": { "safe-buffer": "^5.1.0" } }, "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ=="],
|
||||
"range-parser": ["range-parser@1.2.1", "", {}, "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg=="],
|
||||
"raw-body": ["raw-body@2.5.2", "", { "dependencies": { "bytes": "3.1.2", "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" } }, "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA=="],
|
||||
"react": ["react@0.0.0-fec00a869", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "prop-types": "^15.6.2", "scheduler": "0.0.0-fec00a869" } }, "sha512-FaS3ViFU4ag7cuhDHQgGK3DAdWaD8YFXzEbO/Qzz33Si7VEzRRdnyoegFwg7VkEKxR6CvCVP6revi9Tm3Gq+WQ=="],
|
||||
"react-dom": ["react-dom@0.0.0-fec00a869", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1", "prop-types": "^15.6.2", "scheduler": "0.0.0-fec00a869" }, "peerDependencies": { "react": "0.0.0-fec00a869" } }, "sha512-atB5i2HgCvbvhtGXq9oaX/BCL2AFZjnccougU8S9eulRFNQbNrfGNwIcj04PRo3XU1ZsBw5syL/5l596UaolKA=="],
|
||||
"react-is": ["react-is@16.13.1", "", {}, "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="],
|
||||
"react-refresh": ["react-refresh@0.0.0-f77c7b9d7", "", {}, "sha512-mErwv0xcQz2sYnCJPaQ93D23Irnrfo5c+wG2k2KAgWOvFfqXPQdIUZ1j9S+gKYQI2kqgd0fdTJchEJydqroyJw=="],
|
||||
"react-server-dom-bun": ["react-server-dom-bun@0.0.0-experimental-603e6108-20241029", "", { "dependencies": { "neo-async": "^2.6.1" } }, "sha512-FfteCHlOgJSnDJRatgIkIU74jQQ9M1+fH2e6kfY9Sibu8FAWEUjgApKQPDfiXgjrkY7w0ITQu0b2FezC0eGzCw=="],
|
||||
"react-server-dom-webpack": ["react-server-dom-webpack@0.0.0-experimental-feed8f3f9-20240118", "", { "dependencies": { "acorn-loose": "^8.3.0", "loose-envify": "^1.1.0", "neo-async": "^2.6.1" }, "peerDependencies": { "react": "0.0.0-experimental-feed8f3f9-20240118", "react-dom": "0.0.0-experimental-feed8f3f9-20240118", "webpack": "^5.59.0" } }, "sha512-9+gS3ydJF5aYwKkvfzN+DtHfICzvQ+gYGv+2MVZo65gDSit1wC0vwOd0YebHqJNC2JruND+nEyd7wQAYmVdAZA=="],
|
||||
"safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
|
||||
"safer-buffer": ["safer-buffer@2.1.2", "", {}, "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="],
|
||||
"scheduler": ["scheduler@0.0.0-fec00a869", "", { "dependencies": { "loose-envify": "^1.1.0", "object-assign": "^4.1.1" } }, "sha512-0U25jnyBP6dRPYwaVW4WMYB0jJSYlrIHFmIuXv27X+KIHJr7vyE9gcFTqZ61NQTuxYLYepAHnUs4KgQEUDlI+g=="],
|
||||
"schema-utils": ["schema-utils@3.3.0", "", { "dependencies": { "@types/json-schema": "^7.0.8", "ajv": "^6.12.5", "ajv-keywords": "^3.5.2" } }, "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg=="],
|
||||
"send": ["send@0.19.0", "", { "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", "http-errors": "2.0.0", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "2.4.1", "range-parser": "~1.2.1", "statuses": "2.0.1" } }, "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw=="],
|
||||
"serialize-javascript": ["serialize-javascript@6.0.2", "", { "dependencies": { "randombytes": "^2.1.0" } }, "sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g=="],
|
||||
"serve-static": ["serve-static@1.16.2", "", { "dependencies": { "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", "send": "0.19.0" } }, "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw=="],
|
||||
"set-function-length": ["set-function-length@1.2.2", "", { "dependencies": { "define-data-property": "^1.1.4", "es-errors": "^1.3.0", "function-bind": "^1.1.2", "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", "has-property-descriptors": "^1.0.2" } }, "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg=="],
|
||||
"setprototypeof": ["setprototypeof@1.2.0", "", {}, "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw=="],
|
||||
"side-channel": ["side-channel@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": "^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="],
|
||||
"side-channel-list": ["side-channel-list@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3" } }, "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA=="],
|
||||
"side-channel-map": ["side-channel-map@1.0.1", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3" } }, "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA=="],
|
||||
"side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="],
|
||||
"source-map": ["source-map@0.6.1", "", {}, "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="],
|
||||
"source-map-support": ["source-map-support@0.5.21", "", { "dependencies": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w=="],
|
||||
"statuses": ["statuses@2.0.1", "", {}, "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ=="],
|
||||
"supports-color": ["supports-color@8.1.1", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q=="],
|
||||
"tapable": ["tapable@2.2.1", "", {}, "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ=="],
|
||||
"terser": ["terser@5.37.0", "", { "dependencies": { "@jridgewell/source-map": "^0.3.3", "acorn": "^8.8.2", "commander": "^2.20.0", "source-map-support": "~0.5.20" } }, "sha512-B8wRRkmre4ERucLM/uXx4MOV5cbnOlVAqUst+1+iLKPI0dOgFO28f84ptoQt9HEI537PMzfYa/d+GEPKTRXmYA=="],
|
||||
"terser-webpack-plugin": ["terser-webpack-plugin@5.3.10", "", { "dependencies": { "@jridgewell/trace-mapping": "^0.3.20", "jest-worker": "^27.4.5", "schema-utils": "^3.1.1", "serialize-javascript": "^6.0.1", "terser": "^5.26.0" }, "peerDependencies": { "webpack": "^5.1.0" } }, "sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w=="],
|
||||
"toidentifier": ["toidentifier@1.0.1", "", {}, "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA=="],
|
||||
"type-is": ["type-is@1.6.18", "", { "dependencies": { "media-typer": "0.3.0", "mime-types": "~2.1.24" } }, "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g=="],
|
||||
"typescript": ["typescript@5.7.2", "", {}, "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg=="],
|
||||
"undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
|
||||
"unpipe": ["unpipe@1.0.0", "", {}, "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ=="],
|
||||
"update-browserslist-db": ["update-browserslist-db@1.1.1", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.0" }, "peerDependencies": { "browserslist": ">= 4.21.0" } }, "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A=="],
|
||||
"uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="],
|
||||
"utils-merge": ["utils-merge@1.0.1", "", {}, "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA=="],
|
||||
"vary": ["vary@1.1.2", "", {}, "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg=="],
|
||||
"watchpack": ["watchpack@2.4.2", "", { "dependencies": { "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.1.2" } }, "sha512-TnbFSbcOCcDgjZ4piURLCbJ3nJhznVh9kw6F6iokjiFPl8ONxe9A6nMDVXDiNbrSfLILs6vB07F7wLBrwPYzJw=="],
|
||||
"webpack": ["webpack@5.97.1", "", { "dependencies": { "@types/eslint-scope": "^3.7.7", "@types/estree": "^1.0.6", "@webassemblyjs/ast": "^1.14.1", "@webassemblyjs/wasm-edit": "^1.14.1", "@webassemblyjs/wasm-parser": "^1.14.1", "acorn": "^8.14.0", "browserslist": "^4.24.0", "chrome-trace-event": "^1.0.2", "enhanced-resolve": "^5.17.1", "es-module-lexer": "^1.2.1", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", "graceful-fs": "^4.2.11", "json-parse-even-better-errors": "^2.3.1", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", "neo-async": "^2.6.2", "schema-utils": "^3.2.0", "tapable": "^2.1.1", "terser-webpack-plugin": "^5.3.10", "watchpack": "^2.4.1", "webpack-sources": "^3.2.3" } }, "sha512-EksG6gFY3L1eFMROS/7Wzgrii5mBAFe4rIr3r2BTfo7bcc+DWwFZ4OJ/miOuHJO/A85HwyI4eQ0F6IKXesO7Fg=="],
|
||||
"webpack-sources": ["webpack-sources@3.2.3", "", {}, "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w=="],
|
||||
"send/encodeurl": ["encodeurl@1.0.2", "", {}, "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w=="],
|
||||
"send/mime": ["mime@1.6.0", "", {}, "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg=="],
|
||||
"debug/ms": ["ms@2.0.0", "", {}, "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A=="],
|
||||
"mime-types/mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
|
||||
"esrecurse/estraverse": ["estraverse@5.3.0", "", {}, "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="],
|
||||
}
|
||||
}
|
||||
Binary file not shown.
@@ -1,6 +1,6 @@
{
"name": "bun-vscode",
"version": "0.0.25",
"version": "0.0.22",
"author": "oven",
"repository": {
"type": "git",
@@ -289,29 +289,19 @@
"aliases": [
"bun.lockb"
],
"filenames": [
"bun.lockb"
"extensions": [
".lockb"
],
"icon": {
"dark": "assets/icon-small.png",
"light": "assets/icon-small.png"
}
},
{
"id": "jsonc",
"filenames": [
"bun.lock"
]
}
],
"jsonValidation": [
{
"fileMatch": "package.json",
"url": "./assets/package.json"
},
{
"fileMatch": "bun.lock",
"url": "./assets/bun.lock.json"
}
],
"customEditors": [
@@ -320,7 +310,7 @@
"displayName": "bun.lockb",
"selector": [
{
"filenamePattern": "*bun.lockb"
"filenamePattern": "*.lockb"
}
],
"priority": "default"
@@ -342,7 +332,7 @@
]
},
"description": "The Visual Studio Code extension for Bun.",
"displayName": "Bun for Visual Studio Code",
"displayName": "Bun",
"engines": {
"vscode": "^1.60.0"
},

@@ -64,11 +64,8 @@ export function registerDebugger(context: vscode.ExtensionContext, factory?: vsc
vscode.DebugConfigurationProviderTriggerKind.Dynamic,
),
vscode.debug.registerDebugAdapterDescriptorFactory("bun", factory ?? new InlineDebugAdapterFactory()),
vscode.window.onDidOpenTerminal(injectDebugTerminal),
);

if (getConfig("debugTerminal.enabled")) {
injectDebugTerminal2().then(disposable => context.subscriptions.push(disposable))
}
}

function runFileCommand(resource?: vscode.Uri): void {
@@ -97,6 +94,8 @@ function debugFileCommand(resource?: vscode.Uri) {
}

async function injectDebugTerminal(terminal: vscode.Terminal): Promise<void> {
if (!getConfig("debugTerminal.enabled")) return;

const { name, creationOptions } = terminal;
if (name !== "JavaScript Debug Terminal") {
return;
@@ -135,41 +134,6 @@ async function injectDebugTerminal(terminal: vscode.Terminal): Promise<void> {
setTimeout(() => terminal.dispose(), 100);
}

async function injectDebugTerminal2() {
const jsDebugExt = vscode.extensions.getExtension('ms-vscode.js-debug-nightly') || vscode.extensions.getExtension('ms-vscode.js-debug');
if (!jsDebugExt) {
return vscode.window.onDidOpenTerminal(injectDebugTerminal)
}

await jsDebugExt.activate()
const jsDebug: import('@vscode/js-debug').IExports = jsDebugExt.exports;
if (!jsDebug) {
return vscode.window.onDidOpenTerminal(injectDebugTerminal)
}

return jsDebug.registerDebugTerminalOptionsProvider({
async provideTerminalOptions(options) {
const session = new TerminalDebugSession();
await session.initialize();

const { adapter, signal } = session;

const stopOnEntry = getConfig("debugTerminal.stopOnEntry") === true;
const query = stopOnEntry ? "break=1" : "wait=1";

return {
...options,
env: {
...options.env,
"BUN_INSPECT": `${adapter.url}?${query}`,
"BUN_INSPECT_NOTIFY": signal.url,
BUN_INSPECT_CONNECT_TO: " ",
},
};
},
});
}

class DebugConfigurationProvider implements vscode.DebugConfigurationProvider {
provideDebugConfigurations(folder?: vscode.WorkspaceFolder): vscode.ProviderResult<vscode.DebugConfiguration[]> {
return [DEBUG_CONFIGURATION, RUN_CONFIGURATION, ATTACH_CONFIGURATION];
@@ -331,7 +295,7 @@ class FileDebugSession extends DebugSession {
}

this.adapter.on("Adapter.reverseRequest", ({ command, arguments: args }) =>
this.sendRequest(command, args, 5000, () => { }),
this.sendRequest(command, args, 5000, () => {}),
);

adapters.set(url, this);

39 packages/bun-vscode/src/vscode-js-debug.d.ts (vendored)
@@ -1,39 +0,0 @@
/*---------------------------------------------------------
* Copyright (C) Microsoft Corporation. All rights reserved.
*--------------------------------------------------------*/

declare module '@vscode/js-debug' {
import type * as vscode from 'vscode';

/** @see {IExports.registerDebugTerminalOptionsProvider} */
export interface IDebugTerminalOptionsProvider {
/**
* Called when the user creates a JavaScript Debug Terminal. It's called
* with the options js-debug wants to use to create the terminal. It should
* modify and return the options to use in the terminal.
*
* In order to avoid conflicting with existing logic, participants should
* try to modify options in an additive way. For example, prefer appending
* to rather than reading and overwriting `options.env.PATH`.
*/
provideTerminalOptions(options: vscode.TerminalOptions): vscode.ProviderResult<vscode.TerminalOptions>;
}

/**
* Defines the exports of the `js-debug` extension. Once you have this typings
* file, these can be acquired in your extension using the following code:
*
* ```
* const jsDebugExt = vscode.extensions.getExtension('ms-vscode.js-debug-nightly')
* || vscode.extensions.getExtension('ms-vscode.js-debug');
* await jsDebugExt.activate()
* const jsDebug: import('@vscode/js-debug').IExports = jsDebugExt.exports;
* ```
*/
export interface IExports {
/**
* Registers a participant used when the user creates a JavaScript Debug Terminal.
*/
registerDebugTerminalOptionsProvider(provider: IDebugTerminalOptionsProvider): vscode.Disposable;
}
}
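
For illustration, a minimal sketch of a provider that follows the additive guidance above. The extension IDs and API shape come from the typings; the `BUN_EXAMPLE` variable and the surrounding `activate(context)` wiring are assumptions for the sketch, not part of this diff:

```
// Hypothetical consumer of the typings above, run inside an async activate(context).
const jsDebugExt =
  vscode.extensions.getExtension("ms-vscode.js-debug-nightly") ||
  vscode.extensions.getExtension("ms-vscode.js-debug");
if (jsDebugExt) {
  await jsDebugExt.activate();
  const jsDebug: import("@vscode/js-debug").IExports = jsDebugExt.exports;
  context.subscriptions.push(
    jsDebug.registerDebugTerminalOptionsProvider({
      provideTerminalOptions(options) {
        // Additive: extend env rather than overwriting it.
        return { ...options, env: { ...options.env, BUN_EXAMPLE: "1" } };
      },
    }),
  );
}
```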
@@ -20,6 +20,7 @@ import {
getEnv,
writeFile,
spawnSafe,
spawn,
mkdir,
} from "./utils.mjs";
import { parseArgs } from "node:util";
@@ -75,10 +76,10 @@ async function doBuildkiteAgent(action) {
command_user=${escape(username)}

pidfile=${escape(pidPath)}
start_stop_daemon_args=" \\
--background \\
--make-pidfile \\
--stdout ${escape(agentLogPath)} \\
start_stop_daemon_args=" \
--background \
--make-pidfile \
--stdout ${escape(agentLogPath)} \
--stderr ${escape(agentLogPath)}"

depend() {
@@ -87,6 +88,7 @@ async function doBuildkiteAgent(action) {
}
`;
writeFile(servicePath, service, { mode: 0o755 });
writeFile(`/etc/conf.d/buildkite-agent`, `rc_ulimit="-n 262144"`);
await spawnSafe(["rc-update", "add", "buildkite-agent", "default"], { stdio: "inherit", privileged: true });
}

@@ -141,7 +143,7 @@ async function doBuildkiteAgent(action) {
shell = `"${cmd}" /S /C`;
} else {
const sh = which("sh", { required: true });
shell = `${sh} -elc`;
shell = `${sh} -e -c`;
}

const flags = ["enable-job-log-tmpfile", "no-feature-reporting"];

@@ -1,5 +1,5 @@
#!/bin/sh
# Version: 9
# Version: 7

# A script that installs the dependencies needed to build and test Bun.
# This should work on macOS and Linux with a POSIX shell.
@@ -11,17 +11,15 @@
# increment the version comment to indicate that a new image should be built.
# Otherwise, the existing image will be retroactively updated.

pid="$$"
pid=$$

print() {
echo "$@"
}

error() {
print "error: $@" >&2
if ! [ "$$" = "$pid" ]; then
kill -s TERM "$pid"
fi
echo "error: $@" >&2
kill -s TERM "$pid"
exit 1
}

@@ -41,32 +39,24 @@ execute_sudo() {
}

execute_as_user() {
sh="$(require sh)"

if [ "$sudo" = "1" ] || [ "$can_sudo" = "1" ]; then
if [ -f "$(which sudo)" ]; then
execute sudo -n -u "$user" "$sh" -lc "$*"
execute sudo -n -u "$user" /bin/sh -c "$*"
elif [ -f "$(which doas)" ]; then
execute doas -u "$user" "$sh" -lc "$*"
execute doas -u "$user" /bin/sh -c "$*"
elif [ -f "$(which su)" ]; then
execute su -s "$sh" "$user" -lc "$*"
execute su -s /bin/sh "$user" -c "$*"
else
execute "$sh" -lc "$*"
execute /bin/sh -c "$*"
fi
else
execute "$sh" -lc "$*"
execute /bin/sh -c "$*"
fi
}

grant_to_user() {
path="$1"
if ! [ -f "$path" ] && ! [ -d "$path" ]; then
error "Could not find file or directory: \"$path\""
fi

chown="$(require chown)"
execute_sudo "$chown" -R "$user:$group" "$path"
execute_sudo chmod -R 777 "$path"
execute_sudo chown -R "$user:$group" "$path"
}

which() {
@@ -78,15 +68,15 @@ require() {
if ! [ -f "$path" ]; then
error "Command \"$1\" is required, but is not installed."
fi
print "$path"
echo "$path"
}

fetch() {
curl="$(which curl)"
curl=$(which curl)
if [ -f "$curl" ]; then
execute "$curl" -fsSL "$1"
else
wget="$(which wget)"
wget=$(which wget)
if [ -f "$wget" ]; then
execute "$wget" -qO- "$1"
else
@@ -95,115 +85,78 @@ fetch() {
fi
}

compare_version() {
if [ "$1" = "$2" ]; then
print "0"
elif [ "$1" = "$(echo -e "$1\n$2" | sort -V | head -n1)" ]; then
print "-1"
else
print "1"
fi
}
download_file() {
url="$1"
filename="${2:-$(basename "$url")}"
tmp="$(execute mktemp -d)"
execute chmod 755 "$tmp"

create_directory() {
path="$1"
path_dir="$path"
while ! [ -d "$path_dir" ]; do
path_dir="$(dirname "$path_dir")"
done
path="$tmp/$filename"
fetch "$url" >"$path"
execute chmod 644 "$path"

path_needs_sudo="0"
if ! [ -r "$path_dir" ] || ! [ -w "$path_dir" ]; then
path_needs_sudo="1"
fi

mkdir="$(require mkdir)"
if [ "$path_needs_sudo" = "1" ]; then
execute_sudo "$mkdir" -p "$path"
else
execute "$mkdir" -p "$path"
fi

grant_to_user "$path"
}

create_tmp_directory() {
mktemp="$(require mktemp)"
path="$(execute "$mktemp" -d)"
grant_to_user "$path"
print "$path"
}

create_file() {
path="$1"
path_dir="$(dirname "$path")"
if ! [ -d "$path_dir" ]; then
create_directory "$path_dir"
fi

path_needs_sudo="0"
if ! [ -r "$path" ] || ! [ -w "$path" ]; then
path_needs_sudo="1"
fi

if [ "$path_needs_sudo" = "1" ]; then
execute_sudo touch "$path"
compare_version() {
if [ "$1" = "$2" ]; then
echo "0"
elif [ "$1" = "$(echo -e "$1\n$2" | sort -V | head -n1)" ]; then
echo "-1"
else
execute touch "$path"
echo "1"
fi

content="$2"
if [ -n "$content" ]; then
append_file "$path" "$content"
fi

grant_to_user "$path"
}

append_file() {
path="$1"
if ! [ -f "$path" ]; then
create_file "$path"
fi

path_needs_sudo="0"
if ! [ -r "$path" ] || ! [ -w "$path" ]; then
path_needs_sudo="1"
fi

append_to_file() {
file="$1"
content="$2"
print "$content" | while read -r line; do
if ! grep -q "$line" "$path"; then
sh="$(require sh)"
if [ "$path_needs_sudo" = "1" ]; then
execute_sudo "$sh" -c "echo '$line' >> '$path'"

file_needs_sudo="0"
if [ -f "$file" ]; then
if ! [ -r "$file" ] || ! [ -w "$file" ]; then
file_needs_sudo="1"
fi
else
execute_as_user mkdir -p "$(dirname "$file")"
execute_as_user touch "$file"
fi

echo "$content" | while read -r line; do
if ! grep -q "$line" "$file"; then
if [ "$file_needs_sudo" = "1" ]; then
execute_sudo sh -c "echo '$line' >> '$file'"
else
execute "$sh" -c "echo '$line' >> '$path'"
echo "$line" >>"$file"
fi
fi
done
}

download_file() {
file_url="$1"
file_tmp_dir="$(create_tmp_directory)"
file_tmp_path="$file_tmp_dir/$(basename "$file_url")"
append_to_file_sudo() {
file="$1"
content="$2"

fetch "$file_url" >"$file_tmp_path"
grant_to_user "$file_tmp_path"

print "$file_tmp_path"
if ! [ -f "$file" ]; then
execute_sudo mkdir -p "$(dirname "$file")"
execute_sudo touch "$file"
fi

echo "$content" | while read -r line; do
if ! grep -q "$line" "$file"; then
echo "$line" | execute_sudo tee "$file" >/dev/null
fi
done
}

append_to_profile() {
content="$1"
profiles=".profile .zprofile .bash_profile .bashrc .zshrc"
for profile in $profiles; do
for profile_path in "$current_home/$profile" "$home/$profile"; do
if [ "$ci" = "1" ] || [ -f "$profile_path" ]; then
append_file "$profile_path" "$content"
fi
done
file="$home/$profile"
if [ "$ci" = "1" ] || [ -f "$file" ]; then
append_to_file "$file" "$content"
fi
done
}

@@ -237,22 +190,19 @@ move_to_bin() {
check_features() {
print "Checking features..."

for arg in "$@"; do
case "$arg" in
*--ci*)
ci=1
print "CI: enabled"
;;
*--osxcross*)
osxcross=1
print "Cross-compiling to macOS: enabled"
;;
*--gcc-13*)
gcc_version="13"
print "GCC 13: enabled"
;;
esac
done
case "$CI" in
true | 1)
ci=1
print "CI: enabled"
;;
esac

case "$@" in
*--ci*)
ci=1
print "CI: enabled"
;;
esac
}

check_operating_system() {
@@ -261,29 +211,17 @@ check_operating_system() {

os="$("$uname" -s)"
case "$os" in
Linux*)
os="linux"
;;
Darwin*)
os="darwin"
;;
*)
error "Unsupported operating system: $os"
;;
Linux*) os="linux" ;;
Darwin*) os="darwin" ;;
*) error "Unsupported operating system: $os" ;;
esac
print "Operating System: $os"

arch="$("$uname" -m)"
case "$arch" in
x86_64 | x64 | amd64)
arch="x64"
;;
aarch64 | arm64)
arch="aarch64"
;;
*)
error "Unsupported architecture: $arch"
;;
x86_64 | x64 | amd64) arch="x64" ;;
aarch64 | arm64) arch="aarch64" ;;
*) error "Unsupported architecture: $arch" ;;
esac
print "Architecture: $arch"

@@ -297,7 +235,7 @@ check_operating_system() {
abi="musl"
alpine="$(cat /etc/alpine-release)"
if [ "$alpine" ~ "_" ]; then
|
||||
release="$(print "$alpine" | cut -d_ -f1)-edge"
|
||||
release="$(echo "$alpine" | cut -d_ -f1)-edge"
|
||||
else
|
||||
release="$alpine"
|
||||
fi
|
||||
@@ -317,7 +255,6 @@ check_operating_system() {
|
||||
distro="$("$sw_vers" -productName)"
|
||||
release="$("$sw_vers" -productVersion)"
|
||||
fi
|
||||
|
||||
case "$arch" in
|
||||
x64)
|
||||
sysctl="$(which sysctl)"
|
||||
@@ -340,7 +277,7 @@ check_operating_system() {
|
||||
ldd="$(which ldd)"
|
||||
if [ -f "$ldd" ]; then
|
||||
ldd_version="$($ldd --version 2>&1)"
|
||||
abi_version="$(print "$ldd_version" | grep -o -E '[0-9]+\.[0-9]+(\.[0-9]+)?' | head -n 1)"
|
||||
abi_version="$(echo "$ldd_version" | grep -o -E '[0-9]+\.[0-9]+(\.[0-9]+)?' | head -n 1)"
|
||||
case "$ldd_version" in
|
||||
*musl*)
|
||||
abi="musl"
|
||||
@@ -356,6 +293,11 @@ check_operating_system() {
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
nodearch="$arch"
|
||||
if [ "$arch" = "aarch64" ]; then
|
||||
nodearch="arm64"
|
||||
fi
|
||||
}
|
||||
|
||||
check_inside_docker() {
|
||||
@@ -457,10 +399,6 @@ check_user() {
|
||||
can_sudo=1
|
||||
print "Sudo: can be used"
|
||||
fi
|
||||
|
||||
current_user="$user"
|
||||
current_group="$group"
|
||||
current_home="$home"
|
||||
}
|
||||
|
||||
check_ulimit() {
|
||||
@@ -472,12 +410,15 @@ check_ulimit() {
|
||||
systemd_conf="/etc/systemd/system.conf"
|
||||
if [ -f "$systemd_conf" ]; then
|
||||
limits_conf="/etc/security/limits.d/99-unlimited.conf"
|
||||
create_file "$limits_conf"
|
||||
if ! [ -f "$limits_conf" ]; then
|
||||
execute_sudo mkdir -p "$(dirname "$limits_conf")"
|
||||
execute_sudo touch "$limits_conf"
|
||||
fi
|
||||
fi
|
||||
|
||||
limits="core data fsize memlock nofile rss stack cpu nproc as locks sigpending msgqueue"
|
||||
for limit in $limits; do
|
||||
limit_upper="$(print "$limit" | tr '[:lower:]' '[:upper:]')"
|
||||
limit_upper="$(echo "$limit" | tr '[:lower:]' '[:upper:]')"
|
||||
|
||||
limit_value="unlimited"
|
||||
case "$limit" in
|
||||
@@ -489,13 +430,13 @@ check_ulimit() {
|
||||
if [ -f "$limits_conf" ]; then
|
||||
limit_users="root *"
|
||||
for limit_user in $limit_users; do
|
||||
append_file "$limits_conf" "$limit_user soft $limit $limit_value"
|
||||
append_file "$limits_conf" "$limit_user hard $limit $limit_value"
|
||||
append_to_file "$limits_conf" "$limit_user soft $limit $limit_value"
|
||||
append_to_file "$limits_conf" "$limit_user hard $limit $limit_value"
|
||||
done
|
||||
fi
|
||||
|
||||
if [ -f "$systemd_conf" ]; then
|
||||
append_file "$systemd_conf" "DefaultLimit$limit_upper=$limit_value"
|
||||
append_to_file "$systemd_conf" "DefaultLimit$limit_upper=$limit_value"
|
||||
fi
|
||||
done
|
||||
|
||||
@@ -512,13 +453,13 @@ check_ulimit() {
|
||||
esac
|
||||
rc_ulimit="$rc_ulimit -$limit_flag $limit_value"
|
||||
done
|
||||
append_file "$rc_conf" "rc_ulimit=\"$rc_ulimit\""
|
||||
append_to_file "$rc_conf" "rc_ulimit=\"$rc_ulimit\""
|
||||
fi
|
||||
|
||||
pam_confs="/etc/pam.d/common-session /etc/pam.d/common-session-noninteractive"
|
||||
for pam_conf in $pam_confs; do
|
||||
if [ -f "$pam_conf" ]; then
|
||||
append_file "$pam_conf" "session optional pam_limits.so"
|
||||
append_to_file "$pam_conf" "session optional pam_limits.so"
|
||||
fi
|
||||
done
|
||||
|
||||
@@ -526,24 +467,6 @@ check_ulimit() {
|
||||
if [ -f "$systemctl" ]; then
|
||||
execute_sudo "$systemctl" daemon-reload
|
||||
fi
|
||||
|
||||
# Configure dpkg and apt for faster operation in CI environments
|
||||
if [ "$ci" = "1" ] && [ "$pm" = "apt" ]; then
|
||||
dpkg_conf="/etc/dpkg/dpkg.cfg.d/01-ci-options"
|
||||
execute_sudo create_directory "$(dirname "$dpkg_conf")"
|
||||
append_file "$dpkg_conf" "force-unsafe-io"
|
||||
append_file "$dpkg_conf" "no-debsig"
|
||||
|
||||
apt_conf="/etc/apt/apt.conf.d/99-ci-options"
|
||||
execute_sudo create_directory "$(dirname "$apt_conf")"
|
||||
append_file "$apt_conf" 'Acquire::Languages "none";'
|
||||
append_file "$apt_conf" 'Acquire::GzipIndexes "true";'
|
||||
append_file "$apt_conf" 'Acquire::CompressionTypes::Order:: "gz";'
|
||||
append_file "$apt_conf" 'APT::Get::Install-Recommends "false";'
|
||||
append_file "$apt_conf" 'APT::Get::Install-Suggests "false";'
|
||||
append_file "$apt_conf" 'Dpkg::Options { "--force-confdef"; "--force-confold"; }'
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
package_manager() {
|
||||
@@ -639,7 +562,7 @@ install_brew() {
|
||||
|
||||
bash="$(require bash)"
|
||||
script=$(download_file "https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh")
|
||||
execute_as_user "$bash" -lc "NONINTERACTIVE=1 $script"
|
||||
NONINTERACTIVE=1 execute_as_user "$bash" "$script"
|
||||
|
||||
case "$arch" in
|
||||
x64)
|
||||
@@ -670,6 +593,10 @@ install_common_software() {
|
||||
install_packages \
|
||||
dnf-plugins-core
|
||||
;;
|
||||
apk)
|
||||
install_packages \
|
||||
minisign
|
||||
;;
|
||||
esac
|
||||
|
||||
case "$distro" in
|
||||
@@ -720,7 +647,7 @@ nodejs_version_exact() {
|
||||
}
|
||||
|
||||
nodejs_version() {
|
||||
print "$(nodejs_version_exact)" | cut -d. -f1
|
||||
echo "$(nodejs_version_exact)" | cut -d. -f1
|
||||
}
|
||||
|
||||
install_nodejs() {
|
||||
@@ -739,7 +666,12 @@ install_nodejs() {
|
||||
|
||||
case "$pm" in
|
||||
apk)
|
||||
install_packages nodejs npm
|
||||
node_tar_url="http://mirrors.nektro.net/nodejs/release/v$(nodejs_version_exact)/node-v$(nodejs_version_exact)-linux-$nodearch-musl.tar.xz"
|
||||
node_tar_file=$(download_file "$node_tar_url")
|
||||
node_tar_minisgn=$(download_file "$node_tar_url.minisig")
|
||||
execute mv "$node_tar_minisgn" "$(dirname "$node_tar_file")"
execute minisign -Vm "$node_tar_file" -P RWSbSU2slSJU1eCLS8MhjPRg0+yT47pqyoupglip88N2gogeBDxiQmbi
execute tar -xJf "$node_tar_file" -C /usr/local --strip-components=1 --no-same-owner
;;
*)
install_packages nodejs
@@ -756,21 +688,14 @@ install_nodejs() {
}

install_nodejs_headers() {
nodejs_headers_tar="$(download_file "https://nodejs.org/download/release/v$(nodejs_version_exact)/node-v$(nodejs_version_exact)-headers.tar.gz")"
nodejs_headers_dir="$(dirname "$nodejs_headers_tar")"
execute tar -xzf "$nodejs_headers_tar" -C "$nodejs_headers_dir"

nodejs_headers_include="$nodejs_headers_dir/node-v$(nodejs_version_exact)/include"
execute_sudo cp -R "$nodejs_headers_include/" "/usr"
}

bun_version_exact() {
print "1.1.38"
headers_tar="$(download_file "https://nodejs.org/download/release/v$(nodejs_version_exact)/node-v$(nodejs_version_exact)-headers.tar.gz")"
headers_dir="$(dirname "$headers_tar")"
execute tar -xzf "$headers_tar" -C "$headers_dir"
headers_include="$headers_dir/node-v$(nodejs_version_exact)/include"
execute_sudo cp -R "$headers_include/" "/usr"
}

install_bun() {
install_packages unzip

case "$pm" in
apk)
install_packages \
@@ -779,24 +704,23 @@ install_bun() {
;;
esac

case "$abi" in
musl)
bun_triplet="bun-$os-$arch-$abi"
bash="$(require bash)"
script=$(download_file "https://bun.sh/install")

version="${1:-"latest"}"
case "$version" in
latest)
execute_as_user "$bash" "$script"
;;
*)
bun_triplet="bun-$os-$arch"
execute_as_user "$bash" "$script" -s "$version"
;;
esac

unzip="$(require unzip)"
bun_download_url="https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v$(bun_version_exact)/$bun_triplet.zip"
bun_zip="$(download_file "$bun_download_url")"
bun_tmpdir="$(dirname "$bun_zip")"
execute "$unzip" -o "$bun_zip" -d "$bun_tmpdir"

move_to_bin "$bun_tmpdir/$bun_triplet/bun"
bun_path="$(require bun)"
execute_sudo ln -sf "$bun_path" "$(dirname "$bun_path")/bunx"
move_to_bin "$home/.bun/bin/bun"
bun_path="$(which bun)"
bunx_path="$(dirname "$bun_path")/bunx"
execute_sudo ln -sf "$bun_path" "$bunx_path"
}

install_cmake() {
@@ -889,19 +813,24 @@ install_build_essentials() {

install_cmake
install_llvm
install_osxcross
install_gcc
install_ccache
install_rust
install_docker
}

llvm_version_exact() {
print "18.1.8"
case "$os-$abi" in
darwin-* | windows-* | linux-musl)
print "18.1.8"
;;
linux-*)
print "16.0.6"
;;
esac
}

llvm_version() {
print "$(llvm_version_exact)" | cut -d. -f1
echo "$(llvm_version_exact)" | cut -d. -f1
}

install_llvm() {
@@ -909,7 +838,14 @@ install_llvm() {
apt)
bash="$(require bash)"
llvm_script="$(download_file "https://apt.llvm.org/llvm.sh")"
execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all
case "$distro-$release" in
ubuntu-24*)
execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all -njammy
;;
*)
execute_sudo "$bash" "$llvm_script" "$(llvm_version)" all
;;
esac
;;
brew)
install_packages "llvm@$(llvm_version)"
@@ -927,77 +863,6 @@ install_llvm() {
esac
}

install_gcc() {
if ! [ "$os" = "linux" ] || ! [ "$distro" = "ubuntu" ] || [ -z "$gcc_version" ]; then
return
fi

# Taken from WebKit's Dockerfile.
# https://github.com/oven-sh/WebKit/blob/816a3c02e0f8b53f8eec06b5ed911192589b51e2/Dockerfile

execute_sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
execute_sudo apt update -y
execute_sudo apt install -y \
"gcc-$gcc_version" \
"g++-$gcc_version" \
"libgcc-$gcc_version-dev" \
"libstdc++-$gcc_version-dev" \
libasan6 \
libubsan1 \
libatomic1 \
libtsan0 \
liblsan0 \
libgfortran5 \
libc6-dev

execute_sudo update-alternatives \
--install /usr/bin/gcc gcc "/usr/bin/gcc-$gcc_version" 130 \
--slave /usr/bin/g++ g++ "/usr/bin/g++-$gcc_version" \
--slave /usr/bin/gcc-ar gcc-ar "/usr/bin/gcc-ar-$gcc_version" \
--slave /usr/bin/gcc-nm gcc-nm "/usr/bin/gcc-nm-$gcc_version" \
--slave /usr/bin/gcc-ranlib gcc-ranlib "/usr/bin/gcc-ranlib-$gcc_version"

case "$arch" in
x64)
arch_path="x86_64-linux-gnu"
;;
aarch64)
arch_path="aarch64-linux-gnu"
;;
esac

llvm_v="18"

append_to_profile "export CC=clang-${llvm_v}"
append_to_profile "export CXX=clang++-${llvm_v}"
append_to_profile "export AR=llvm-ar-${llvm_v}"
append_to_profile "export RANLIB=llvm-ranlib-${llvm_v}"
append_to_profile "export LD=lld-${llvm_v}"
append_to_profile "export LD_LIBRARY_PATH=/usr/lib/gcc/${arch_path}/${gcc_version}:/usr/lib/${arch_path}"
append_to_profile "export LIBRARY_PATH=/usr/lib/gcc/${arch_path}/${gcc_version}:/usr/lib/${arch_path}"
append_to_profile "export CPLUS_INCLUDE_PATH=/usr/include/c++/${gcc_version}:/usr/include/${arch_path}/c++/${gcc_version}"
append_to_profile "export C_INCLUDE_PATH=/usr/lib/gcc/${arch_path}/${gcc_version}/include"

gcc_path="/usr/lib/gcc/$arch_path/$gcc_version"
create_directory "$gcc_path"
execute_sudo ln -sf /usr/lib/$arch_path/libstdc++.so.6 "$gcc_path/libstdc++.so.6"

ld_conf_path="/etc/ld.so.conf.d/gcc-$gcc_version.conf"
append_file "$ld_conf_path" "$gcc_path"
append_file "$ld_conf_path" "/usr/lib/$arch_path"
execute_sudo ldconfig

execute_sudo ln -sf $(which clang-$llvm_v) /usr/bin/clang
execute_sudo ln -sf $(which clang++-$llvm_v) /usr/bin/clang++
execute_sudo ln -sf $(which lld-$llvm_v) /usr/bin/lld
execute_sudo ln -sf $(which lldb-$llvm_v) /usr/bin/lldb
execute_sudo ln -sf $(which clangd-$llvm_v) /usr/bin/clangd
execute_sudo ln -sf $(which llvm-ar-$llvm_v) /usr/bin/llvm-ar
execute_sudo ln -sf $(which ld.lld-$llvm_v) /usr/bin/ld
execute_sudo ln -sf $(which clang) /usr/bin/cc
execute_sudo ln -sf $(which clang++) /usr/bin/c++
}

install_ccache() {
case "$pm" in
apt | apk | brew)
@@ -1014,23 +879,9 @@ install_rust() {
cargo
;;
*)
rust_home="/opt/rust"
create_directory "$rust_home"
append_to_profile "export RUSTUP_HOME=$rust_home"
append_to_profile "export CARGO_HOME=$rust_home"

sh="$(require sh)"
rustup_script=$(download_file "https://sh.rustup.rs")
execute "$sh" -lc "$rustup_script -y --no-modify-path"
append_to_path "$rust_home/bin"
;;
esac

case "$osxcross" in
1)
rustup="$(require rustup)"
execute_as_user "$rustup" target add aarch64-apple-darwin
execute_as_user "$rustup" target add x86_64-apple-darwin
script=$(download_file "https://sh.rustup.rs")
execute_as_user "$sh" "$script" -y
;;
esac
}
@@ -1073,46 +924,6 @@ install_docker() {
fi
}

macos_sdk_version() {
# https://github.com/alexey-lysiuk/macos-sdk/releases
print "13.3"
}

install_osxcross() {
if ! [ "$os" = "linux" ] || ! [ "$osxcross" = "1" ]; then
return
fi

install_packages \
libssl-dev \
lzma-dev \
libxml2-dev \
zlib1g-dev \
bzip2 \
cpio

osxcross_path="/opt/osxcross"
create_directory "$osxcross_path"

osxcross_commit="29fe6dd35522073c9df5800f8cd1feb4b9a993a8"
osxcross_tar="$(download_file "https://github.com/tpoechtrager/osxcross/archive/$osxcross_commit.tar.gz")"
execute tar -xzf "$osxcross_tar" -C "$osxcross_path"

osxcross_build_path="$osxcross_path/build"
execute mv "$osxcross_path/osxcross-$osxcross_commit" "$osxcross_build_path"

osxcross_sdk_tar="$(download_file "https://github.com/alexey-lysiuk/macos-sdk/releases/download/$(macos_sdk_version)/MacOSX$(macos_sdk_version).tar.xz")"
execute mv "$osxcross_sdk_tar" "$osxcross_build_path/tarballs/MacOSX$(macos_sdk_version).sdk.tar.xz"

bash="$(require bash)"
execute_sudo ln -sf "$(which clang-$(llvm_version))" /usr/bin/clang
execute_sudo ln -sf "$(which clang++-$(llvm_version))" /usr/bin/clang++
execute_sudo "$bash" -lc "UNATTENDED=1 TARGET_DIR='$osxcross_path' $osxcross_build_path/build.sh"

execute_sudo rm -rf "$osxcross_build_path"
grant_to_user "$osxcross_path"
}

install_tailscale() {
if [ "$docker" = "1" ]; then
return
@@ -1178,12 +989,14 @@ create_buildkite_user() {

buildkite_paths="$home /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /var/run/buildkite-agent/buildkite-agent.sock"
for path in $buildkite_paths; do
create_directory "$path"
execute_sudo mkdir -p "$path"
execute_sudo chown -R "$user:$group" "$path"
done

buildkite_files="/var/run/buildkite-agent/buildkite-agent.pid"
for file in $buildkite_files; do
create_file "$file"
execute_sudo touch "$file"
execute_sudo chown "$user:$group" "$file"
done
}

@@ -1193,22 +1006,27 @@ install_buildkite() {
fi

buildkite_version="3.87.0"
case "$arch" in
aarch64)
buildkite_arch="arm64"
case "$os-$arch" in
linux-aarch64)
buildkite_filename="buildkite-agent-linux-arm64-$buildkite_version.tar.gz"
;;
x64)
buildkite_arch="amd64"
linux-x64)
buildkite_filename="buildkite-agent-linux-amd64-$buildkite_version.tar.gz"
;;
darwin-aarch64)
buildkite_filename="buildkite-agent-darwin-arm64-$buildkite_version.tar.gz"
;;
darwin-x64)
buildkite_filename="buildkite-agent-darwin-amd64-$buildkite_version.tar.gz"
;;
esac

buildkite_filename="buildkite-agent-$os-$buildkite_arch-$buildkite_version.tar.gz"
buildkite_url="https://github.com/buildkite/agent/releases/download/v$buildkite_version/$buildkite_filename"
buildkite_tar="$(download_file "$buildkite_url")"
buildkite_tmpdir="$(dirname "$buildkite_tar")"
buildkite_filepath="$(download_file "$buildkite_url" "$buildkite_filename")"
buildkite_tmpdir="$(dirname "$buildkite_filepath")"

execute tar -xzf "$buildkite_tar" -C "$buildkite_tmpdir"
execute tar -xzf "$buildkite_filepath" -C "$buildkite_tmpdir"
move_to_bin "$buildkite_tmpdir/buildkite-agent"
execute rm -rf "$buildkite_tmpdir"
}

install_chromium() {
@@ -1299,19 +1117,6 @@ install_chromium() {
esac
}

clean_system() {
if ! [ "$ci" = "1" ]; then
return
fi

print "Cleaning system..."

tmp_paths="/tmp /var/tmp"
for path in $tmp_paths; do
execute_sudo rm -rf "$path"/*
done
}

main() {
check_features "$@"
check_operating_system
@@ -1323,7 +1128,6 @@ main() {
install_common_software
install_build_essentials
install_chromium
clean_system
}

main "$@"

@@ -1,31 +1,8 @@
#!/bin/bash

# How to use this script:
# 1. Pick a module from node's standard library (e.g. 'assert', 'fs')
# 2. Copy over relevant tests from node's parallel test suite into test/js/node/test/parallel
# 3. Run this script, e.g. `./scripts/check-node.sh fs`
# 4. Tests that passed get staged for commit

i=0
j=0

if [[ -z $1 ]]
then
echo "Usage: $0 <module-name>"
exit 1
fi

case $1 in
-h|--help)
echo "Usage: $0 <module-name>"
echo "Run all parallel tests for a single module in node's standard library"
exit 0
;;
esac

export BUN_DEBUG_QUIET_LOGS=1
export NO_COLOR=1

for x in $(git ls-files test/js/node/test/parallel --exclude-standard --others | grep test-$1)
do
i=$((i+1))

@@ -1,300 +0,0 @@
import { inspect } from "node:util";
import { $, isCI, spawn, spawnSafe, which } from "./utils.mjs";

export const docker = {
get name() {
return "docker";
},

/**
* @typedef {"linux" | "darwin" | "windows"} DockerOs
* @typedef {"amd64" | "arm64"} DockerArch
* @typedef {`${DockerOs}/${DockerArch}`} DockerPlatform
*/

/**
* @param {Platform} platform
* @returns {DockerPlatform}
*/
getPlatform(platform) {
const { os, arch } = platform;
if (arch === "aarch64") {
return `${os}/arm64`;
} else if (arch === "x64") {
return `${os}/amd64`;
}
throw new Error(`Unsupported platform: ${inspect(platform)}`);
},

/**
* @typedef DockerSpawnOptions
* @property {DockerPlatform} [platform]
* @property {boolean} [json]
*/

/**
* @param {string[]} args
* @param {DockerSpawnOptions & import("./utils.mjs").SpawnOptions} [options]
* @returns {Promise<unknown>}
*/
async spawn(args, options = {}) {
const docker = which("docker", { required: true });

let env = { ...process.env };
if (isCI) {
env["BUILDKIT_PROGRESS"] = "plain";
}

const { json, platform } = options;
if (json) {
args.push("--format=json");
}
if (platform) {
args.push(`--platform=${platform}`);
}

const { error, stdout } = await spawnSafe($`${docker} ${args}`, { env, ...options });
if (error) {
return;
}
if (!json) {
return stdout;
}

try {
return JSON.parse(stdout);
} catch {
return;
}
},
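
For reference, a small usage sketch of the `spawn` wrapper above; the call shape mirrors the `listContainers` helper further down in this file rather than anything invented:

```
// Equivalent to listContainers(): parsed JSON output, one entry per container.
const containers = await docker.spawn(["container", "ls", "--all"], { json: true });
```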

/**
* @typedef {Object} DockerImage
* @property {string} Id
* @property {string[]} RepoTags
* @property {string[]} RepoDigests
* @property {string} Created
* @property {DockerOs} Os
* @property {DockerArch} Architecture
* @property {number} Size
*/

/**
* @param {string} url
* @param {DockerPlatform} [platform]
* @returns {Promise<boolean>}
*/
async pullImage(url, platform) {
const done = await this.spawn($`pull ${url}`, {
platform,
throwOnError: error => !/No such image|manifest unknown/i.test(inspect(error)),
});
return !!done;
},

/**
* @param {string} url
* @param {DockerPlatform} [platform]
* @returns {Promise<DockerImage | undefined>}
*/
async inspectImage(url, platform) {
/** @type {DockerImage[]} */
const images = await this.spawn($`image inspect ${url}`, {
json: true,
throwOnError: error => !/No such image/i.test(inspect(error)),
});

if (!images) {
const pulled = await this.pullImage(url, platform);
if (pulled) {
return this.inspectImage(url, platform);
}
}

const { os, arch } = platform || {};
return images
?.filter(({ Os, Architecture }) => !os || !arch || (Os === os && Architecture === arch))
?.find((a, b) => (a.Created < b.Created ? 1 : -1));
},

/**
* @typedef {Object} DockerContainer
* @property {string} Id
* @property {string} Name
* @property {string} Image
* @property {string} Created
* @property {DockerContainerState} State
* @property {DockerContainerNetworkSettings} NetworkSettings
*/

/**
* @typedef {Object} DockerContainerState
* @property {"exited" | "running"} Status
* @property {number} [Pid]
* @property {number} ExitCode
* @property {string} [Error]
* @property {string} StartedAt
* @property {string} FinishedAt
*/

/**
* @typedef {Object} DockerContainerNetworkSettings
* @property {string} [IPAddress]
*/

/**
* @param {string} containerId
* @returns {Promise<DockerContainer | undefined>}
*/
async inspectContainer(containerId) {
const containers = await this.spawn($`container inspect ${containerId}`, { json: true });
return containers?.find(a => a.Id === containerId);
},

/**
* @returns {Promise<DockerContainer[]>}
*/
async listContainers() {
const containers = await this.spawn($`container ls --all`, { json: true });
return containers || [];
},

/**
* @typedef {Object} DockerRunOptions
* @property {string[]} [command]
* @property {DockerPlatform} [platform]
* @property {string} [name]
* @property {boolean} [detach]
* @property {"always" | "never"} [pull]
* @property {boolean} [rm]
* @property {"no" | "on-failure" | "always"} [restart]
*/

/**
* @param {string} url
* @param {DockerRunOptions} [options]
* @returns {Promise<DockerContainer>}
*/
async runContainer(url, options = {}) {
const { detach, command = [], ...containerOptions } = options;
const args = Object.entries(containerOptions)
.filter(([_, value]) => typeof value !== "undefined")
.map(([key, value]) => (typeof value === "boolean" ? `--${key}` : `--${key}=${value}`));
if (detach) {
args.push("--detach");
} else {
args.push("--tty", "--interactive");
}

const stdio = detach ? "pipe" : "inherit";
const result = await this.spawn($`run ${args} ${url} ${command}`, { stdio });
if (!detach) {
return;
}

const containerId = result.trim();
const container = await this.inspectContainer(containerId);
if (!container) {
throw new Error(`Failed to run container: ${inspect(result)}`);
}
return container;
},
|
||||
|
||||
/**
|
||||
* @param {Platform} platform
|
||||
* @returns {Promise<DockerImage>}
|
||||
*/
|
||||
async getBaseImage(platform) {
|
||||
const { os, distro, release } = platform;
|
||||
const dockerPlatform = this.getPlatform(platform);
|
||||
|
||||
let url;
|
||||
if (os === "linux") {
|
||||
if (distro === "debian" || distro === "ubuntu" || distro === "alpine") {
|
||||
url = `docker.io/library/${distro}:${release}`;
|
||||
} else if (distro === "amazonlinux") {
|
||||
url = `public.ecr.aws/amazonlinux/amazonlinux:${release}`;
|
||||
}
|
||||
}
|
||||
|
||||
if (url) {
|
||||
const image = await this.inspectImage(url, dockerPlatform);
|
||||
if (image) {
|
||||
return image;
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`Unsupported platform: ${inspect(platform)}`);
|
||||
},
|
||||
|
||||
/**
|
||||
* @param {DockerContainer} container
|
||||
* @param {MachineOptions} [options]
|
||||
* @returns {Machine}
|
||||
*/
|
||||
toMachine(container, options = {}) {
|
||||
const { Id: containerId } = container;
|
||||
|
||||
const exec = (command, options) => {
|
||||
return spawn(["docker", "exec", containerId, ...command], options);
|
||||
};
|
||||
|
||||
const execSafe = (command, options) => {
|
||||
return spawnSafe(["docker", "exec", containerId, ...command], options);
|
||||
};
|
||||
|
||||
const upload = async (source, destination) => {
|
||||
await spawn(["docker", "cp", source, `${containerId}:${destination}`]);
|
||||
};
|
||||
|
||||
const attach = async () => {
|
||||
const { exitCode, error } = await spawn(["docker", "exec", "-it", containerId, "sh"], {
|
||||
stdio: "inherit",
|
||||
});
|
||||
|
||||
if (exitCode === 0 || exitCode === 130) {
|
||||
return;
|
||||
}
|
||||
|
||||
throw error;
|
||||
};
|
||||
|
||||
const snapshot = async name => {
|
||||
await spawn(["docker", "commit", containerId]);
|
||||
};
|
||||
|
||||
const kill = async () => {
|
||||
await spawn(["docker", "kill", containerId]);
|
||||
};
|
||||
|
||||
return {
|
||||
cloud: "docker",
|
||||
id: containerId,
|
||||
spawn: exec,
|
||||
spawnSafe: execSafe,
|
||||
upload,
|
||||
attach,
|
||||
snapshot,
|
||||
close: kill,
|
||||
[Symbol.asyncDispose]: kill,
|
||||
};
|
||||
},
|
||||
|
||||
/**
|
||||
* @param {MachineOptions} options
|
||||
* @returns {Promise<Machine>}
|
||||
*/
|
||||
async createMachine(options) {
|
||||
const { Id: imageId, Os, Architecture } = await docker.getBaseImage(options);
|
||||
|
||||
const container = await docker.runContainer(imageId, {
|
||||
platform: `${Os}/${Architecture}`,
|
||||
command: ["sleep", "1d"],
|
||||
detach: true,
|
||||
rm: true,
|
||||
restart: "no",
|
||||
});
|
||||
|
||||
return this.toMachine(container, options);
|
||||
},
|
||||
};
|
||||
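For context, a minimal sketch of how this docker helper is meant to be driven. The import path and option values are assumptions, and a running Docker daemon is required:

// Hypothetical usage; `docker` is the object defined above.
import { docker } from "./docker.mjs"; // assumed path

const machine = await docker.createMachine({ os: "linux", distro: "debian", release: "12" });
try {
  // `spawnSafe` proxies through `docker exec` inside the container.
  await machine.spawnSafe(["uname", "-a"], { stdio: "inherit" });
} finally {
  await machine.close(); // kills the container; `rm: true` removes it
}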
@@ -1,510 +0,0 @@
import { inspect } from "node:util";
// Note: spawnSsh, spawnSshSafe, spawnScp, and getDiskSize are used below and are
// assumed to live in utils.mjs / machine.mjs alongside the other helpers; they
// were missing from the original import, as was `inspect`.
import { $, spawnSafe, spawnSsh, spawnSshSafe, spawnScp, which, getUsernameForDistro, getDiskSize } from "./utils.mjs";
import { getUserData } from "./machine.mjs";

export const google = {
  get cloud() {
    return "google";
  },

  /**
   * @param {string[]} args
   * @param {import("./utils.mjs").SpawnOptions} [options]
   * @returns {Promise<unknown>}
   */
  async spawn(args, options = {}) {
    const gcloud = which("gcloud", { required: true });

    let env = { ...process.env };
    // if (isCI) {
    //   env; // TODO: Add Google Cloud credentials
    // } else {
    //   env["TERM"] = "dumb";
    // }

    const { stdout } = await spawnSafe($`${gcloud} ${args} --format json`, {
      env,
      ...options,
    });
    try {
      return JSON.parse(stdout);
    } catch {
      return;
    }
  },
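Every gcloud call goes through this one wrapper, so callers always receive parsed JSON (or undefined on a parse failure). A hedged sketch of a call site; the subcommand is illustrative:

// Equivalent CLI: gcloud compute zones list --format json
const zones = await google.spawn(["compute", "zones", "list"]);
if (zones) {
  console.log(zones.map(({ name }) => name)); // e.g. ["us-central1-a", ...]
}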

  /**
   * @param {Record<string, string | undefined>} [options]
   * @returns {string[]}
   */
  getFilters(options = {}) {
    const filter = Object.entries(options)
      .filter(([, value]) => value !== undefined)
      .map(([key, value]) => (value.includes("*") ? `${key}~${value}` : `${key}=${value}`))
      .join(" AND ");
    return filter ? ["--filter", filter] : [];
  },

  /**
   * @param {Record<string, string | boolean | undefined>} options
   * @returns {string[]}
   */
  getFlags(options) {
    return Object.entries(options)
      .filter(([, value]) => value !== undefined)
      .flatMap(([key, value]) => {
        if (typeof value === "boolean") {
          return value ? [`--${key}`] : [];
        }
        return [`--${key}=${value}`];
      });
  },

  /**
   * @param {Record<string, string | boolean | undefined>} options
   * @returns {string}
   * @link https://cloud.google.com/sdk/gcloud/reference/topic/escaping
   */
  getMetadata(options) {
    const delimiter = Math.random().toString(36).substring(2, 15);
    const entries = Object.entries(options)
      .map(([key, value]) => `${key}=${value}`)
      .join(delimiter);
    return `^${delimiter}^${entries}`;
  },
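The `^delimiter^` prefix is gcloud's alternate-delimiter escaping (see the linked docs): metadata values such as cloud-init user data routinely contain commas, so a random delimiter is generated instead of the default. A sketch of the resulting string; the delimiter shown is made up:

// With delimiter "zq3k9", this returns:
//   ^zq3k9^user-data=#cloud-config ...zq3k9enable-oslogin=TRUE
const metadataFlag = google.getMetadata({ "user-data": "#cloud-config ...", "enable-oslogin": "TRUE" });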

  /**
   * @param {string} name
   * @returns {string}
   */
  getLabel(name) {
    return name.replace(/[^a-z0-9_-]/g, "-").toLowerCase();
  },

  /**
   * @typedef {Object} GoogleImage
   * @property {string} id
   * @property {string} name
   * @property {string} family
   * @property {"X86_64" | "ARM64"} architecture
   * @property {string} diskSizeGb
   * @property {string} selfLink
   * @property {"READY"} status
   * @property {string} creationTimestamp
   */

  /**
   * @param {Partial<GoogleImage>} [options]
   * @returns {Promise<GoogleImage[]>}
   * @link https://cloud.google.com/sdk/gcloud/reference/compute/images/list
   */
  async listImages(options) {
    const filters = google.getFilters(options);
    const images = await google.spawn($`compute images list ${filters} --preview-images --show-deprecated`);
    return images.sort((a, b) => (a.creationTimestamp < b.creationTimestamp ? 1 : -1));
  },

  /**
   * @param {Record<string, string | boolean | undefined>} options
   * @returns {Promise<GoogleImage>}
   * @link https://cloud.google.com/sdk/gcloud/reference/compute/images/create
   */
  async createImage(options) {
    const { name, ...otherOptions } = options;
    const flags = this.getFlags(otherOptions);
    const imageId = name || "i-" + Math.random().toString(36).substring(2, 15);
    return this.spawn($`compute images create ${imageId} ${flags}`);
  },

  /**
   * @typedef {Object} GoogleInstance
   * @property {string} id
   * @property {string} name
   * @property {"RUNNING"} status
   * @property {string} machineType
   * @property {string} zone
   * @property {GoogleDisk[]} disks
   * @property {GoogleNetworkInterface[]} networkInterfaces
   * @property {object} [scheduling]
   * @property {"STANDARD" | "SPOT"} [scheduling.provisioningModel]
   * @property {boolean} [scheduling.preemptible]
   * @property {Record<string, string | undefined>} [labels]
   * @property {string} selfLink
   * @property {string} creationTimestamp
   */

  /**
   * @typedef {Object} GoogleDisk
   * @property {string} deviceName
   * @property {boolean} boot
   * @property {"X86_64" | "ARM64"} architecture
   * @property {string[]} [licenses]
   * @property {number} diskSizeGb
   */

  /**
   * @typedef {Object} GoogleNetworkInterface
   * @property {"IPV4_ONLY" | "IPV4_IPV6" | "IPV6_ONLY"} stackType
   * @property {string} name
   * @property {string} network
   * @property {string} networkIP
   * @property {string} subnetwork
   * @property {GoogleAccessConfig[]} accessConfigs
   */

  /**
   * @typedef {Object} GoogleAccessConfig
   * @property {string} name
   * @property {"ONE_TO_ONE_NAT" | "INTERNAL_NAT"} type
   * @property {string} [natIP]
   */

  /**
   * @param {Record<string, string | boolean | undefined>} options
   * @returns {Promise<GoogleInstance>}
   * @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/create
   */
  async createInstance(options) {
    const { name, ...otherOptions } = options || {};
    const flags = this.getFlags(otherOptions);
    const instanceId = name || "i-" + Math.random().toString(36).substring(2, 15);
    const [instance] = await this.spawn($`compute instances create ${instanceId} ${flags}`);
    return instance;
  },

  /**
   * @param {string} instanceId
   * @param {string} zoneId
   * @returns {Promise<void>}
   * @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/stop
   */
  async stopInstance(instanceId, zoneId) {
    await this.spawn($`compute instances stop ${instanceId} --zone=${zoneId}`);
  },

  /**
   * @param {string} instanceId
   * @param {string} zoneId
   * @returns {Promise<void>}
   * @link https://cloud.google.com/sdk/gcloud/reference/compute/instances/delete
   */
  async deleteInstance(instanceId, zoneId) {
    await this.spawn($`compute instances delete ${instanceId} --delete-disks=all --zone=${zoneId}`, {
      throwOnError: error => !/not found/i.test(inspect(error)),
    });
  },

  /**
   * @param {string} instanceId
   * @param {string} username
   * @param {string} zoneId
   * @param {object} [options]
   * @param {boolean} [options.wait]
   * @returns {Promise<string | undefined>}
   * @link https://cloud.google.com/sdk/gcloud/reference/compute/reset-windows-password
   */
  async resetWindowsPassword(instanceId, username, zoneId, options = {}) {
    const attempts = options.wait ? 15 : 1;
    for (let i = 0; i < attempts; i++) {
      const result = await this.spawn(
        $`compute reset-windows-password ${instanceId} --user=${username} --zone=${zoneId}`,
        {
          throwOnError: error => !/instance may not be ready for use/i.test(inspect(error)),
        },
      );
      if (result) {
        const { password } = result;
        if (password) {
          return password;
        }
      }
      await new Promise(resolve => setTimeout(resolve, 60000 * i));
    }
  },

  /**
   * @param {Partial<GoogleInstance>} options
   * @returns {Promise<GoogleInstance[]>}
   */
  async listInstances(options) {
    const filters = this.getFilters(options);
    const instances = await this.spawn($`compute instances list ${filters}`);
    return instances.sort((a, b) => (a.creationTimestamp < b.creationTimestamp ? 1 : -1));
  },

  /**
   * @param {MachineOptions} options
   * @returns {Promise<GoogleImage>}
   */
  async getMachineImage(options) {
    const { os, arch, distro, release } = options;
    const architecture = arch === "aarch64" ? "ARM64" : "X86_64";

    /** @type {string | undefined} */
    let family;
    if (os === "linux") {
      if (!distro || distro === "debian") {
        family = `debian-${release || "*"}`;
      } else if (distro === "ubuntu") {
        family = `ubuntu-${release?.replace(/\./g, "") || "*"}`;
      } else if (distro === "fedora") {
        family = `fedora-coreos-${release || "*"}`;
      } else if (distro === "rhel") {
        family = `rhel-${release || "*"}`;
      }
    } else if (os === "windows" && arch === "x64") {
      if (!distro || distro === "server") {
        family = `windows-${release || "*"}`;
      }
    }

    if (family) {
      const images = await this.listImages({ family, architecture });
      if (images.length) {
        const [image] = images;
        return image;
      }
    }

    throw new Error(`Unsupported platform: ${inspect(options)}`);
  },
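The family strings above follow GCP's public image-family naming; a quick sketch of the mapping, with illustrative inputs:

// { os: "linux", distro: "ubuntu", release: "22.04", arch: "aarch64" }
//   -> family "ubuntu-2204", architecture "ARM64"
// { os: "windows", distro: "server", release: "2022", arch: "x64" }
//   -> family "windows-2022", architecture "X86_64"
const image = await google.getMachineImage({ os: "linux", distro: "ubuntu", release: "22.04", arch: "aarch64" });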

  /**
   * @param {MachineOptions} options
   * @returns {Promise<Machine>}
   */
  async createMachine(options) {
    const { name, os, arch, distro, instanceType, tags, preemptible, detached } = options;
    const image = await google.getMachineImage(options);
    const { selfLink: imageUrl } = image;

    const username = getUsernameForDistro(distro || os);
    const userData = getUserData({ ...options, username });

    /** @type {Record<string, string>} */
    let metadata;
    if (os === "windows") {
      metadata = {
        "enable-windows-ssh": "TRUE",
        "sysprep-specialize-script-ps1": userData,
      };
    } else {
      metadata = {
        "user-data": userData,
      };
    }

    const instance = await google.createInstance({
      "name": name,
      "zone": "us-central1-a",
      "image": imageUrl,
      "machine-type": instanceType || (arch === "aarch64" ? "t2a-standard-2" : "t2d-standard-2"),
      "boot-disk-auto-delete": true,
      "boot-disk-size": `${getDiskSize(options)}GB`,
      "metadata": this.getMetadata(metadata),
      "labels": Object.entries(tags || {})
        .filter(([, value]) => value !== undefined)
        .map(([key, value]) => `${this.getLabel(key)}=${value}`)
        .join(","),
      "provisioning-model": preemptible ? "SPOT" : "STANDARD",
      "instance-termination-action": preemptible || !detached ? "DELETE" : undefined,
      "no-restart-on-failure": true,
      "threads-per-core": 1,
      "max-run-duration": detached ? undefined : "6h",
    });

    return this.toMachine(instance, options);
  },

  /**
   * @param {GoogleInstance} instance
   * @param {MachineOptions} [options]
   * @returns {Machine}
   */
  toMachine(instance, options = {}) {
    const { id: instanceId, name, zone: zoneUrl, machineType: machineTypeUrl, labels } = instance;
    const machineType = machineTypeUrl.split("/").pop();
    const zoneId = zoneUrl.split("/").pop();

    let os, arch, distro, release;
    const { disks = [] } = instance;
    for (const { boot, architecture, licenses = [] } of disks) {
      if (!boot) {
        continue;
      }

      if (architecture === "X86_64") {
        arch = "x64";
      } else if (architecture === "ARM64") {
        arch = "aarch64";
      }

      for (const license of licenses) {
        const linuxMatch = /(debian|ubuntu|fedora|rhel)-(\d+)/i.exec(license);
        if (linuxMatch) {
          os = "linux";
          [, distro, release] = linuxMatch;
        } else {
          const windowsMatch = /windows-server-(\d+)-dc-core/i.exec(license);
          if (windowsMatch) {
            os = "windows";
            distro = "windowsserver";
            [, release] = windowsMatch;
          }
        }
      }
    }

    let publicIp;
    const { networkInterfaces = [] } = instance;
    for (const { accessConfigs = [] } of networkInterfaces) {
      for (const { type, natIP } of accessConfigs) {
        if (type === "ONE_TO_ONE_NAT" && natIP) {
          publicIp = natIP;
        }
      }
    }

    let preemptible;
    const { scheduling } = instance;
    if (scheduling) {
      const { provisioningModel, preemptible: isPreemptible } = scheduling;
      preemptible = provisioningModel === "SPOT" || isPreemptible;
    }

    /**
     * @returns {SshOptions}
     */
    const connect = () => {
      if (!publicIp) {
        throw new Error(`Failed to find public IP for instance: ${name}`);
      }

      /** @type {string | undefined} */
      let username;

      const { os, distro } = options;
      if (os || distro) {
        username = getUsernameForDistro(distro || os);
      }

      return { hostname: publicIp, username };
    };

    const spawn = async (command, options) => {
      const connectOptions = connect();
      return spawnSsh({ ...connectOptions, command }, options);
    };

    const spawnSafe = async (command, options) => {
      const connectOptions = connect();
      return spawnSshSafe({ ...connectOptions, command }, options);
    };

    const rdp = async () => {
      const { hostname, username } = connect();
      const rdpUsername = `${username}-rdp`;
      const password = await google.resetWindowsPassword(instanceId, rdpUsername, zoneId, { wait: true });
      return { hostname, username: rdpUsername, password };
    };

    const attach = async () => {
      const connectOptions = connect();
      await spawnSshSafe({ ...connectOptions });
    };

    const upload = async (source, destination) => {
      const connectOptions = connect();
      await spawnScp({ ...connectOptions, source, destination });
    };

    const snapshot = async name => {
      const stopResult = await this.stopInstance(instanceId, zoneId);
      console.log(stopResult);
      const image = await this.createImage({
        ["source-disk"]: instanceId,
        ["zone"]: zoneId,
        ["name"]: name || `${instanceId}-snapshot-${Date.now()}`,
      });
      console.log(image);
      return;
    };

    const terminate = async () => {
      await google.deleteInstance(instanceId, zoneId);
    };

    return {
      cloud: "google",
      os,
      arch,
      distro,
      release,
      id: instanceId,
      imageId: undefined,
      name,
      instanceType: machineType,
      region: zoneId,
      publicIp,
      preemptible,
      labels,
      spawn,
      spawnSafe,
      rdp,
      attach,
      upload,
      snapshot,
      close: terminate,
      [Symbol.asyncDispose]: terminate,
    };
  },

  /**
   * @param {Record<string, string>} [labels]
   * @returns {Promise<Machine[]>}
   */
  async getMachines(labels) {
    // gcloud filters address labels as `labels.<key>=<value>`; passing the raw
    // record through getFilters would stringify it as "[object Object]".
    const filters = Object.fromEntries(Object.entries(labels || {}).map(([key, value]) => [`labels.${key}`, value]));
    const instances = await google.listInstances(filters);
    return instances.map(instance => this.toMachine(instance));
  },

  /**
   * @param {MachineOptions} options
   * @returns {Promise<MachineImage>}
   */
  async getImage(options) {
    const { os, arch, distro, release } = options;
    const architecture = arch === "aarch64" ? "ARM64" : "X86_64";

    let name;
    let username;
    if (os === "linux") {
      if (distro === "debian") {
        name = `debian-${release}-*`;
        username = "admin";
      } else if (distro === "ubuntu") {
        name = `ubuntu-${release.replace(/\./g, "")}-*`;
        username = "ubuntu";
      }
    } else if (os === "windows" && arch === "x64") {
      if (distro === "server") {
        name = `windows-server-${release}-dc-core-*`;
        username = "administrator";
      }
    }

    if (name && username) {
      const images = await google.listImages({ name, architecture });
      if (images.length) {
        const [image] = images;
        const { name, selfLink } = image;
        return {
          id: selfLink,
          name,
          username,
        };
      }
    }

    throw new Error(`Unsupported platform: ${inspect(options)}`);
  },
};
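End to end, this module follows the same Machine contract as the other clouds. A hedged usage sketch; the zone and machine type come from createMachine's defaults above:

const machine = await google.createMachine({ os: "linux", distro: "debian", release: "12", arch: "x64" });
try {
  await machine.spawnSafe(["uname", "-a"], { stdio: "inherit" });
} finally {
  await machine.close(); // deletes the instance and all of its disks
}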
scripts/machine.mjs (1551 lines): file diff suppressed because it is too large.
@@ -1,195 +0,0 @@
import { inspect } from "node:util";
// Note: spawnSsh is used by toMachine below but was missing from this import.
import { $, mkdtemp, rm, spawnSafe, writeFile, getUsernameForDistro, spawnSsh, spawnSshSafe, setupUserData } from "./utils.mjs";
import { getUserData } from "./machine.mjs";

/**
 * @link https://docs.orbstack.dev/
 */
export const orbstack = {
  get name() {
    return "orbstack";
  },

  /**
   * @typedef {Object} OrbstackImage
   * @property {string} distro
   * @property {string} version
   * @property {string} arch
   */

  /**
   * @param {Platform} platform
   * @returns {OrbstackImage}
   */
  getImage(platform) {
    const { os, arch, distro, release } = platform;
    if (os !== "linux" || !/^debian|ubuntu|alpine|fedora|centos$/.test(distro)) {
      throw new Error(`Unsupported platform: ${inspect(platform)}`);
    }

    return {
      distro,
      version: release,
      arch: arch === "aarch64" ? "arm64" : "amd64",
    };
  },

  /**
   * @typedef {Object} OrbstackVm
   * @property {string} id
   * @property {string} name
   * @property {"running"} state
   * @property {OrbstackImage} image
   * @property {OrbstackConfig} config
   */

  /**
   * @typedef {Object} OrbstackConfig
   * @property {string} default_username
   * @property {boolean} isolated
   */

  /**
   * @typedef {Object} OrbstackVmOptions
   * @property {string} [name]
   * @property {OrbstackImage} image
   * @property {string} [username]
   * @property {string} [password]
   * @property {string} [userData]
   */

  /**
   * @param {OrbstackVmOptions} options
   * @returns {Promise<OrbstackVm>}
   */
  async createVm(options) {
    const { name, image, username, password, userData } = options;
    const { distro, version, arch } = image;
    const uniqueId = name || `linux-${distro}-${version}-${arch}-${Math.random().toString(36).slice(2, 11)}`;

    const args = [`--arch=${arch}`, `${distro}:${version}`, uniqueId];
    if (username) {
      args.push(`--user=${username}`);
    }
    if (password) {
      args.push(`--set-password=${password}`);
    }

    let userDataPath;
    if (userData) {
      userDataPath = mkdtemp("orbstack-user-data-", "user-data.txt");
      console.log("User data path:", userDataPath);
      writeFile(userDataPath, userData);
      args.push(`--user-data=${userDataPath}`);
    }

    try {
      await spawnSafe($`orbctl create ${args}`);
    } finally {
      if (userDataPath) {
        rm(userDataPath);
      }
    }

    return this.inspectVm(uniqueId);
  },

  /**
   * @param {string} name
   */
  async deleteVm(name) {
    await spawnSafe($`orbctl delete ${name}`, {
      throwOnError: error => !/machine not found/i.test(inspect(error)),
    });
  },

  /**
   * @param {string} name
   * @returns {Promise<OrbstackVm | undefined>}
   */
  async inspectVm(name) {
    const { exitCode, stdout } = await spawnSafe($`orbctl info ${name} --format=json`, {
      throwOnError: error => !/machine not found/i.test(inspect(error)),
    });
    if (exitCode === 0) {
      return JSON.parse(stdout);
    }
  },

  /**
   * @returns {Promise<OrbstackVm[]>}
   */
  async listVms() {
    const { stdout } = await spawnSafe($`orbctl list --format=json`);
    return JSON.parse(stdout);
  },

  /**
   * @param {MachineOptions} options
   * @returns {Promise<Machine>}
   */
  async createMachine(options) {
    const { distro } = options;
    const username = getUsernameForDistro(distro);
    const userData = getUserData({ ...options, username });

    const image = this.getImage(options);
    const vm = await this.createVm({
      image,
      username,
      userData,
    });

    const machine = this.toMachine(vm, options);

    await setupUserData(machine, options);

    return machine;
  },

  /**
   * @param {OrbstackVm} vm
   * @returns {Machine}
   */
  toMachine(vm) {
    const { id, name, config } = vm;

    const { default_username: username } = config;
    const connectOptions = {
      username,
      hostname: `${name}@orb`,
    };

    const exec = async (command, options) => {
      return spawnSsh({ ...connectOptions, command }, options);
    };

    const execSafe = async (command, options) => {
      return spawnSshSafe({ ...connectOptions, command }, options);
    };

    const attach = async () => {
      await spawnSshSafe({ ...connectOptions });
    };

    const upload = async (source, destination) => {
      await spawnSafe(["orbctl", "push", `--machine=${name}`, source, destination]);
    };

    const close = async () => {
      await this.deleteVm(name);
    };

    return {
      cloud: "orbstack",
      id,
      name,
      spawn: exec,
      spawnSafe: execSafe,
      upload,
      attach,
      close,
      [Symbol.asyncDispose]: close,
    };
  },
};
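Same Machine contract as the other backends; a hedged sketch, with distro support as constrained by getImage above:

const machine = await orbstack.createMachine({ os: "linux", distro: "ubuntu", release: "24.04", arch: "aarch64" });
try {
  await machine.spawnSafe(["echo", "hello from the VM"], { stdio: "inherit" });
} finally {
  await machine.close(); // deletes the VM via `orbctl delete`
}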
@@ -26,7 +26,7 @@ import {
   getBuildUrl,
   getEnv,
   getFileUrl,
-  getLoggedInUserCountOrDetails,
+  getLoggedInUserCount,
   getShell,
   getWindowsExitReason,
   isBuildkite,
@@ -102,10 +102,6 @@ const { values: options, positionals: filters } = parseArgs({
       type: "string",
       default: undefined,
     },
-    ["retries"]: {
-      type: "string",
-      default: isCI ? "4" : "0", // N retries = N+1 attempts
-    },
   },
 });

@@ -145,11 +141,7 @@ async function runTests() {

   let i = 0;
   let total = vendorTotal + tests.length + 2;

-  const okResults = [];
-  const flakyResults = [];
-  const failedResults = [];
-  const maxAttempts = 1 + (parseInt(options["retries"]) || 0);
+  const results = [];

   /**
    * @param {string} title
@@ -157,79 +149,43 @@
    * @returns {Promise<TestResult>}
    */
   const runTest = async (title, fn) => {
-    const index = ++i;
-
-    let result, failure, flaky;
-    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
-      if (attempt > 1) {
-        await new Promise(resolve => setTimeout(resolve, 5000 + Math.random() * 10_000));
-      }
-
-      result = await startGroup(
-        attempt === 1
-          ? `${getAnsi("gray")}[${index}/${total}]${getAnsi("reset")} ${title}`
-          : `${getAnsi("gray")}[${index}/${total}]${getAnsi("reset")} ${title} ${getAnsi("gray")}[attempt #${attempt}]${getAnsi("reset")}`,
-        fn,
-      );
-
-      const { ok, stdoutPreview, error } = result;
-      if (ok) {
-        if (failure) {
-          flakyResults.push(failure);
-        } else {
-          okResults.push(result);
-        }
-        break;
-      }
-
-      const color = attempt >= maxAttempts ? "red" : "yellow";
-      const label = `${getAnsi(color)}[${index}/${total}] ${title} - ${error}${getAnsi("reset")}`;
-      startGroup(label, () => {
-        process.stderr.write(stdoutPreview);
-      });
-
-      failure ||= result;
-      flaky ||= true;
-
-      if (attempt >= maxAttempts) {
-        flaky = false;
-        failedResults.push(failure);
-      }
-    }
-
-    if (!failure) {
-      return result;
-    }
+    const label = `${getAnsi("gray")}[${++i}/${total}]${getAnsi("reset")} ${title}`;
+    const result = await startGroup(label, fn);
+    results.push(result);

     if (isBuildkite) {
-      // Group flaky tests together, regardless of the title
-      const context = flaky ? "flaky" : title;
-      const style = flaky || title.startsWith("vendor") ? "warning" : "error";
-
+      const { ok, error, stdoutPreview } = result;
       if (title.startsWith("vendor")) {
-        const content = formatTestToMarkdown({ ...failure, testPath: title });
-        if (content) {
-          reportAnnotationToBuildKite({ context, label: title, content, style });
+        const markdown = formatTestToMarkdown({ ...result, testPath: title });
+        if (markdown) {
+          reportAnnotationToBuildKite({ label: title, content: markdown, style: "warning", priority: 5 });
         }
       } else {
-        const content = formatTestToMarkdown(failure);
-        if (content) {
-          reportAnnotationToBuildKite({ context, label: title, content, style });
+        const markdown = formatTestToMarkdown(result);
+        if (markdown) {
+          reportAnnotationToBuildKite({ label: title, content: markdown, style: "error" });
        }
      }
+
+      if (!ok) {
+        const label = `${getAnsi("red")}[${i}/${total}] ${title} - ${error}${getAnsi("reset")}`;
+        startGroup(label, () => {
+          process.stderr.write(stdoutPreview);
+        });
+      }
     }

     if (isGithubAction) {
       const summaryPath = process.env["GITHUB_STEP_SUMMARY"];
       if (summaryPath) {
-        const longMarkdown = formatTestToMarkdown(failure);
+        const longMarkdown = formatTestToMarkdown(result);
         appendFileSync(summaryPath, longMarkdown);
       }
-      const shortMarkdown = formatTestToMarkdown(failure, true);
+      const shortMarkdown = formatTestToMarkdown(result, true);
       appendFileSync("comment.md", shortMarkdown);
     }

-    if (options["bail"]) {
+    if (options["bail"] && !result.ok) {
       process.exit(getExitCode("fail"));
     }

@@ -243,7 +199,7 @@ async function runTests() {
     }
   }

-  if (!failedResults.length) {
+  if (results.every(({ ok }) => ok)) {
     for (const testPath of tests) {
       const title = relative(cwd, join(testsPath, testPath)).replace(/\\/g, "/");
       if (title.startsWith("test/js/node/test/parallel/")) {
@@ -314,37 +270,21 @@ async function runTests() {
     }
   }

+  const failedTests = results.filter(({ ok }) => !ok);
   if (isGithubAction) {
-    reportOutputToGitHubAction("failing_tests_count", failedResults.length);
-    const markdown = formatTestToMarkdown(failedResults);
+    reportOutputToGitHubAction("failing_tests_count", failedTests.length);
+    const markdown = formatTestToMarkdown(failedTests);
     reportOutputToGitHubAction("failing_tests", markdown);
   }

-  if (!isCI && !isQuiet) {
-    console.table({
-      "Total Tests": okResults.length + failedResults.length + flakyResults.length,
-      "Passed Tests": okResults.length,
-      "Failing Tests": failedResults.length,
-      "Flaky Tests": flakyResults.length,
-    });
-
-    if (failedResults.length) {
-      console.log(`${getAnsi("red")}Failing Tests:${getAnsi("reset")}`);
-      for (const { testPath } of failedResults) {
-        console.log(`${getAnsi("red")}- ${testPath}${getAnsi("reset")}`);
-      }
-    }
-
-    if (flakyResults.length) {
-      console.log(`${getAnsi("yellow")}Flaky Tests:${getAnsi("reset")}`);
-      for (const { testPath } of flakyResults) {
-        console.log(`${getAnsi("yellow")}- ${testPath}${getAnsi("reset")}`);
-      }
-    }
-  }
+  if (!isCI) {
+    !isQuiet && console.log("-------");
+    !isQuiet && console.log("passing", results.length - failedTests.length, "/", results.length);
+    for (const { testPath } of failedTests) {
+      !isQuiet && console.log("-", testPath);
+    }
+  }

-  // Exclude flaky tests from the final results
-  return [...okResults, ...failedResults];
+  return results;
 }

 /**
@@ -1353,7 +1293,6 @@ function listArtifactsFromBuildKite(glob, step) {

 /**
  * @typedef {object} BuildkiteAnnotation
- * @property {string} [context]
  * @property {string} label
  * @property {string} content
  * @property {"error" | "warning" | "info"} [style]
@@ -1364,10 +1303,10 @@ function listArtifactsFromBuildKite(glob, step) {
 /**
  * @param {BuildkiteAnnotation} annotation
  */
-function reportAnnotationToBuildKite({ context, label, content, style = "error", priority = 3, attempt = 0 }) {
+function reportAnnotationToBuildKite({ label, content, style = "error", priority = 3, attempt = 0 }) {
   const { error, status, signal, stderr } = spawnSync(
     "buildkite-agent",
-    ["annotate", "--append", "--style", `${style}`, "--context", `${context || label}`, "--priority", `${priority}`],
+    ["annotate", "--append", "--style", `${style}`, "--context", `${label}`, "--priority", `${priority}`],
     {
       input: content,
       stdio: ["pipe", "ignore", "pipe"],
@@ -1560,7 +1499,7 @@ export async function main() {

   let waitForUser = false;
   while (isCI) {
-    const userCount = getLoggedInUserCountOrDetails();
+    const userCount = getLoggedInUserCount();
     if (!userCount) {
       if (waitForUser) {
         !isQuiet && console.log("No users logged in, exiting runner...");
@@ -1570,11 +1509,7 @@ export async function main() {

     if (!waitForUser) {
       startGroup("Summary");
-      if (typeof userCount === "number") {
-        console.warn(`Found ${userCount} users logged in, keeping the runner alive until logout...`);
-      } else {
-        console.warn(userCount);
-      }
+      console.warn(`Found ${userCount} users logged in, keeping the runner alive until logout...`);
       waitForUser = true;
     }
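The deleted branch above wrapped each test in retry-with-jitter; for reference, the core pattern it used, as a minimal sketch with the constants taken from the removed code:

// Re-run a flaky async step up to maxAttempts times, sleeping 5-15s between tries.
async function withRetries(fn, maxAttempts) {
  let result;
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    if (attempt > 1) {
      await new Promise(resolve => setTimeout(resolve, 5000 + Math.random() * 10_000));
    }
    result = await fn(attempt);
    if (result.ok) {
      break;
    }
  }
  return result;
}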
scripts/tart.mjs (283 lines)
@@ -1,283 +0,0 @@
import { inspect } from "node:util";
// Note: spawnSsh, spawnSshSafe, spawnScp, and spawnRdp are used by toMachine below
// and are assumed to come from utils.mjs alongside the other helpers; they were
// missing from the original import.
import { isPrivileged, spawnSafe, spawnSsh, spawnSshSafe, spawnScp, spawnRdp, which } from "./utils.mjs";

/**
 * @link https://tart.run/
 * @link https://github.com/cirruslabs/tart
 */
export const tart = {
  get name() {
    return "tart";
  },

  /**
   * @param {string[]} args
   * @param {import("./utils.mjs").SpawnOptions} options
   * @returns {Promise<unknown>}
   */
  async spawn(args, options) {
    const tart = which("tart", { required: true });
    const { json } = options || {};
    const command = json ? [tart, ...args, "--format=json"] : [tart, ...args];

    const { stdout } = await spawnSafe(command, options);
    if (!json) {
      return stdout;
    }

    try {
      return JSON.parse(stdout);
    } catch {
      return;
    }
  },

  /**
   * @typedef {"sequoia" | "sonoma" | "ventura" | "monterey"} TartDistro
   * @typedef {`ghcr.io/cirruslabs/macos-${TartDistro}-xcode`} TartImage
   * @link https://github.com/orgs/cirruslabs/packages?repo_name=macos-image-templates
   */

  /**
   * @param {Platform} platform
   * @returns {TartImage}
   */
  getImage(platform) {
    const { os, arch, release } = platform;
    if (os !== "darwin" || arch !== "aarch64") {
      throw new Error(`Unsupported platform: ${inspect(platform)}`);
    }
    const distros = {
      "15": "sequoia",
      "14": "sonoma",
      "13": "ventura",
      "12": "monterey",
    };
    const distro = distros[release];
    if (!distro) {
      throw new Error(`Unsupported macOS release: ${release}`);
    }
    return `ghcr.io/cirruslabs/macos-${distro}-xcode`;
  },

  /**
   * @typedef {Object} TartVm
   * @property {string} Name
   * @property {"running" | "stopped"} State
   * @property {"local"} Source
   * @property {number} Size
   * @property {number} Disk
   * @property {number} [CPU]
   * @property {number} [Memory]
   */

  /**
   * @returns {Promise<TartVm[]>}
   */
  async listVms() {
    return this.spawn(["list"], { json: true });
  },

  /**
   * @param {string} name
   * @returns {Promise<TartVm | undefined>}
   */
  async getVm(name) {
    const result = await this.spawn(["get", name], {
      json: true,
      throwOnError: error => !/does not exist/i.test(inspect(error)),
    });
    // Return undefined when the VM does not exist; spreading an undefined result
    // into `{ Name: name }` made the value truthy and broke the check in cloneVm.
    if (!result) {
      return;
    }
    return {
      Name: name,
      ...result,
    };
  },

  /**
   * @param {string} name
   * @returns {Promise<void>}
   */
  async stopVm(name) {
    await this.spawn(["stop", name, "--timeout=0"], {
      throwOnError: error => !/does not exist|is not running/i.test(inspect(error)),
    });
  },

  /**
   * @param {string} name
   * @returns {Promise<void>}
   */
  async deleteVm(name) {
    await this.stopVm(name);
    await this.spawn(["delete", name], {
      throwOnError: error => !/does not exist/i.test(inspect(error)),
    });
  },

  /**
   * @param {string} name
   * @param {TartImage} image
   * @returns {Promise<void>}
   */
  async cloneVm(name, image) {
    const localName = image.split("/").pop();
    const localVm = await this.getVm(localName);
    if (localVm) {
      const { Name } = localVm;
      await this.spawn(["clone", Name, name]);
      return;
    }

    console.log(`Cloning macOS image: ${image} (this will take a long time)`);
    await this.spawn(["clone", image, localName]);
    await this.spawn(["clone", localName, name]);
  },

  /**
   * @typedef {Object} TartMount
   * @property {boolean} [readOnly]
   * @property {string} source
   * @property {string} destination
   */

  /**
   * @typedef {Object} TartVmOptions
   * @property {number} [cpuCount]
   * @property {number} [memoryGb]
   * @property {number} [diskSizeGb]
   * @property {boolean} [no-graphics]
   * @property {boolean} [no-audio]
   * @property {boolean} [no-clipboard]
   * @property {boolean} [recovery]
   * @property {boolean} [vnc]
   * @property {boolean} [vnc-experimental]
   * @property {boolean} [net-softnet]
   * @property {TartMount[]} [dir]
   */

  /**
   * @param {string} name
   * @param {TartVmOptions} options
   * @returns {Promise<void>}
   */
  async runVm(name, options = {}) {
    const { cpuCount, memoryGb, diskSizeGb, dir, ...vmOptions } = options;

    const setArgs = ["--random-mac", "--random-serial"];
    if (cpuCount) {
      setArgs.push(`--cpu=${cpuCount}`);
    }
    if (memoryGb) {
      setArgs.push(`--memory=${memoryGb}`);
    }
    if (diskSizeGb) {
      setArgs.push(`--disk-size=${diskSizeGb}`);
    }
    await this.spawn(["set", name, ...setArgs]);

    const args = Object.entries(vmOptions)
      .filter(([, value]) => value !== undefined)
      .flatMap(([key, value]) => (typeof value === "boolean" ? (value ? [`--${key}`] : []) : [`--${key}=${value}`]));
    if (dir?.length) {
      args.push(
        ...dir.map(({ source, destination, readOnly }) => `--dir=${source}:${destination}${readOnly ? ":ro" : ""}`),
      );
    }

    // This command is blocking, so it needs to be detached and not awaited
    this.spawn(["run", name, ...args], { detached: true });
  },

  /**
   * @param {string} name
   * @returns {Promise<string | undefined>}
   */
  async getVmIp(name) {
    const stdout = await this.spawn(["ip", name], {
      retryOnError: error => /no IP address found/i.test(inspect(error)),
      throwOnError: error => !/does not exist/i.test(inspect(error)),
    });
    return stdout?.trim();
  },

  /**
   * @param {MachineOptions} options
   * @returns {Promise<Machine>}
   */
  async createMachine(options) {
    const { name, imageName, cpuCount, memoryGb, diskSizeGb, rdp } = options;

    const image = imageName || this.getImage(options);
    const machineId = name || `i-${Math.random().toString(36).slice(2, 11)}`;
    await this.cloneVm(machineId, image);

    await this.runVm(machineId, {
      cpuCount,
      memoryGb,
      diskSizeGb,
      "net-softnet": isPrivileged(),
      "no-audio": true,
      "no-clipboard": true,
      "no-graphics": true,
      "vnc-experimental": rdp,
    });

    return this.toMachine(machineId);
  },

  /**
   * @param {string} name
   * @returns {Machine}
   */
  toMachine(name) {
    const connect = async () => {
      const hostname = await this.getVmIp(name);
      return {
        hostname,
        // hardcoded by base images
        username: "admin",
        password: "admin",
      };
    };

    const exec = async (command, options) => {
      const connectOptions = await connect();
      return spawnSsh({ ...connectOptions, command }, options);
    };

    const execSafe = async (command, options) => {
      const connectOptions = await connect();
      return spawnSshSafe({ ...connectOptions, command }, options);
    };

    const attach = async () => {
      const connectOptions = await connect();
      await spawnSshSafe({ ...connectOptions });
    };

    const upload = async (source, destination) => {
      const connectOptions = await connect();
      await spawnScp({ ...connectOptions, source, destination });
    };

    const rdp = async () => {
      const connectOptions = await connect();
      await spawnRdp({ ...connectOptions });
    };

    const close = async () => {
      await this.deleteVm(name);
    };

    return {
      cloud: "tart",
      id: name,
      spawn: exec,
      spawnSafe: execSafe,
      attach,
      upload,
      close,
      [Symbol.asyncDispose]: close,
    };
  },
};
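A hedged usage sketch for the tart backend. It only runs on Apple Silicon hosts per getImage, and release "14" maps to the sonoma template:

const machine = await tart.createMachine({ os: "darwin", arch: "aarch64", release: "14" });
try {
  await machine.spawnSafe(["sw_vers"], { stdio: "inherit" });
} finally {
  await machine.close(); // stops and deletes the cloned VM
}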
@@ -243,7 +243,7 @@ export async function spawn(command, options = {}) {
     cwd: options["cwd"] ?? process.cwd(),
     timeout: options["timeout"] ?? undefined,
     env: options["env"] ?? undefined,
-    stdio: stdin === "inherit" ? "inherit" : [stdin ? "pipe" : "ignore", "pipe", "pipe"],
+    stdio: [stdin ? "pipe" : "ignore", "pipe", "pipe"],
     ...options,
   };

@@ -355,7 +355,7 @@ export function spawnSync(command, options = {}) {
     cwd: options["cwd"] ?? process.cwd(),
     timeout: options["timeout"] ?? undefined,
     env: options["env"] ?? undefined,
-    stdio: stdin === "inherit" ? "inherit" : [typeof stdin === "undefined" ? "ignore" : "pipe", "pipe", "pipe"],
+    stdio: [typeof stdin === "undefined" ? "ignore" : "pipe", "pipe", "pipe"],
     input: stdin,
     ...options,
   };
@@ -379,8 +379,8 @@
   } else {
     exitCode = status ?? 1;
     signalCode = signal || undefined;
-    stdout = stdoutBuffer?.toString?.() ?? "";
-    stderr = stderrBuffer?.toString?.() ?? "";
+    stdout = stdoutBuffer?.toString();
+    stderr = stderrBuffer?.toString();
   }

   if (exitCode !== 0 && isWindows) {
@@ -1861,34 +1861,6 @@ export function getUsername() {
   return username;
 }

-/**
- * @param {string} distro
- * @returns {string}
- */
-export function getUsernameForDistro(distro) {
-  if (/windows/i.test(distro)) {
-    return "administrator";
-  }
-
-  if (/alpine|centos/i.test(distro)) {
-    return "root";
-  }
-
-  if (/debian/i.test(distro)) {
-    return "admin";
-  }
-
-  if (/ubuntu/i.test(distro)) {
-    return "ubuntu";
-  }
-
-  if (/amazon|amzn|al\d+|rhel/i.test(distro)) {
-    return "ec2-user";
-  }
-
-  throw new Error(`Unsupported distro: ${distro}`);
-}
-
 /**
  * @typedef {object} User
  * @property {string} username
@@ -2237,7 +2209,7 @@ export async function waitForPort(options) {
   return cause;
 }
 /**
- * @returns {Promise<number>}
+ * @returns {Promise<number | undefined>}
  */
 export async function getCanaryRevision() {
   if (isPullRequest() || isFork()) {
@@ -2716,7 +2688,7 @@ export function printEnvironment() {
 /**
  * @returns {number | undefined}
  */
-export function getLoggedInUserCountOrDetails() {
+export function getLoggedInUserCount() {
   if (isWindows) {
     const pwsh = which(["pwsh", "powershell"]);
     if (pwsh) {
@@ -2733,31 +2705,7 @@ getLoggedInUserCountOrDetails() {

   const { error, stdout } = spawnSync(["who"]);
   if (!error) {
-    const users = stdout
-      .split("\n")
-      .filter(line => /tty|pts/i.test(line))
-      .map(line => {
-        // who output format: username terminal date/time (ip)
-        const [username, terminal, datetime, ip] = line.split(/\s+/);
-        return {
-          username,
-          terminal,
-          datetime,
-          ip: (ip || "").replace(/[()]/g, ""), // Remove parentheses from IP
-        };
-      });
-
-    if (users.length === 0) {
-      return 0;
-    }
-
-    let message = `${users.length} currently logged in users:`;
-
-    for (const user of users) {
-      message += `\n- ${user.username} on ${user.terminal} since ${user.datetime}${user.ip ? ` from ${user.ip}` : ""}`;
-    }
-
-    return message;
+    return stdout.split("\n").filter(line => /tty|pts/i.test(line)).length;
   }
 }

@@ -2771,7 +2719,6 @@ const emojiMap = {
   alpine: ["🐧", "alpine"],
   aws: ["☁️", "aws"],
   amazonlinux: ["🐧", "aws"],
-  nix: ["🐧", "nix"],
   windows: ["🪟", "windows"],
   true: ["✅", "white_check_mark"],
   false: ["❌", "x"],
@@ -2801,108 +2748,3 @@ export function getBuildkiteEmoji(emoji) {
   const [, name] = emojiMap[emoji] || [];
   return name ? `:${name}:` : "";
 }
-
-/**
- * @param {SshOptions} options
- * @param {import("./utils.mjs").SpawnOptions} [spawnOptions]
- * @returns {Promise<import("./utils.mjs").SpawnResult>}
- */
-export async function spawnSshSafe(options, spawnOptions = {}) {
-  return spawnSsh(options, { throwOnError: true, ...spawnOptions });
-}
-
-/**
- * @param {SshOptions} options
- * @param {import("./utils.mjs").SpawnOptions} [spawnOptions]
- * @returns {Promise<import("./utils.mjs").SpawnResult>}
- */
-export async function spawnSsh(options, spawnOptions = {}) {
-  const { hostname, port, username, identityPaths, password, retries = 10, command: spawnCommand } = options;
-
-  if (!hostname.includes("@")) {
-    await waitForPort({
-      hostname,
-      port: port || 22,
-    });
-  }
-
-  const logPath = mkdtemp("ssh-", "ssh.log");
-  const command = ["ssh", hostname, "-v", "-C", "-E", logPath, "-o", "StrictHostKeyChecking=no"];
-  if (!password) {
-    command.push("-o", "BatchMode=yes");
-  }
-  if (port) {
-    command.push("-p", port);
-  }
-  if (username) {
-    command.push("-l", username);
-  }
-  if (password) {
-    const sshPass = which("sshpass", { required: true });
-    command.unshift(sshPass, "-p", password);
-  } else if (identityPaths) {
-    command.push(...identityPaths.flatMap(path => ["-i", path]));
-  }
-  const stdio = spawnCommand ? "pipe" : "inherit";
-  if (spawnCommand) {
-    command.push(...spawnCommand);
-  }
-
-  /** @type {import("./utils.mjs").SpawnResult} */
-  let result;
-  for (let i = 0; i < retries; i++) {
-    result = await spawn(command, { stdio, ...spawnOptions, throwOnError: undefined });
-
-    const { exitCode } = result;
-    if (exitCode !== 255) {
-      break;
-    }
-
-    const sshLogs = readFile(logPath, { encoding: "utf-8" });
-    if (sshLogs.includes("Authenticated")) {
-      break;
-    }
-
-    await new Promise(resolve => setTimeout(resolve, (i + 1) * 15000));
-  }
-
-  if (spawnOptions?.throwOnError) {
-    const { error } = result;
-    if (error) {
-      throw error;
-    }
-  }
-
-  return result;
-}
-
-/**
- * @param {MachineOptions} options
- * @returns {Promise<Machine>}
- */
-export async function setupUserData(machine, options) {
-  const { os, userData } = options;
-  if (!userData) {
-    return;
-  }
-
-  // Write user data to a temporary file
-  const tmpFile = mkdtemp("user-data-", os === "windows" ? "setup.ps1" : "setup.sh");
-  await writeFile(tmpFile, userData);
-
-  try {
-    // Upload the script
-    const remotePath = os === "windows" ? "C:\\Windows\\Temp\\setup.ps1" : "/tmp/setup.sh";
-    await machine.upload(tmpFile, remotePath);
-
-    // Execute the script
-    if (os === "windows") {
-      await machine.spawnSafe(["powershell", remotePath], { stdio: "inherit" });
-    } else {
-      await machine.spawnSafe(["bash", remotePath], { stdio: "inherit" });
-    }
-  } finally {
-    // Clean up the temporary file
-    rm(tmpFile);
-  }
-}
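For reference, how the deleted spawnSsh helper was invoked by the machine wrappers above. A hedged sketch; the host and key path are made up:

// Key auth adds -i flags and BatchMode; password auth routes through sshpass.
// Exit code 255 (ssh transport failure) is retried with linear backoff.
const { exitCode, stdout } = await spawnSsh(
  { hostname: "203.0.113.7", username: "ubuntu", identityPaths: ["~/.ssh/id_ed25519"], command: ["uptime"] },
  { throwOnError: false },
);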
@@ -43,6 +43,8 @@ else if (Environment.isDebug)
|
||||
std.fmt.comptimePrint(version_string ++ "-debug+{s}", .{Environment.git_sha_short})
|
||||
else if (Environment.is_canary)
|
||||
std.fmt.comptimePrint(version_string ++ "-canary.{d}+{s}", .{ Environment.canary_revision, Environment.git_sha_short })
|
||||
else if (Environment.isTest)
|
||||
std.fmt.comptimePrint(version_string ++ "-test+{s}", .{Environment.git_sha_short})
|
||||
else
|
||||
std.fmt.comptimePrint(version_string ++ "+{s}", .{Environment.git_sha_short});
|
||||
|
||||
@@ -66,6 +68,7 @@ else
|
||||
"unknown";
|
||||
|
||||
pub inline fn getStartTime() i128 {
|
||||
if (Environment.isTest) return 0;
|
||||
return bun.start_time;
|
||||
}
|
||||
|
||||
|
||||
@@ -430,11 +430,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
else
|
||||
std.mem.page_size;
|
||||
|
||||
pub const InjectOptions = struct {
|
||||
windows_hide_console: bool = false,
|
||||
};
|
||||
|
||||
pub fn inject(bytes: []const u8, self_exe: [:0]const u8, inject_options: InjectOptions) bun.FileDescriptor {
|
||||
pub fn inject(bytes: []const u8, self_exe: [:0]const u8) bun.FileDescriptor {
|
||||
var buf: bun.PathBuffer = undefined;
|
||||
var zname: [:0]const u8 = bun.span(bun.fs.FileSystem.instance.tmpname("bun-build", &buf, @as(u64, @bitCast(std.time.milliTimestamp()))) catch |err| {
|
||||
Output.prettyErrorln("<r><red>error<r><d>:<r> failed to get temporary file name: {s}", .{@errorName(err)});
|
||||
@@ -443,11 +439,6 @@ pub const StandaloneModuleGraph = struct {
|
||||
|
||||
const cleanup = struct {
|
||||
pub fn toClean(name: [:0]const u8, fd: bun.FileDescriptor) void {
|
||||
// Ensure we own the file
|
||||
if (Environment.isPosix) {
|
||||
// Make the file writable so we can delete it
|
||||
_ = Syscall.fchmod(fd, 0o777);
|
||||
}
|
||||
_ = Syscall.close(fd);
|
||||
_ = Syscall.unlink(name);
|
||||
}
|
||||
@@ -474,7 +465,7 @@ pub const StandaloneModuleGraph = struct {
|
||||
bun.invalid_fd,
|
||||
out,
|
||||
// access_mask
|
||||
w.SYNCHRONIZE | w.GENERIC_WRITE | w.GENERIC_READ | w.DELETE,
|
||||
w.SYNCHRONIZE | w.GENERIC_WRITE | w.DELETE,
|
||||
// create disposition
|
||||
w.FILE_OPEN,
|
||||
// create options
|
||||
@@ -641,15 +632,6 @@ pub const StandaloneModuleGraph = struct {
|
||||
_ = bun.C.fchmod(cloned_executable_fd.int(), 0o777);
|
||||
}
|
||||
|
||||
if (Environment.isWindows and inject_options.windows_hide_console) {
|
||||
bun.windows.editWin32BinarySubsystem(.{ .handle = cloned_executable_fd }, .windows_gui) catch |err| {
|
||||
Output.err(err, "failed to disable console on executable", .{});
|
||||
cleanup(zname, cloned_executable_fd);
|
||||
|
||||
Global.exit(1);
|
||||
};
|
||||
}
|
||||
|
||||
return cloned_executable_fd;
|
||||
}
|
||||
|
||||
@@ -677,8 +659,6 @@ pub const StandaloneModuleGraph = struct {
|
||||
outfile: []const u8,
|
||||
env: *bun.DotEnv.Loader,
|
||||
output_format: bun.options.Format,
|
||||
windows_hide_console: bool,
|
||||
windows_icon: ?[]const u8,
|
||||
) !void {
|
||||
const bytes = try toBytes(allocator, module_prefix, output_files, output_format);
|
||||
if (bytes.len == 0) return;
|
||||
@@ -695,7 +675,6 @@ pub const StandaloneModuleGraph = struct {
|
||||
Output.err(err, "failed to download cross-compiled bun executable", .{});
|
||||
Global.exit(1);
|
||||
},
|
||||
.{ .windows_hide_console = windows_hide_console },
|
||||
);
|
||||
fd.assertKind(.system);
|
||||
|
||||
@@ -720,15 +699,6 @@ pub const StandaloneModuleGraph = struct {
|
||||
|
||||
Global.exit(1);
|
||||
};
|
||||
_ = bun.sys.close(fd);
|
||||
|
||||
if (windows_icon) |icon_utf8| {
|
||||
var icon_buf: bun.OSPathBuffer = undefined;
|
||||
const icon = bun.strings.toWPathNormalized(&icon_buf, icon_utf8);
|
||||
bun.windows.rescle.setIcon(outfile_slice, icon) catch {
|
||||
Output.warn("Failed to set executable icon", .{});
|
||||
};
|
||||
}
|
||||
return;
|
||||
}

@@ -4,7 +4,6 @@ const FeatureFlags = @import("./feature_flags.zig");
const Environment = @import("./env.zig");
const FixedBufferAllocator = std.heap.FixedBufferAllocator;
const bun = @import("root").bun;
const OOM = bun.OOM;

pub fn isSliceInBufferT(comptime T: type, slice: []const T, buffer: []const T) bool {
return (@intFromPtr(buffer.ptr) <= @intFromPtr(slice.ptr) and
@@ -329,7 +328,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
return @constCast(slice);
}

pub fn appendMutable(self: *Self, comptime AppendType: type, _value: AppendType) OOM![]u8 {
pub fn appendMutable(self: *Self, comptime AppendType: type, _value: AppendType) ![]u8 {
const appended = try @call(bun.callmod_inline, append, .{ self, AppendType, _value });
return @constCast(appended);
}
@@ -338,17 +337,17 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
return try self.appendMutable(EmptyType, EmptyType{ .len = len });
}

pub fn printWithType(self: *Self, comptime fmt: []const u8, comptime Args: type, args: Args) OOM![]const u8 {
pub fn printWithType(self: *Self, comptime fmt: []const u8, comptime Args: type, args: Args) ![]const u8 {
var buf = try self.appendMutable(EmptyType, EmptyType{ .len = std.fmt.count(fmt, args) + 1 });
buf[buf.len - 1] = 0;
return std.fmt.bufPrint(buf.ptr[0 .. buf.len - 1], fmt, args) catch unreachable;
}

pub fn print(self: *Self, comptime fmt: []const u8, args: anytype) OOM![]const u8 {
pub fn print(self: *Self, comptime fmt: []const u8, args: anytype) ![]const u8 {
return try printWithType(self, fmt, @TypeOf(args), args);
}

pub fn append(self: *Self, comptime AppendType: type, _value: AppendType) OOM![]const u8 {
pub fn append(self: *Self, comptime AppendType: type, _value: AppendType) ![]const u8 {
self.mutex.lock();
defer self.mutex.unlock();

@@ -356,7 +355,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
}

threadlocal var lowercase_append_buf: bun.PathBuffer = undefined;
pub fn appendLowerCase(self: *Self, comptime AppendType: type, _value: AppendType) OOM![]const u8 {
pub fn appendLowerCase(self: *Self, comptime AppendType: type, _value: AppendType) ![]const u8 {
self.mutex.lock();
defer self.mutex.unlock();

@@ -375,7 +374,7 @@ pub fn BSSStringList(comptime _count: usize, comptime _item_length: usize) type
self: *Self,
comptime AppendType: type,
_value: AppendType,
) OOM![]const u8 {
) ![]const u8 {
const value_len: usize = brk: {
switch (comptime AppendType) {
EmptyType, []const u8, []u8, [:0]const u8, [:0]u8 => {

@@ -104,7 +104,6 @@ pub const Features = struct {
pub var lifecycle_scripts: usize = 0;
pub var loaders: usize = 0;
pub var lockfile_migration_from_package_lock: usize = 0;
pub var text_lockfile: usize = 0;
pub var macros: usize = 0;
pub var no_avx2: usize = 0;
pub var no_avx: usize = 0;

@@ -2979,8 +2979,6 @@ pub const Api = struct {

cafile: ?[]const u8 = null,

save_text_lockfile: ?bool = null,

ca: ?union(enum) {
str: []const u8,
list: []const []const u8,
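
For orientation, the fragment above ends mid-struct; read as a plain data shape, the visible fields correspond roughly to the following TypeScript interface. This is an illustration of the shape only, not a generated binding.

// Rough shape of the visible install-options fields (illustrative only).
interface InstallOptionsFragment {
  cafile?: string;              // cafile: ?[]const u8
  save_text_lockfile?: boolean; // save_text_lockfile: ?bool
  ca?: string | string[];       // ca: ?union(enum) { str, list }
}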
src/api/tsconfig.json (new file, +6)
@@ -0,0 +1,6 @@
{
"compilerOptions": {
"moduleResolution": "node"
},
"include": ["./node_modules/peechy", "./schema.d.ts"]
}
@@ -162,7 +162,6 @@ pub const FilePoll = struct {
const Request = JSC.DNS.InternalDNS.Request;
const LifecycleScriptSubprocessOutputReader = bun.install.LifecycleScriptSubprocess.OutputReader;
const BufferedReader = bun.io.BufferedReader;

pub const Owner = bun.TaggedPointerUnion(.{
FileSink,

@@ -387,7 +386,6 @@
var handler: *BufferedReader = ptr.as(BufferedReader);
handler.onPoll(size_or_offset, poll.flags.contains(.hup));
},

@field(Owner.Tag, bun.meta.typeBaseName(@typeName(Process))) => {
log("onUpdate " ++ kqueue_or_epoll ++ " (fd: {}) Process", .{poll.fd});
var loader = ptr.as(Process);
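
Context for the two hunks above: Owner is a TaggedPointerUnion, a pointer with a type tag packed into its spare bits, and the onUpdate path switches on that tag to recover the concrete owner. A loose TypeScript analogue using a discriminated union follows; member names beyond onPoll are hypothetical.

type PollOwner =
  | { kind: "FileSink"; onPoll(sizeOrOffset: number, hup: boolean): void }
  | { kind: "BufferedReader"; onPoll(sizeOrOffset: number, hup: boolean): void }
  | { kind: "Process"; onExit(sizeOrOffset: number): void }; // hypothetical member

// Dispatch on the tag, as the @field(Owner.Tag, ...) switch arms do above.
function onUpdate(owner: PollOwner, sizeOrOffset: number, hup: boolean): void {
  switch (owner.kind) {
    case "Process":
      owner.onExit(sizeOrOffset); // the Process arm shown above
      break;
    default:
      owner.onPoll(sizeOrOffset, hup); // e.g. handler.onPoll(...) for BufferedReader
  }
}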

@@ -314,7 +314,7 @@ pub fn init(options: Options) bun.JSOOM!*DevServer {
dev.framework = dev.framework.resolve(&dev.server_bundler.resolver, &dev.client_bundler.resolver, options.arena) catch {
if (dev.framework.is_built_in_react)
try bake.Framework.addReactInstallCommandNote(&dev.log);
return global.throwValue(dev.log.toJSAggregateError(global, bun.String.static("Framework is missing required files!")));
return global.throwValue(dev.log.toJSAggregateError(global, "Framework is missing required files!"));
};

errdefer dev.route_lookup.clearAndFree(allocator);
@@ -1870,12 +1870,12 @@ pub fn IncrementalGraph(side: bake.Side) type {
/// exact size, instead of the log approach that dynamic arrays use.
stale_files: DynamicBitSetUnmanaged,

/// Start of a file's 'dependencies' linked list. These are the other
/// files that have imports to this file. Walk this list to discover
/// what files are to be reloaded when something changes.
/// Start of the 'dependencies' linked list. These are the other files
/// that import used by this file. Walk this list to discover what
/// files are to be reloaded when something changes.
first_dep: ArrayListUnmanaged(EdgeIndex.Optional),
/// Start of a file's 'imports' linked lists. These are the files that
/// this file imports.
/// Start of the 'imports' linked list. These are the files that this
/// file imports.
first_import: ArrayListUnmanaged(EdgeIndex.Optional),
/// `File` objects act as nodes in a directional many-to-many graph,
/// where edges represent the imports between modules. An 'dependency'
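
The doc comments above describe IncrementalGraph's storage: files are nodes, and every import edge is threaded through two singly linked lists, one rooted at the imported file's first_dep and one at the importer's first_import. A minimal TypeScript sketch of that layout, assuming plain array indices in place of Zig's packed edge indices:

// Sketch of the edge-pool layout described above (assumed, illustrative).
interface Edge {
  from: number; // importing file index
  to: number;   // imported file index
  nextDep: number | null;    // next edge in `to`'s 'dependencies' list
  nextImport: number | null; // next edge in `from`'s 'imports' list
}

class FileGraph {
  edges: Edge[] = [];
  firstDep: (number | null)[] = [];    // per-file head of the 'dependencies' list
  firstImport: (number | null)[] = []; // per-file head of the 'imports' list

  addImport(from: number, to: number): void {
    const i = this.edges.length;
    this.edges.push({
      from,
      to,
      nextDep: this.firstDep[to] ?? null,
      nextImport: this.firstImport[from] ?? null,
    });
    this.firstDep[to] = i;
    this.firstImport[from] = i;
  }

  // Walk a file's dependents to find what must reload when it changes.
  dependents(file: number): number[] {
    const out: number[] = [];
    for (let e = this.firstDep[file]; e != null; e = this.edges[e].nextDep) {
      out.push(this.edges[e].from);
    }
    return out;
  }
}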

@@ -3319,7 +3319,7 @@ pub const SerializedFailure = struct {
}
};

pub const ErrorKind = enum(u8) {
const ErrorKind = enum(u8) {
// A log message. The `logger.Kind` is encoded here.
bundler_log_err = 0,
bundler_log_warn = 1,

@@ -1084,10 +1084,10 @@ pub const JSFrameworkRouter = struct {
const validators = bun.JSC.Node.validators;

pub fn getBindings(global: *JSC.JSGlobalObject) JSC.JSValue {
return JSC.JSObject.create(.{
return global.createObjectFromStruct(.{
.parseRoutePattern = global.createHostFunction("parseRoutePattern", parseRoutePattern, 1),
.FrameworkRouter = codegen.getConstructor(global),
}, global).toJS();
}).toJS();
}

pub fn constructor(global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) !*JSFrameworkRouter {
@@ -1165,7 +1165,7 @@ pub const JSFrameworkRouter = struct {
var sfb = std.heap.stackFallback(4096, bun.default_allocator);
const alloc = sfb.get();

return JSC.JSObject.create(.{
return global.createObjectFromStruct(.{
.params = if (params_out.params.len > 0) params: {
const obj = JSValue.createEmptyObject(global, params_out.params.len);
for (params_out.params.slice()) |param| {
@@ -1176,7 +1176,7 @@ pub const JSFrameworkRouter = struct {
break :params obj;
} else .null,
.route = try jsfr.routeToJsonInverse(global, index, alloc),
}, global).toJS();
}).toJS();
}

return .null;
@@ -1193,7 +1193,7 @@ pub const JSFrameworkRouter = struct {

fn routeToJson(jsfr: *JSFrameworkRouter, global: *JSGlobalObject, route_index: Route.Index, allocator: Allocator) !JSValue {
const route = jsfr.router.routePtr(route_index);
return JSC.JSObject.create(.{
return global.createObjectFromStruct(.{
.part = try partToJS(global, route.part, allocator),
.page = jsfr.fileIdToJS(global, route.file_page),
.layout = jsfr.fileIdToJS(global, route.file_layout),
@@ -1212,12 +1212,12 @@ pub const JSFrameworkRouter = struct {
}
break :brk arr;
},
}, global).toJS();
}).toJS();
}

fn routeToJsonInverse(jsfr: *JSFrameworkRouter, global: *JSGlobalObject, route_index: Route.Index, allocator: Allocator) !JSValue {
const route = jsfr.router.routePtr(route_index);
return JSC.JSObject.create(.{
return global.createObjectFromStruct(.{
.part = try partToJS(global, route.part, allocator),
.page = jsfr.fileIdToJS(global, route.file_page),
.layout = jsfr.fileIdToJS(global, route.file_layout),
@@ -1226,7 +1226,7 @@ pub const JSFrameworkRouter = struct {
try routeToJsonInverse(jsfr, global, parent, allocator)
else
.null,
}, global).toJS();
}).toJS();
}

pub fn finalize(this: *JSFrameworkRouter) void {

@@ -1,9 +0,0 @@
// import { t } from "bindgen";

// export const ReactFastRefresh = t.dictionary({
// importSource: t.UTF8String,
// });

// export const FrameworkConfig = t.dictionary({
// reactFastRefresh: t.oneOf(t.boolean, ReactFastRefresh).default(false),
// });
@@ -5,8 +5,8 @@
import * as React from "react";
import { hydrateRoot } from "react-dom/client";
import { createFromReadableStream } from "react-server-dom-bun/client.browser";
import { onServerSideReload } from "bun:bake/client";
import { flushSync } from "react-dom";
import { onServerSideReload } from 'bun:bake/client';
import { flushSync } from 'react-dom';

const te = new TextEncoder();
const td = new TextDecoder();
@@ -74,7 +74,7 @@ const Root = () => {
const root = hydrateRoot(document, <Root />, {
onUncaughtError(e) {
console.error(e);
},
}
});

// Keep a cache of page objects to avoid re-fetching a page when pressing the
@@ -118,7 +118,7 @@ const firstPageId = Date.now();
// This is done client-side because a React error will unmount all elements.
const sheet = new CSSStyleSheet();
document.adoptedStyleSheets.push(sheet);
sheet.replaceSync(":where(*)::view-transition-group(root){animation:none}");
sheet.replaceSync(':where(*)::view-transition-group(root){animation:none}');
}
}

@@ -142,9 +142,10 @@ async function goto(href: string, cacheId?: number) {
if (cached) {
currentCssList = cached.css;
await ensureCssIsReady(currentCssList);
setPage?.((rscPayload = cached.element));
setPage?.(rscPayload = cached.element);
console.log("cached", cached);
if (olderController?.signal.aborted === false) abortOnRender = olderController;
if (olderController?.signal.aborted === false)
abortOnRender = olderController;
return;
}
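
Taken together, the goto hunks implement a back/forward cache: each navigation stores a numeric id in history state, and goto reuses the cached RSC payload under that id instead of re-fetching. A condensed sketch of the scheme follows; fetchPage and renderPage are invented stand-ins for the real streaming render path, and the types are assumptions.

// Condensed sketch of the history-state page cache used above.
const cachedPages = new Map<number, { css: string[]; element: unknown }>();

function navigateTo(href: string): void {
  const cacheId = Date.now();           // unique id per navigation
  history.pushState(cacheId, "", href); // id round-trips through history state
  void goto(href, cacheId);
}

window.addEventListener("popstate", event => {
  // On back/forward, the stored id lets goto() hit cachedPages and restore
  // the same page instance instead of re-fetching it.
  const id = typeof event.state === "number" ? event.state : undefined;
  void goto(location.href, id);
});

async function goto(href: string, cacheId?: number): Promise<void> {
  const cached = cacheId !== undefined ? cachedPages.get(cacheId) : undefined;
  if (cached) {
    renderPage(cached.element); // cache hit: no fetch
    return;
  }
  const element = await fetchPage(href);
  if (cacheId !== undefined) cachedPages.set(cacheId, { css: [], element });
  renderPage(element);
}

// Hypothetical stand-ins for the RSC fetch/render steps in the real client:
async function fetchPage(href: string): Promise<unknown> { return href; }
function renderPage(_element: unknown): void {}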

@@ -198,7 +199,7 @@ async function goto(href: string, cacheId?: number) {
// Save this promise so that pressing the back button in the browser navigates
// to the same instance of the old page, instead of re-fetching it.
if (cacheId) {
cachedPages.set(cacheId, { css: currentCssList!, element: p });
cachedPages.set(cacheId, { css: currentCssList, element: p });
}

// Defer aborting a previous request until VERY late. If a previous stream is
@@ -213,7 +214,8 @@ async function goto(href: string, cacheId?: number) {
if (document.startViewTransition as unknown) {
document.startViewTransition(() => {
flushSync(() => {
if (thisNavigationId === lastNavigationId) setPage((rscPayload = p));
if (thisNavigationId === lastNavigationId)
setPage(rscPayload = p);
});
});
} else {
@@ -340,8 +342,8 @@ window.addEventListener("popstate", event => {

if (import.meta.env.DEV) {
// Frameworks can call `onServerSideReload` to hook into server-side hot
// module reloading.
onServerSideReload(async () => {
// module reloading.
onServerSideReload(async() => {
const newId = Date.now();
history.replaceState(newId, "", location.href);
await goto(location.href, newId);
@@ -353,7 +355,7 @@ if (import.meta.env.DEV) {
onServerSideReload,
get currentCssList() {
return currentCssList;
},
}
};
}

@@ -415,7 +417,7 @@ async function readCssMetadataFallback(stream: ReadableStream<Uint8Array>) {
}
if (chunks.length === 1) {
const first = chunks[0];
if (first.byteLength >= size) {
if(first.byteLength >= size) {
chunks[0] = first.subarray(size);
totalBytes -= size;
return first.subarray(0, size);
@@ -444,14 +446,14 @@ async function readCssMetadataFallback(stream: ReadableStream<Uint8Array>) {
return buffer;
}
};
const header = new Uint32Array(await readChunk(4))[0];
console.log("h", header);
const header = new Uint32Array(await readChunk(4))[0];
console.log('h', header);
if (header === 0) {
currentCssList = [];
} else {
currentCssList = td.decode(await readChunk(header)).split("\n");
}
console.log("cc", currentCssList);
console.log('cc', currentCssList);
if (chunks.length === 0) {
return stream;
}
@@ -472,6 +474,6 @@ async function readCssMetadataFallback(stream: ReadableStream<Uint8Array>) {
},
cancel() {
reader.cancel();
},
}
});
}
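
The fallback above parses a small framing layer: a 4-byte little-endian length, then that many bytes of newline-separated CSS metadata, after which the RSC payload continues unchanged. A self-contained sketch of that framing, assuming the same wire layout:

// Sketch of the length-prefixed framing handled by the fallback above.
async function splitCssHeader(
  stream: ReadableStream<Uint8Array>,
): Promise<{ cssList: string[]; rest: ReadableStream<Uint8Array> }> {
  const reader = stream.getReader();
  let buf = new Uint8Array(0);

  // Accumulate chunks until `n` bytes are available, then consume them.
  const readExact = async (n: number): Promise<Uint8Array> => {
    while (buf.byteLength < n) {
      const { done, value } = await reader.read();
      if (done) throw new Error("stream ended before header was complete");
      const next = new Uint8Array(buf.byteLength + value.byteLength);
      next.set(buf);
      next.set(value, buf.byteLength);
      buf = next;
    }
    const out = buf.slice(0, n);
    buf = buf.subarray(n);
    return out;
  };

  const size = new DataView((await readExact(4)).buffer).getUint32(0, true);
  const cssList = size === 0 ? [] : new TextDecoder().decode(await readExact(size)).split("\n");

  // Re-wrap any leftover bytes plus the untouched remainder of the stream.
  const rest = new ReadableStream<Uint8Array>({
    start(controller) {
      if (buf.byteLength > 0) controller.enqueue(buf);
    },
    async pull(controller) {
      const { done, value } = await reader.read();
      if (done) controller.close();
      else controller.enqueue(value);
    },
    cancel() {
      void reader.cancel();
    },
  });
  return { cssList, rest };
}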

@@ -7,7 +7,7 @@ import type { Readable } from "node:stream";
import { EventEmitter } from "node:events";
import { createFromNodeStream, type Manifest } from "react-server-dom-bun/client.node.unbundled.js";
import { renderToPipeableStream } from "react-dom/server.node";
import type { MiniAbortSignal } from "./server";
import { MiniAbortSignal } from "./server";

// Verify that React 19 is being used.
if (!React.use) {

@@ -56,7 +56,7 @@ export class HotModule<E = any> {
mod._deps.set(this, onReload ? { _callback: onReload, _expectedImports: expectedImports } : undefined);
const { exports, __esModule } = mod;
const object = __esModule ? exports : (mod._ext_exports ??= { ...exports, default: exports });


if (expectedImports && mod._state === State.Ready) {
for (const key of expectedImports) {
if (!(key in object)) {
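
The HotModule hunk above tracks importers in _deps: each importer may register a reload callback plus the export names it expects, and a module that is already Ready validates those names eagerly. A simplified, self-contained sketch of that bookkeeping follows, with assumed shapes rather than the real hmr-module.ts.

// Sketch of the importer bookkeeping shown above (assumed, illustrative).
type ReloadCallback = (exports: Record<string, unknown>) => void;
type DepEntry = { _callback: ReloadCallback; _expectedImports?: string[] } | undefined;

class SketchHotModule {
  exports: Record<string, unknown> = {};
  ready = false;
  _deps = new Map<SketchHotModule, DepEntry>();

  // Called when `importer` imports this module.
  importFrom(importer: SketchHotModule, onReload?: ReloadCallback, expectedImports?: string[]) {
    this._deps.set(importer, onReload ? { _callback: onReload, _expectedImports: expectedImports } : undefined);
    if (expectedImports && this.ready) {
      for (const key of expectedImports) {
        if (!(key in this.exports)) {
          // Mirrors the eager "missing export" check in the hunk above.
          throw new SyntaxError(`module does not provide an export named '${key}'`);
        }
      }
    }
    return this.exports;
  }

  // On hot replacement, re-evaluate and notify importers that asked for it.
  replace(load: (mod: SketchHotModule) => void) {
    this.exports = {};
    load(this);
    this.ready = true;
    this._deps.forEach(entry => entry?._callback(this.exports));
  }
}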

@@ -156,16 +156,14 @@ class Hot {
}

function isUnsupportedViteEventName(str: string) {
return (
str === "vite:beforeUpdate" ||
str === "vite:afterUpdate" ||
str === "vite:beforeFullReload" ||
str === "vite:beforePrune" ||
str === "vite:invalidate" ||
str === "vite:error" ||
str === "vite:ws:disconnect" ||
str === "vite:ws:connect"
);
return str === 'vite:beforeUpdate'
|| str === 'vite:afterUpdate'
|| str === 'vite:beforeFullReload'
|| str === 'vite:beforePrune'
|| str === 'vite:invalidate'
|| str === 'vite:error'
|| str === 'vite:ws:disconnect'
|| str === 'vite:ws:connect';
}

/**
@@ -198,7 +196,7 @@ export function loadModule<T = any>(key: Id, type: LoadModuleType): HotModule<T>
load(mod);
mod._state = State.Ready;
mod._deps.forEach((entry, dep) => {
entry?._callback(mod.exports);
entry._callback?.(mod.exports);
});
} catch (err) {
console.error(err);
@@ -214,7 +212,7 @@ export const getModule = registry.get.bind(registry);
export function replaceModule(key: Id, load: ModuleLoadFunction) {
const module = registry.get(key);
if (module) {
module._onDispose?.forEach(cb => cb(null));
module._onDispose?.forEach((cb) => cb(null));
module.exports = {};
load(module);
const { exports } = module;
@@ -270,7 +268,7 @@ if (side === "client") {
const server_module = new HotModule("bun:bake/client");
server_module.__esModule = true;
server_module.exports = {
onServerSideReload: async cb => {
onServerSideReload: async (cb) => {
onServerSideReload = cb;
},
};

@@ -54,7 +54,7 @@ initWebSocket({
}
},

// [MessageId.errors_cleared]() {
// location.reload();
// },
[MessageId.errors_cleared]() {
location.reload();
},
});

@@ -1,14 +1,22 @@
{
"extends": "../../tsconfig.base.json",
"compilerOptions": {
"lib": ["ESNext", "DOM", "DOM.Iterable", "DOM.AsyncIterable"],
"lib": ["DOM", "ESNext"],
"module": "esnext",
"target": "esnext",
"moduleResolution": "Bundler",
"allowImportingTsExtensions": true,
"noEmit": true,
"strict": true,
"noImplicitAny": false,
"allowJs": true,
"downlevelIteration": true,
"esModuleInterop": true,
"skipLibCheck": true,
"paths": {
"bun-framework-react/*": ["./bun-framework-react/*"],
"bindgen": ["../codegen/bindgen-lib"]
"bun-framework-react/*": ["./bun-framework-react/*"]
},
"jsx": "react-jsx",
"types": ["react/experimental"]
},
"include": ["**/*.ts", "**/*.tsx", "../runtime.js", "../runtime.bun.js"],
"references": [{ "path": "../../packages/bun-types" }]
"include": ["**/*.ts", "**/*.tsx"]
}

Some files were not shown because too many files have changed in this diff.