mirror of
https://github.com/oven-sh/bun
synced 2026-02-09 02:18:47 +00:00
Compare commits: nektro-pat...jarred/upd
214 Commits
SHA1: 5164b69b57, eec8f5afcf, a7214ab61c, 796a9854b4, 0d79861a94, a8a4f28046, f2ea202730, eddbafadfb, 74e75d6925, 6d25d816a0, 4a4039fcb3, 3f2a2e25ce, f4381bb297, ab73c7b323, 6ba2feb8d2, 88aad6aeb1, a8536a7337, cb2ee39bfa, 3f197d1ce0, 26745bb5f3, 23e64279d8, ae113559c6, 506d9b81a7, 3a95a74b7f, c3f9d77391, cbb6f8b1a9, 88ac58962e, 73247e5c91, 5015c6a969, 72f1c676b9, 3d9028f0ee, 3f10c87906, e0fa2e78da, 4ca1e17778, 6e2c6cd6ea, d0f91082fc, 44fce3c5e8, 43380a4d68, f93472101a, 19f4c59b42, a1dd2a2a32, 8070da6ec9, b1dd6cf400, 5ffd8e40b3, f8b7e45f0c, 8444492e3c, 7346cdaa5a, 94304788dd, cdb9af36c1, aa554728f1, 48e56be05e, b97aca7fa1, 5d208f9ea0, f9b02d2891, 024b2ea94e, 797b2ff557, 7d6950da46, af7054c69b, a93c53651c, 38fe54261d, 7c400c9b24, c17c200cfd, 1cc15b6c20, e1a8852706, 981759fafa, f584a38a72, a0d6d02fe8, 58d77ab827, 0fb2584f15, 72813b5b48, d49df1df57, 44bab947c6, a6bc130918, 73452660fd, 52f37e4fe4, 7dc3ee4c89, 1d3588ef5a, 4a3b4953ee, 09ceececba, cfda423c01, 95d5bc78f2, 6c2d19c1b0, 55dcde581d, b876a8d480, 0657f16e27, 42aaa8eb81, cd91772f2f, f1403901d9, 664dbf569c, 95a3b72e94, efe1479299, 6dde9a7540, 474df61a86, 7c576d7b62, 4e1619c17a, 54d9969b4c, 91d6bf26b9, 2172f3c5e3, d44fa1ca92, e72ad4777c, f1bcf07e2b, cbf0b77e52, b6e7f01e6a, d1b3bce067, 05b9e89417, ebefe97073, 84ea80b813, 3bcce51fa4, 2c212929f8, 9b47a8791e, ca2897b466, 453cfa5689, 7337f27a7e, 96a33924fb, bc794e89ed, bb7404d6bc, f47d7b3d2d, 831a461916, 0e6c46819a, 9b5f317c5b, 63b8182b7c, 1e0eb4012a, bc4e76c2a5, 9c2c005b58, 84472ed57f, d04ef8c53f, f2a9dc9eea, 1cd6b587a2, 28534b2e34, 6c7eeb2030, 9b206cca2b, 06fbc24b11, a2637e9016, 244ae8c593, f7ed006a08, 5f72442386, 1957f0fc23, 37e17be7aa, ac4ac8f5a8, 45b55a8970, 63e622f2f3, cee857ac4e, 6475442469, 23220fd348, 4f1d32be16, f502d0f1a4, 4d6a8f598a, 78fa4c4f87, 1c80859431, 957e871f4a, 0840845d68, 2ef6397ab9, 9ccb4dd082, d8b1d29656, 778c24f176, 248d1a7a93, e1f996e1b8, dbcddc79fc, 7243945291, b42b239344, 87771ba895, 28fce4aac1, d8828b69d8, 80037859ec, 51df94e599, f9a74df73d, 2c20d88e8d, 8df97221a4, cf4d0fe3b6, a5f1670e92, d75a1deb4a, 033b74cc2a, 2b3c0584c6, b7d8fe2f35, 7d3b0e7daa, 32cdc13f63, 2f4cd402e4, 6b863d5d51, 79223472f7, 8c4917fe60, f4267e2d1f, 96ff169e46, 741e1513b7, 3708dd4484, 2cbd4c9d80, e0d01a9a91, e1df98878d, 596f3c064a, 29fe5b730f, 082d184848, 2e8d6d549d, 6431b90b9e, 4c60accdc1, 808e5cfac3, 0bfd74af55, 83ff3453dc, e034383833, 1d44b63675, 468927c14b, c8a8da370c, 195c69606b, 8db9c7650c, 818d014931, f59ec8d6c0, 006ca4f13c, 107310d785, fd56d41c8e, daf9ea419b, ac83057d08, 2567243c8d, 38c7eb73c1, a32116476a, 4e3f680ac4, b37acf309c
@@ -1,170 +0,0 @@
ARG LLVM_VERSION="19"
ARG REPORTED_LLVM_VERSION="19.1.7"
ARG OLD_BUN_VERSION="1.1.38"
ARG DEFAULT_CFLAGS="-mno-omit-leaf-frame-pointer -fno-omit-frame-pointer -ffunction-sections -fdata-sections -faddrsig -fno-unwind-tables -fno-asynchronous-unwind-tables"
ARG DEFAULT_CXXFLAGS="-flto=full -fwhole-program-vtables -fforce-emit-vtables"
ARG BUILDKITE_AGENT_TAGS="queue=linux,os=linux,arch=${TARGETARCH}"

FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-arm64
FROM --platform=$BUILDPLATFORM ubuntu:20.04 as base-amd64
FROM base-$TARGETARCH as base

ARG LLVM_VERSION
ARG OLD_BUN_VERSION
ARG TARGETARCH
ARG DEFAULT_CXXFLAGS
ARG DEFAULT_CFLAGS
ARG REPORTED_LLVM_VERSION

ENV DEBIAN_FRONTEND=noninteractive \
    CI=true \
    DOCKER=true

RUN echo "Acquire::Queue-Mode \"host\";" > /etc/apt/apt.conf.d/99-apt-queue-mode.conf \
    && echo "Acquire::Timeout \"120\";" >> /etc/apt/apt.conf.d/99-apt-timeout.conf \
    && echo "Acquire::Retries \"3\";" >> /etc/apt/apt.conf.d/99-apt-retries.conf \
    && echo "APT::Install-Recommends \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-recommends.conf \
    && echo "APT::Install-Suggests \"false\";" >> /etc/apt/apt.conf.d/99-apt-install-suggests.conf

RUN apt-get update && apt-get install -y --no-install-recommends \
    wget curl git python3 python3-pip ninja-build \
    software-properties-common apt-transport-https \
    ca-certificates gnupg lsb-release unzip \
    libxml2-dev ruby ruby-dev bison gawk perl make golang \
    && add-apt-repository ppa:ubuntu-toolchain-r/test \
    && apt-get update \
    && apt-get install -y gcc-13 g++-13 libgcc-13-dev libstdc++-13-dev \
    libasan6 libubsan1 libatomic1 libtsan0 liblsan0 \
    libgfortran5 libc6-dev \
    && wget https://apt.llvm.org/llvm.sh \
    && chmod +x llvm.sh \
    && ./llvm.sh ${LLVM_VERSION} all \
    && rm llvm.sh

RUN --mount=type=tmpfs,target=/tmp \
    cmake_version="3.30.5" && \
    if [ "$TARGETARCH" = "arm64" ]; then \
    cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-aarch64.sh"; \
    else \
    cmake_url="https://github.com/Kitware/CMake/releases/download/v${cmake_version}/cmake-${cmake_version}-linux-x86_64.sh"; \
    fi && \
    wget -O /tmp/cmake.sh "$cmake_url" && \
    sh /tmp/cmake.sh --skip-license --prefix=/usr

RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 130 \
    --slave /usr/bin/g++ g++ /usr/bin/g++-13 \
    --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-13 \
    --slave /usr/bin/gcc-nm gcc-nm /usr/bin/gcc-nm-13 \
    --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-13

RUN echo "ARCH_PATH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64-linux-gnu" || echo "x86_64-linux-gnu")" >> /etc/environment \
    && echo "BUN_ARCH=$([ "$TARGETARCH" = "arm64" ] && echo "aarch64" || echo "x64")" >> /etc/environment

ENV LD_LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
    LIBRARY_PATH=/usr/lib/gcc/${ARCH_PATH}/13:/usr/lib/${ARCH_PATH} \
    CPLUS_INCLUDE_PATH=/usr/include/c++/13:/usr/include/${ARCH_PATH}/c++/13 \
    C_INCLUDE_PATH=/usr/lib/gcc/${ARCH_PATH}/13/include \
    CFLAGS=${DEFAULT_CFLAGS} \
    CXXFLAGS="${DEFAULT_CFLAGS} ${DEFAULT_CXXFLAGS}"

RUN if [ "$TARGETARCH" = "arm64" ]; then \
    export ARCH_PATH="aarch64-linux-gnu"; \
    else \
    export ARCH_PATH="x86_64-linux-gnu"; \
    fi \
    && mkdir -p /usr/lib/gcc/${ARCH_PATH}/13 \
    && ln -sf /usr/lib/${ARCH_PATH}/libstdc++.so.6 /usr/lib/gcc/${ARCH_PATH}/13/ \
    && echo "/usr/lib/gcc/${ARCH_PATH}/13" > /etc/ld.so.conf.d/gcc-13.conf \
    && echo "/usr/lib/${ARCH_PATH}" >> /etc/ld.so.conf.d/gcc-13.conf \
    && ldconfig

RUN for f in /usr/lib/llvm-${LLVM_VERSION}/bin/*; do ln -sf "$f" /usr/bin; done \
    && ln -sf /usr/bin/clang-${LLVM_VERSION} /usr/bin/clang \
    && ln -sf /usr/bin/clang++-${LLVM_VERSION} /usr/bin/clang++ \
    && ln -sf /usr/bin/lld-${LLVM_VERSION} /usr/bin/lld \
    && ln -sf /usr/bin/lldb-${LLVM_VERSION} /usr/bin/lldb \
    && ln -sf /usr/bin/clangd-${LLVM_VERSION} /usr/bin/clangd \
    && ln -sf /usr/bin/llvm-ar-${LLVM_VERSION} /usr/bin/llvm-ar \
    && ln -sf /usr/bin/ld.lld /usr/bin/ld \
    && ln -sf /usr/bin/clang /usr/bin/cc \
    && ln -sf /usr/bin/clang++ /usr/bin/c++

ENV CC="clang" \
    CXX="clang++" \
    AR="llvm-ar-${LLVM_VERSION}" \
    RANLIB="llvm-ranlib-${LLVM_VERSION}" \
    LD="lld-${LLVM_VERSION}"

RUN --mount=type=tmpfs,target=/tmp \
    bash -c '\
    set -euxo pipefail && \
    source /etc/environment && \
    echo "Downloading bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip from https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip" && \
    curl -fsSL https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases/bun-v${OLD_BUN_VERSION}/bun-linux-$BUN_ARCH.zip -o /tmp/bun.zip && \
    unzip /tmp/bun.zip -d /tmp/bun && \
    mv /tmp/bun/*/bun /usr/bin/bun && \
    chmod +x /usr/bin/bun'

ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}

WORKDIR /workspace

FROM --platform=$BUILDPLATFORM base as buildkite
ARG BUILDKITE_AGENT_TAGS

# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
    && export PATH=$HOME/.cargo/bin:$PATH \
    && rustup install nightly \
    && rustup default nightly

RUN ARCH=$(if [ "$TARGETARCH" = "arm64" ]; then echo "arm64"; else echo "amd64"; fi) && \
    echo "Downloading buildkite" && \
    curl -fsSL "https://github.com/buildkite/agent/releases/download/v3.87.0/buildkite-agent-linux-${ARCH}-3.87.0.tar.gz" -o /tmp/buildkite-agent.tar.gz && \
    mkdir -p /tmp/buildkite-agent && \
    tar -xzf /tmp/buildkite-agent.tar.gz -C /tmp/buildkite-agent && \
    mv /tmp/buildkite-agent/buildkite-agent /usr/bin/buildkite-agent

RUN mkdir -p /var/cache/buildkite-agent /var/log/buildkite-agent /var/run/buildkite-agent /etc/buildkite-agent /var/lib/buildkite-agent/cache/bun

COPY ../*/agent.mjs /var/bun/scripts/

ENV BUN_INSTALL_CACHE=/var/lib/buildkite-agent/cache/bun
ENV BUILDKITE_AGENT_TAGS=${BUILDKITE_AGENT_TAGS}

WORKDIR /var/bun/scripts

ENV PATH=/root/.cargo/bin:$PATH

CMD ["bun", "/var/bun/scripts/agent.mjs", "start"]

FROM --platform=$BUILDPLATFORM base as bun-build-linux-local

ARG LLVM_VERSION
WORKDIR /workspace/bun

COPY . /workspace/bun

# Install Rust nightly
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
    && export PATH=$HOME/.cargo/bin:$PATH \
    && rustup install nightly \
    && rustup default nightly

ENV PATH=/root/.cargo/bin:$PATH

ENV LLVM_VERSION=${REPORTED_LLVM_VERSION}

RUN --mount=type=tmpfs,target=/workspace/bun/build \
    ls -la \
    && bun run build:release \
    && mkdir -p /target \
    && cp -r /workspace/bun/build/release/bun /target/bun
@@ -1,122 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail

# Check if running as root
if [ "$EUID" -ne 0 ]; then
  echo "error: must run as root"
  exit 1
fi

# Check OS compatibility
if ! command -v dnf &> /dev/null; then
  echo "error: this script requires dnf (RHEL/Fedora/CentOS)"
  exit 1
fi

# Ensure /tmp/agent.mjs, /tmp/Dockerfile are present
if [ ! -f /tmp/agent.mjs ] || [ ! -f /tmp/Dockerfile ]; then
  # Print each missing file
  if [ ! -f /tmp/agent.mjs ]; then
    echo "error: /tmp/agent.mjs is missing"
  fi
  if [ ! -f /tmp/Dockerfile ]; then
    echo "error: /tmp/Dockerfile is missing"
  fi
  exit 1
fi

# Install Docker
dnf update -y
dnf install -y docker

systemctl enable docker
systemctl start docker || {
  echo "error: failed to start Docker"
  exit 1
}

# Create builder
docker buildx create --name builder --driver docker-container --bootstrap --use || {
  echo "error: failed to create Docker buildx builder"
  exit 1
}

# Set up Docker to start on boot
cat << 'EOF' > /etc/systemd/system/buildkite-agent.service
[Unit]
Description=Buildkite Docker Container
After=docker.service network-online.target
Requires=docker.service network-online.target

[Service]
TimeoutStartSec=0
Restart=always
RestartSec=5
ExecStartPre=-/usr/bin/docker stop buildkite
ExecStartPre=-/usr/bin/docker rm buildkite
ExecStart=/usr/bin/docker run \
  --name buildkite \
  --restart=unless-stopped \
  --network host \
  -v /var/run/docker.sock:/var/run/docker.sock \
  -v /tmp:/tmp \
  buildkite:latest

[Install]
WantedBy=multi-user.target
EOF

echo "Building Buildkite image"

# Clean up any previous build artifacts
rm -rf /tmp/fakebun
mkdir -p /tmp/fakebun/scripts /tmp/fakebun/.buildkite

# Copy required files
cp /tmp/agent.mjs /tmp/fakebun/scripts/ || {
  echo "error: failed to copy agent.mjs"
  exit 1
}
cp /tmp/Dockerfile /tmp/fakebun/.buildkite/Dockerfile || {
  echo "error: failed to copy Dockerfile"
  exit 1
}

cd /tmp/fakebun || {
  echo "error: failed to change directory"
  exit 1
}

# Build the Buildkite image
docker buildx build \
  --platform $(uname -m | sed 's/aarch64/linux\/arm64/;s/x86_64/linux\/amd64/') \
  --tag buildkite:latest \
  --target buildkite \
  -f .buildkite/Dockerfile \
  --load \
  . || {
  echo "error: Docker build failed"
  exit 1
}

# Create container to ensure image is cached in AMI
docker container create \
  --name buildkite \
  --restart=unless-stopped \
  buildkite:latest || {
  echo "error: failed to create buildkite container"
  exit 1
}

# Reload systemd to pick up new service
systemctl daemon-reload

# Enable the service, but don't start it yet
systemctl enable buildkite-agent || {
  echo "error: failed to enable buildkite-agent service"
  exit 1
}

echo "Bootstrap complete"
echo "To start the Buildkite agent, run: "
echo "  systemctl start buildkite-agent"
@@ -1,16 +0,0 @@
# Uploads the latest CI workflow to Buildkite.
# https://buildkite.com/docs/pipelines/defining-steps
#
# Changes to this file must be manually edited here:
# https://buildkite.com/bun/bun/settings/steps
steps:
  - if: "build.pull_request.repository.fork"
    block: ":eyes:"
    prompt: "Did you review the PR?"
    blocked_state: "running"

  - label: ":pipeline:"
    agents:
      queue: "build-darwin"
    command:
      - "node .buildkite/ci.mjs"
.buildkite/ci.mjs (1206 lines changed): file diff suppressed because it is too large.
@@ -1,7 +0,0 @@
import { getCommit, getSecret } from "../../scripts/utils.mjs";

console.log("Submitting...");
const response = await fetch(getSecret("BENCHMARK_URL") + "?tag=_&commit=" + getCommit() + "&artifact_url=_", {
  method: "POST",
});
console.log("Got status " + response.status);
@@ -1,254 +0,0 @@
#!/bin/bash

set -eo pipefail

function assert_main() {
  if [ -z "$BUILDKITE_REPO" ]; then
    echo "error: Cannot find repository for this build"
    exit 1
  fi
  if [ -z "$BUILDKITE_COMMIT" ]; then
    echo "error: Cannot find commit for this build"
    exit 1
  fi
  if [ -n "$BUILDKITE_PULL_REQUEST_REPO" ] && [ "$BUILDKITE_REPO" != "$BUILDKITE_PULL_REQUEST_REPO" ]; then
    echo "error: Cannot upload release from a fork"
    exit 1
  fi
  if [ "$BUILDKITE_PULL_REQUEST" != "false" ]; then
    echo "error: Cannot upload release from a pull request"
    exit 1
  fi
  if [ "$BUILDKITE_BRANCH" != "main" ]; then
    echo "error: Cannot upload release from a branch other than main"
    exit 1
  fi
}

function assert_buildkite_agent() {
  if ! command -v "buildkite-agent" &> /dev/null; then
    echo "error: Cannot find buildkite-agent, please install it:"
    echo "https://buildkite.com/docs/agent/v3/install"
    exit 1
  fi
}

function assert_github() {
  assert_command "gh" "gh" "https://github.com/cli/cli#installation"
  assert_buildkite_secret "GITHUB_TOKEN"
  # gh expects the token in $GH_TOKEN
  export GH_TOKEN="$GITHUB_TOKEN"
}

function assert_aws() {
  assert_command "aws" "awscli" "https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html"
  for secret in "AWS_ACCESS_KEY_ID" "AWS_SECRET_ACCESS_KEY" "AWS_ENDPOINT"; do
    assert_buildkite_secret "$secret"
  done
  assert_buildkite_secret "AWS_BUCKET" --skip-redaction
}

function assert_sentry() {
  assert_command "sentry-cli" "getsentry/tools/sentry-cli" "https://docs.sentry.io/cli/installation/"
  for secret in "SENTRY_AUTH_TOKEN" "SENTRY_ORG" "SENTRY_PROJECT"; do
    assert_buildkite_secret "$secret"
  done
}

function run_command() {
  set -x
  "$@"
  { set +x; } 2>/dev/null
}

function assert_command() {
  local command="$1"
  local package="$2"
  local help_url="$3"
  if ! command -v "$command" &> /dev/null; then
    echo "warning: $command is not installed, installing..."
    if command -v brew &> /dev/null; then
      HOMEBREW_NO_AUTO_UPDATE=1 run_command brew install "$package"
    else
      echo "error: Cannot install $command, please install it"
      if [ -n "$help_url" ]; then
        echo ""
        echo "hint: See $help_url for help"
      fi
      exit 1
    fi
  fi
}

function assert_buildkite_secret() {
  local key="$1"
  local value=$(buildkite-agent secret get "$key" ${@:2})
  if [ -z "$value" ]; then
    echo "error: Cannot find $key secret"
    echo ""
    echo "hint: Create a secret named $key with a value:"
    echo "https://buildkite.com/docs/pipelines/buildkite-secrets"
    exit 1
  fi
  export "$key"="$value"
}

function release_tag() {
  local version="$1"
  if [ "$version" == "canary" ]; then
    echo "canary"
  else
    echo "bun-v$version"
  fi
}

function create_sentry_release() {
  local version="$1"
  local release="$version"
  if [ "$version" == "canary" ]; then
    release="$BUILDKITE_COMMIT-canary"
  fi
  run_command sentry-cli releases new "$release" --finalize
  run_command sentry-cli releases set-commits "$release" --auto --ignore-missing
  if [ "$version" == "canary" ]; then
    run_command sentry-cli deploys new --env="canary" --release="$release"
  fi
}

function download_buildkite_artifact() {
  local name="$1"
  local dir="$2"
  if [ -z "$dir" ]; then
    dir="."
  fi
  run_command buildkite-agent artifact download "$name" "$dir"
  if [ ! -f "$dir/$name" ]; then
    echo "error: Cannot find Buildkite artifact: $name"
    exit 1
  fi
}

function upload_github_asset() {
  local version="$1"
  local tag="$(release_tag "$version")"
  local file="$2"
  run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"

  # Sometimes the upload fails, maybe this is a race condition in the gh CLI?
  while [ "$(gh release view "$tag" --repo "$BUILDKITE_REPO" | grep -c "$file")" -eq 0 ]; do
    echo "warn: Uploading $file to $tag failed, retrying..."
    sleep "$((RANDOM % 5 + 1))"
    run_command gh release upload "$tag" "$file" --clobber --repo "$BUILDKITE_REPO"
  done
}

function update_github_release() {
  local version="$1"
  local tag="$(release_tag "$version")"
  if [ "$tag" == "canary" ]; then
    sleep 5 # There is possibly a race condition where this overwrites artifacts?
    run_command gh release edit "$tag" --repo "$BUILDKITE_REPO" \
      --notes "This release of Bun corresponds to the commit: $BUILDKITE_COMMIT"
  fi
}

function upload_s3_file() {
  local folder="$1"
  local file="$2"
  run_command aws --endpoint-url="$AWS_ENDPOINT" s3 cp "$file" "s3://$AWS_BUCKET/$folder/$file"
}

function send_discord_announcement() {
  local value=$(buildkite-agent secret get "BUN_ANNOUNCE_CANARY_WEBHOOK_URL")
  if [ -z "$value" ]; then
    echo "warn: BUN_ANNOUNCE_CANARY_WEBHOOK_URL not set, skipping Discord announcement"
    return
  fi

  local version="$1"
  local commit="$BUILDKITE_COMMIT"
  local short_sha="${commit:0:7}"
  local commit_url="https://github.com/oven-sh/bun/commit/$commit"

  if [ "$version" == "canary" ]; then
    local json_payload=$(cat <<EOF
{
  "embeds": [{
    "title": "New Bun Canary now available",
    "description": "A new canary build of Bun has been automatically uploaded ([${short_sha}](${commit_url})). To upgrade, run:\n\n\`\`\`shell\nbun upgrade --canary\n\`\`\`\nCommit: \`${commit}\`",
    "color": 16023551,
    "timestamp": "$(date -u +%Y-%m-%dT%H:%M:%SZ)"
  }]
}
EOF
)

    curl -H "Content-Type: application/json" \
      -d "$json_payload" \
      -sf \
      "$value" >/dev/null
  fi
}

function create_release() {
  assert_main
  assert_buildkite_agent
  assert_github
  assert_aws
  assert_sentry

  local tag="$1" # 'canary' or 'x.y.z'
  local artifacts=(
    bun-darwin-aarch64.zip
    bun-darwin-aarch64-profile.zip
    bun-darwin-x64.zip
    bun-darwin-x64-profile.zip
    bun-linux-aarch64.zip
    bun-linux-aarch64-profile.zip
    bun-linux-x64.zip
    bun-linux-x64-profile.zip
    bun-linux-x64-baseline.zip
    bun-linux-x64-baseline-profile.zip
    bun-linux-aarch64-musl.zip
    bun-linux-aarch64-musl-profile.zip
    bun-linux-x64-musl.zip
    bun-linux-x64-musl-profile.zip
    bun-linux-x64-musl-baseline.zip
    bun-linux-x64-musl-baseline-profile.zip
    bun-windows-x64.zip
    bun-windows-x64-profile.zip
    bun-windows-x64-baseline.zip
    bun-windows-x64-baseline-profile.zip
  )

  function upload_artifact() {
    local artifact="$1"
    download_buildkite_artifact "$artifact"
    if [ "$tag" == "canary" ]; then
      upload_s3_file "releases/$BUILDKITE_COMMIT-canary" "$artifact" &
    else
      upload_s3_file "releases/$BUILDKITE_COMMIT" "$artifact" &
    fi
    upload_s3_file "releases/$tag" "$artifact" &
    upload_github_asset "$tag" "$artifact" &
    wait
  }

  for artifact in "${artifacts[@]}"; do
    upload_artifact "$artifact"
  done

  update_github_release "$tag"
  create_sentry_release "$tag"
  send_discord_announcement "$tag"
}

function assert_canary() {
  if [ -z "$CANARY" ] || [ "$CANARY" == "0" ]; then
    echo "warn: Skipping release because this is not a canary build"
    exit 0
  fi
}

assert_canary
create_release "canary"
@@ -1,9 +0,0 @@
WarningsAsErrors: "*"
FormatStyle: webkit
Checks: >
  -*,
  clang-analyzer-*,
  -clang-analyzer-optin.core.EnumCastOutOfRange
  -clang-analyzer-webkit.UncountedLambdaCapturesChecker
  -clang-analyzer-optin.core.EnumCastOutOfRange
  -clang-analyzer-webkit.RefCntblBaseVirtualDtor
.clangd (8 lines changed)
@@ -1,8 +0,0 @@
Index:
  Background: Skip # Disable slow background indexing of these files.

CompileFlags:
  CompilationDatabase: build/debug

Diagnostics:
  UnusedIncludes: None
@@ -1,27 +0,0 @@
---
description: How to build Bun
globs:
---
# How to build Bun

## CMake

Bun is built using CMake, which you can find in `CMakeLists.txt` and in the `cmake/` directory.

* `CMakeLists.txt`
* `cmake/`
  * `Globals.cmake` - macros and functions used by all the other files
  * `Options.cmake` - build options for configuring the build (e.g. debug/release mode)
  * `CompilerFlags.cmake` - compiler and linker flags used by all the targets
  * `tools/` - setup scripts for various build tools (e.g. llvm, zig, webkit, rust, etc.)
  * `targets/` - targets for bun and its dependencies (e.g. brotli, boringssl, libuv, etc.)

## How to

There are `package.json` scripts that make it easy to build Bun without calling CMake directly, for example:

```sh
bun run build # builds a debug build: `build/debug/bun-debug`
bun run build:release # builds a release build: `build/release/bun`
bun run build:assert # builds a release build with debug assertions: `build/assert/bun`
```
@@ -1,139 +0,0 @@
---
description: Writing HMR/Dev Server tests
globs: test/bake/*
---

# Writing HMR/Dev Server tests

Dev server tests validate that hot-reloading is robust, correct, and reliable. Remember to write thorough, yet concise tests.

## File Structure

- `test/bake/bake-harness.ts` - shared utilities and test harness
  - primary test functions `devTest` / `prodTest` / `devAndProductionTest`
  - class `Dev` (controls the subprocess for the dev server)
  - class `Client` (controls a happy-dom subprocess for having the page open)
  - more helpers
- `test/bake/client-fixture.mjs` - the subprocess that `Client` controls. It loads a page and uses IPC to query parts of the page, run JavaScript, and much more.
- `test/bake/dev/*.test.ts` - these call `devTest` to test the dev server and hot reloading
- `test/bake/dev-and-prod.ts` - these use `devAndProductionTest` to run the same test in dev and production mode. These tests cannot really test hot reloading, for obvious reasons.

## Categories

bundle.test.ts - Bundle tests concern bundling bugs that only occur in DevServer.
css.test.ts - CSS tests concern bundling bugs with CSS files.
plugins.test.ts - Plugin tests concern plugins in development mode.
ecosystem.test.ts - These tests involve ensuring certain libraries are correct. It is preferred to test concrete bugs rather than entire packages.
esm.test.ts - ESM tests are about various ESM features in development mode.
html.test.ts - HTML tests relate to HTML files themselves.
react-spa.test.ts - Tests relating to React, our react-refresh transform, and basic server component transforms.
sourcemap.test.ts - Tests verifying source maps are correct.

## `devTest` Basics

A test takes two primary inputs: `files` and `async test(dev) {`

```ts
import { devTest, emptyHtmlFile } from "../bake-harness";

devTest("html file is watched", {
  files: {
    "index.html": emptyHtmlFile({
      scripts: ["/script.ts"],
      body: "<h1>Hello</h1>",
    }),
    "script.ts": `
      console.log("hello");
    `,
  },
  async test(dev) {
    await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
    await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
    await dev.patch("index.html", {
      find: "Hello",
      replace: "World",
    });
    await dev.fetch("/").expect.toInclude("<h1>World</h1>");

    // Works
    await using c = await dev.client("/");
    await c.expectMessage("hello");

    // Editing HTML reloads
    await c.expectReload(async () => {
      await dev.patch("index.html", {
        find: "World",
        replace: "Hello",
      });
      await dev.fetch("/").expect.toInclude("<h1>Hello</h1>");
    });
    await c.expectMessage("hello");

    await c.expectReload(async () => {
      await dev.patch("index.html", {
        find: "Hello",
        replace: "Bar",
      });
      await dev.fetch("/").expect.toInclude("<h1>Bar</h1>");
    });
    await c.expectMessage("hello");

    await c.expectReload(async () => {
      await dev.patch("script.ts", {
        find: "hello",
        replace: "world",
      });
    });
    await c.expectMessage("world");
  },
});
```

`files` holds the initial state, and the callback runs with the server running. `dev.fetch()` runs HTTP requests, while `dev.client()` opens a browser instance on the page.

The functions `dev.write`, `dev.patch`, and `dev.delete` mutate the filesystem. Do not use `node:fs` APIs; the dev server helpers are hooked to wait for the hot reload and for all connected clients to receive the changes.

When a change performs a hard reload, that must be explicitly annotated with `expectReload`. This tells `client-fixture.mjs` that the test is meant to reload the page once; all other hard reloads automatically fail the test.

Clients have `console.log` instrumented so that any unasserted logs fail the test. This makes it more obvious when an extra reload or re-evaluation happens. Messages are awaited via `c.expectMessage("log")`, or with multiple arguments if there are multiple logs.
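For example, a minimal sketch of the multi-argument form using the harness API described above (the log strings are hypothetical, not taken from an existing test):

```ts
// If one change produces two logs in a single evaluation, assert both in one
// call; an unasserted log would otherwise fail the test.
await using c = await dev.client("/");
await c.expectMessage("first log", "second log");
```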
## Testing for bundling errors

By default, a client opening a page with an error will fail the test. This makes testing errors explicit.

```ts
devTest("import then create", {
  files: {
    "index.html": `
      <!DOCTYPE html>
      <html>
        <head></head>
        <body>
          <script type="module" src="/script.ts"></script>
        </body>
      </html>
    `,
    "script.ts": `
      import data from "./data";
      console.log(data);
    `,
  },
  async test(dev) {
    const c = await dev.client("/", {
      errors: ['script.ts:1:18: error: Could not resolve: "./data"'],
    });
    await c.expectReload(async () => {
      await dev.write("data.ts", "export default 'data';");
    });
    await c.expectMessage("data");
  },
});
```

Many functions take an options value to specify that the operation will produce errors. For example, this delete causes a resolution failure (a further sketch for `dev.patch` follows the example):

```ts
await dev.delete("other.ts", {
  errors: ['index.ts:1:16: error: Could not resolve: "./other"'],
});
```
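A hedged sketch of the same pattern, assuming `dev.patch` accepts the same `errors` option as `dev.delete` (the file name, patch, and error text here are hypothetical):

```ts
// Breaking an import specifier should surface exactly this bundler error.
await dev.patch("index.ts", {
  find: `from "./other"`,
  replace: `from "./missing"`,
  errors: ['index.ts:1:16: error: Could not resolve: "./missing"'],
});
```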
@@ -1,414 +0,0 @@
---
description: JavaScript class implemented in C++
globs: *.cpp
alwaysApply: false
---

# Implementing JavaScript classes in C++

If there is a publicly accessible Constructor and Prototype, then there are 3 classes:

- If there are C++ class members, we need a destructor, so `class Foo : public JSC::DestructibleObject`; if there are no C++ class fields (only JS properties), we usually don't need a class at all. We can instead use `JSC::constructEmptyObject(vm, structure)` and `putDirectOffset`, like in [NodeFSStatBinding.cpp](mdc:src/bun.js/bindings/NodeFSStatBinding.cpp).
- class FooPrototype : public JSC::JSNonFinalObject
- class FooConstructor : public JSC::InternalFunction

If there is no publicly accessible Constructor, only the Prototype and the class are necessary. In some cases, we can avoid the prototype entirely (but that's rare).

If there are C++ fields on the Foo class, the Foo class will need an iso subspace added to [DOMClientIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMClientIsoSubspaces.h) and [DOMIsoSubspaces.h](mdc:src/bun.js/bindings/webcore/DOMIsoSubspaces.h). The Prototype and Constructor do not need subspaces.

Usually you'll need to #include "root.h" at the top of C++ files or you'll get lint errors.

Generally, defining the subspace looks like this:

```c++

class Foo : public JSC::DestructibleObject {

    // ...

    template<typename MyClassT, JSC::SubspaceAccess mode>
    static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
    {
        if constexpr (mode == JSC::SubspaceAccess::Concurrently)
            return nullptr;
        return WebCore::subspaceForImpl<MyClassT, WebCore::UseCustomHeapCellType::No>(
            vm,
            [](auto& spaces) { return spaces.m_clientSubspaceFor${MyClassT}.get(); },
            [](auto& spaces, auto&& space) { spaces.m_clientSubspaceFor${MyClassT} = std::forward<decltype(space)>(space); },
            [](auto& spaces) { return spaces.m_subspaceFor${MyClassT}.get(); },
            [](auto& spaces, auto&& space) { spaces.m_subspaceFor${MyClassT} = std::forward<decltype(space)>(space); });
    }

```

It's better to put it in the .cpp file instead of the .h file, when possible.

## Defining properties

Define properties on the prototype. Use a const `HashTableValue` array like this:

```C++
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckEmail);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckHost);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIP);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckIssued);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncCheckPrivateKey);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToLegacyObject);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncToString);
static JSC_DECLARE_HOST_FUNCTION(jsX509CertificateProtoFuncVerify);

static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_ca);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint256);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_fingerprint512);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subject);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_subjectAltName);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_infoAccess);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_keyUsage);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuer);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_issuerCertificate);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_publicKey);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_raw);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_serialNumber);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFrom);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validTo);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validFromDate);
static JSC_DECLARE_CUSTOM_GETTER(jsX509CertificateGetter_validToDate);

static const HashTableValue JSX509CertificatePrototypeTableValues[] = {
    { "ca"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_ca, 0 } },
    { "checkEmail"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckEmail, 2 } },
    { "checkHost"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckHost, 2 } },
    { "checkIP"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIP, 1 } },
    { "checkIssued"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckIssued, 1 } },
    { "checkPrivateKey"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncCheckPrivateKey, 1 } },
    { "fingerprint"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint, 0 } },
    { "fingerprint256"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint256, 0 } },
    { "fingerprint512"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_fingerprint512, 0 } },
    { "infoAccess"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_infoAccess, 0 } },
    { "issuer"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuer, 0 } },
    { "issuerCertificate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_issuerCertificate, 0 } },
    { "keyUsage"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_keyUsage, 0 } },
    { "publicKey"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_publicKey, 0 } },
    { "raw"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_raw, 0 } },
    { "serialNumber"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_serialNumber, 0 } },
    { "subject"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subject, 0 } },
    { "subjectAltName"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_subjectAltName, 0 } },
    { "toJSON"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToJSON, 0 } },
    { "toLegacyObject"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToLegacyObject, 0 } },
    { "toString"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncToString, 0 } },
    { "validFrom"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFrom, 0 } },
    { "validFromDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validFromDate, 0 } },
    { "validTo"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessor), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validTo, 0 } },
    { "validToDate"_s, static_cast<unsigned>(PropertyAttribute::ReadOnly | PropertyAttribute::CustomAccessorOrValue), NoIntrinsic, { HashTableValue::GetterSetterType, jsX509CertificateGetter_validToDate, 0 } },
    { "verify"_s, static_cast<unsigned>(PropertyAttribute::Function), NoIntrinsic, { HashTableValue::NativeFunctionType, jsX509CertificateProtoFuncVerify, 1 } },
};
```

### Creating a prototype class

Follow a pattern like this:

```c++
class JSX509CertificatePrototype final : public JSC::JSNonFinalObject {
public:
    using Base = JSC::JSNonFinalObject;
    static constexpr unsigned StructureFlags = Base::StructureFlags;

    static JSX509CertificatePrototype* create(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::Structure* structure)
    {
        JSX509CertificatePrototype* prototype = new (NotNull, allocateCell<JSX509CertificatePrototype>(vm)) JSX509CertificatePrototype(vm, structure);
        prototype->finishCreation(vm);
        return prototype;
    }

    template<typename, JSC::SubspaceAccess>
    static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
    {
        return &vm.plainObjectSpace();
    }

    DECLARE_INFO;

    static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
    {
        auto* structure = JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::ObjectType, StructureFlags), info());
        structure->setMayBePrototype(true);
        return structure;
    }

private:
    JSX509CertificatePrototype(JSC::VM& vm, JSC::Structure* structure)
        : Base(vm, structure)
    {
    }

    void finishCreation(JSC::VM& vm);
};

const ClassInfo JSX509CertificatePrototype::s_info = { "X509Certificate"_s, &Base::s_info, nullptr, nullptr, CREATE_METHOD_TABLE(JSX509CertificatePrototype) };

void JSX509CertificatePrototype::finishCreation(VM& vm)
{
    Base::finishCreation(vm);
    reifyStaticProperties(vm, JSX509Certificate::info(), JSX509CertificatePrototypeTableValues, *this);
    JSC_TO_STRING_TAG_WITHOUT_TRANSITION();
}

} // namespace Bun
```

### Getter definition

```C++

JSC_DEFINE_CUSTOM_GETTER(jsX509CertificateGetter_ca, (JSGlobalObject * globalObject, EncodedJSValue thisValue, PropertyName))
{
    VM& vm = globalObject->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    JSX509Certificate* thisObject = jsDynamicCast<JSX509Certificate*>(JSValue::decode(thisValue));
    if (UNLIKELY(!thisObject)) {
        Bun::throwThisTypeError(*globalObject, scope, "JSX509Certificate"_s, "ca"_s);
        return {};
    }

    return JSValue::encode(jsBoolean(thisObject->view().isCA()));
}
```

### Setter definition

```C++
JSC_DEFINE_CUSTOM_SETTER(jsImportMetaObjectSetter_require, (JSGlobalObject * jsGlobalObject, JSC::EncodedJSValue thisValue, JSC::EncodedJSValue encodedValue, PropertyName propertyName))
{
    ImportMetaObject* thisObject = jsDynamicCast<ImportMetaObject*>(JSValue::decode(thisValue));
    if (UNLIKELY(!thisObject))
        return false;

    JSValue value = JSValue::decode(encodedValue);
    if (!value.isCell()) {
        // TODO:
        return true;
    }

    thisObject->requireProperty.set(thisObject->vm(), thisObject, value.asCell());
    return true;
}
```

### Function definition

```C++
JSC_DEFINE_HOST_FUNCTION(jsX509CertificateProtoFuncToJSON, (JSGlobalObject * globalObject, CallFrame* callFrame))
{
    VM& vm = globalObject->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);
    auto* thisObject = jsDynamicCast<MyClassT*>(callFrame->thisValue());
    if (UNLIKELY(!thisObject)) {
        Bun::throwThisTypeError(*globalObject, scope, "MyClass"_s, "myFunctionName"_s);
        return {};
    }

    return JSValue::encode(functionThatReturnsJSValue(vm, globalObject, thisObject));
}
```

### Constructor definition

```C++

JSC_DECLARE_HOST_FUNCTION(callStats);
JSC_DECLARE_HOST_FUNCTION(constructStats);

class JSStatsConstructor final : public JSC::InternalFunction {
public:
    using Base = JSC::InternalFunction;
    static constexpr unsigned StructureFlags = Base::StructureFlags;

    static JSStatsConstructor* create(JSC::VM& vm, JSC::Structure* structure, JSC::JSObject* prototype)
    {
        JSStatsConstructor* constructor = new (NotNull, JSC::allocateCell<JSStatsConstructor>(vm)) JSStatsConstructor(vm, structure);
        constructor->finishCreation(vm, prototype);
        return constructor;
    }

    DECLARE_INFO;

    template<typename CellType, JSC::SubspaceAccess>
    static JSC::GCClient::IsoSubspace* subspaceFor(JSC::VM& vm)
    {
        return &vm.internalFunctionSpace();
    }

    static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype)
    {
        return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info());
    }

private:
    JSStatsConstructor(JSC::VM& vm, JSC::Structure* structure)
        : Base(vm, structure, callStats, constructStats)
    {
    }

    void finishCreation(JSC::VM& vm, JSC::JSObject* prototype)
    {
        Base::finishCreation(vm, 0, "Stats"_s);
        putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, JSC::PropertyAttribute::DontEnum | JSC::PropertyAttribute::DontDelete | JSC::PropertyAttribute::ReadOnly);
    }
};
```

### Structure caching

If there's a class, prototype, and constructor:

1. Add the `JSC::LazyClassStructure` to [ZigGlobalObject.h](mdc:src/bun.js/bindings/ZigGlobalObject.h)
2. Initialize the class structure in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the class structure in visitChildren in [ZigGlobalObject.cpp](mdc:src/bun.js/bindings/ZigGlobalObject.cpp) in `void GlobalObject::visitChildrenImpl`

```c++#ZigGlobalObject.cpp
void GlobalObject::finishCreation(VM& vm) {
    // ...
    m_JSStatsBigIntClassStructure.initLater(
        [](LazyClassStructure::Initializer& init) {
            // Call the function to initialize our class structure.
            Bun::initJSBigIntStatsClassStructure(init);
        });
```

Then, implement the function that creates the structure:

```c++
void setupX509CertificateClassStructure(LazyClassStructure::Initializer& init)
{
    auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
    auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);

    auto* constructorStructure = JSX509CertificateConstructor::createStructure(init.vm, init.global, init.global->functionPrototype());

    auto* constructor = JSX509CertificateConstructor::create(init.vm, init.global, constructorStructure, prototype);

    auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
    init.setPrototype(prototype);
    init.setStructure(structure);
    init.setConstructor(constructor);
}
```

If there's only a class, use `JSC::LazyProperty<JSGlobalObject, Structure>` instead of `JSC::LazyClassStructure`:

1. Add the `JSC::LazyProperty<JSGlobalObject, Structure>` to @ZigGlobalObject.h
2. Initialize the class structure in @ZigGlobalObject.cpp in `void GlobalObject::finishCreation(VM& vm)`
3. Visit the lazy property in visitChildren in @ZigGlobalObject.cpp in `void GlobalObject::visitChildrenImpl`

```c++
void GlobalObject::finishCreation(VM& vm) {
    // ...
    this->m_myLazyProperty.initLater([](const JSC::LazyProperty<JSC::JSGlobalObject, JSC::Structure>::Initializer& init) {
        init.set(Bun::initMyStructure(init.vm, reinterpret_cast<Zig::GlobalObject*>(init.owner)));
    });
```

Then, implement the function that creates the structure:

```c++
Structure* setupX509CertificateStructure(JSC::VM& vm, Zig::GlobalObject* globalObject)
{
    // If there is a prototype:
    auto* prototypeStructure = JSX509CertificatePrototype::createStructure(init.vm, init.global, init.global->objectPrototype());
    auto* prototype = JSX509CertificatePrototype::create(init.vm, init.global, prototypeStructure);

    // If there is no prototype or it only has

    auto* structure = JSX509Certificate::createStructure(init.vm, init.global, prototype);
    init.setPrototype(prototype);
    init.setStructure(structure);
    init.setConstructor(constructor);
}
```

Then, use the structure by calling `globalObject->m_myStructureName.get(globalObject)`:

```C++
JSC_DEFINE_HOST_FUNCTION(x509CertificateConstructorConstruct, (JSGlobalObject * globalObject, CallFrame* callFrame))
{
    VM& vm = globalObject->vm();
    auto scope = DECLARE_THROW_SCOPE(vm);

    if (!callFrame->argumentCount()) {
        Bun::throwError(globalObject, scope, ErrorCode::ERR_MISSING_ARGS, "X509Certificate constructor requires at least one argument"_s);
        return {};
    }

    JSValue arg = callFrame->uncheckedArgument(0);
    if (!arg.isCell()) {
        Bun::throwError(globalObject, scope, ErrorCode::ERR_INVALID_ARG_TYPE, "X509Certificate constructor argument must be a Buffer, TypedArray, or string"_s);
        return {};
    }

    auto* zigGlobalObject = defaultGlobalObject(globalObject);
    Structure* structure = zigGlobalObject->m_JSX509CertificateClassStructure.get(zigGlobalObject);
    JSValue newTarget = callFrame->newTarget();
    if (UNLIKELY(zigGlobalObject->m_JSX509CertificateClassStructure.constructor(zigGlobalObject) != newTarget)) {
        auto scope = DECLARE_THROW_SCOPE(vm);
        if (!newTarget) {
            throwTypeError(globalObject, scope, "Class constructor X509Certificate cannot be invoked without 'new'"_s);
            return {};
        }

        auto* functionGlobalObject = defaultGlobalObject(getFunctionRealm(globalObject, newTarget.getObject()));
        RETURN_IF_EXCEPTION(scope, {});
        structure = InternalFunction::createSubclassStructure(
            globalObject, newTarget.getObject(), functionGlobalObject->NodeVMScriptStructure());
        scope.release();
    }

    return JSValue::encode(createX509Certificate(vm, globalObject, structure, arg));
}
```

### Expose to Zig

To expose the constructor to zig:

```c++
extern "C" JSC::EncodedJSValue Bun__JSBigIntStatsObjectConstructor(Zig::GlobalObject* globalobject)
{
    return JSValue::encode(globalobject->m_JSStatsBigIntClassStructure.constructor(globalobject));
}
```

Zig:

```zig
extern "c" fn Bun__JSBigIntStatsObjectConstructor(*JSC.JSGlobalObject) JSC.JSValue;
pub const getBigIntStatsConstructor = Bun__JSBigIntStatsObjectConstructor;
```

To create an object (instance) of a JS class defined in C++ from Zig, follow the `__toJS` convention like this:

```c++
// X509* is whatever we need to create the object
extern "C" EncodedJSValue Bun__X509__toJS(Zig::GlobalObject* globalObject, X509* cert)
{
    // ... implementation details
    auto* structure = globalObject->m_JSX509CertificateClassStructure.get(globalObject);
    return JSValue::encode(JSX509Certificate::create(globalObject->vm(), structure, globalObject, WTFMove(cert)));
}
```

And from Zig:

```zig
const X509 = opaque {
    // ... class

    extern fn Bun__X509__toJS(*JSC.JSGlobalObject, *X509) JSC.JSValue;

    pub fn toJS(this: *X509, globalObject: *JSC.JSGlobalObject) JSC.JSValue {
        return Bun__X509__toJS(globalObject, this);
    }
};
```
@@ -1,91 +0,0 @@
---
description: Writing tests for Bun
globs:
---
# Writing tests for Bun

## Where tests are found

You'll find all of Bun's tests in the `test/` directory.

* `test/`
  * `cli/` - CLI command tests, like `bun install` or `bun init`
  * `js/` - JavaScript & TypeScript tests
    * `bun/` - `Bun` API tests, separated by category, for example: `glob/` for `Bun.Glob` tests
    * `node/` - Node.js module tests, separated by module, for example: `assert/` for `node:assert` tests
    * `test/` - Vendored Node.js tests, taken from the Node.js repository (does not conform to Bun's test style)
    * `web/` - Web API tests, separated by category, for example: `fetch/` for `Request` and `Response` tests
    * `third_party/` - npm package tests, to validate that basic usage works in Bun
  * `napi/` - N-API tests
  * `v8/` - V8 C++ API tests
  * `bundler/` - Bundler, transpiler, CSS, and `bun build` tests
  * `regression/issue/[number]` - Regression tests, always make one when fixing a particular issue (a minimal sketch appears at the end of this document)

## How tests are written

Bun's tests are written as JavaScript and TypeScript files with the Jest-style APIs, like `test`, `describe`, and `expect`. They are run with Bun's own test runner, `bun test`.

```js
import { describe, test, expect } from "bun:test";
import assert, { AssertionError } from "assert";

describe("assert(expr)", () => {
  test.each([true, 1, "foo"])(`assert(%p) does not throw`, expr => {
    expect(() => assert(expr)).not.toThrow();
  });

  test.each([false, 0, "", null, undefined])(`assert(%p) throws`, expr => {
    expect(() => assert(expr)).toThrow(AssertionError);
  });
});
```

## Testing conventions

* See `test/harness.ts` for common test utilities and helpers
* Be rigorous and test for edge cases and unexpected inputs
* Use data-driven tests, e.g. `test.each`, to reduce boilerplate when possible
* When you need to test Bun as a CLI, use the following pattern:

```js
import { test, expect } from "bun:test";
import { spawn } from "bun";
import { bunExe, bunEnv } from "harness";

test("bun --version", async () => {
  const { exited, stdout: stdoutStream, stderr: stderrStream } = spawn({
    cmd: [bunExe(), "--version"],
    env: bunEnv,
    stdout: "pipe",
    stderr: "pipe",
  });
  const [ exitCode, stdout, stderr ] = await Promise.all([
    exited,
    new Response(stdoutStream).text(),
    new Response(stderrStream).text(),
  ]);
  expect({ exitCode, stdout, stderr }).toMatchObject({
    exitCode: 0,
    stdout: expect.stringContaining(Bun.version),
    stderr: "",
  });
});
```

## Before writing a test

* If you are fixing a bug, write the test first and make sure it fails (as expected) with the canary version of Bun
* If you are fixing a Node.js compatibility bug, create a throw-away snippet of code and test that it works as you expect in Node.js, then that it fails (as expected) with the canary version of Bun
* When the expected behaviour is ambiguous, defer to matching what happens in Node.js
* Always attempt to find related tests in an existing test file before creating a new test file
@@ -1,498 +0,0 @@
---
description: How Zig works with JavaScriptCore bindings generator
globs:
alwaysApply: false
---

# Bun's JavaScriptCore Class Bindings Generator

This document explains how Bun's class bindings generator works to bridge Zig and JavaScript code through JavaScriptCore (JSC).

## Architecture Overview

Bun's binding system creates a seamless bridge between JavaScript and Zig, allowing Zig implementations to be exposed as JavaScript classes. The system has several key components:

1. **Zig Implementation** (.zig files)
2. **JavaScript Interface Definition** (.classes.ts files)
3. **Generated Code** (C++/Zig files that connect everything)

## Class Definition Files

### JavaScript Interface (.classes.ts)

The `.classes.ts` files define the JavaScript API using a declarative approach:

```typescript
// Example: encoding.classes.ts
define({
  name: "TextDecoder",
  constructor: true,
  JSType: "object",
  finalize: true,
  proto: {
    decode: {
      // Function definition
      args: 1,
    },
    encoding: {
      // Getter with caching
      getter: true,
      cache: true,
    },
    fatal: {
      // Read-only property
      getter: true,
    },
    ignoreBOM: {
      // Read-only property
      getter: true,
    }
  }
});
```

Each class definition specifies:

- The class name
- Whether it has a constructor
- JavaScript type (object, function, etc.)
- Properties and methods in the `proto` field
- Caching strategy for properties
- Finalization requirements

### Zig Implementation (.zig)

The Zig files implement the native functionality:

```zig
// Example: TextDecoder.zig
pub const TextDecoder = struct {
    // Expose generated bindings as `js` namespace with trait conversion methods
    pub const js = JSC.Codegen.JSTextDecoder;
    pub const toJS = js.toJS;
    pub const fromJS = js.fromJS;
    pub const fromJSDirect = js.fromJSDirect;

    // Internal state
    encoding: []const u8,
    fatal: bool,
    ignoreBOM: bool,

    // Constructor implementation - note use of globalObject
    pub fn constructor(
        globalObject: *JSGlobalObject,
        callFrame: *JSC.CallFrame,
    ) bun.JSError!*TextDecoder {
        // Implementation

        return bun.new(TextDecoder, .{
            // Fields
        });
    }

    // Prototype methods - note return type includes JSError
    pub fn decode(
        this: *TextDecoder,
        globalObject: *JSGlobalObject,
        callFrame: *JSC.CallFrame,
    ) bun.JSError!JSC.JSValue {
        // Implementation
    }

    // Getters
    pub fn getEncoding(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
        return JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
    }

    pub fn getFatal(this: *TextDecoder, globalObject: *JSGlobalObject) JSC.JSValue {
        return JSC.JSValue.jsBoolean(this.fatal);
    }

    // Cleanup - note standard pattern of using deinit/deref
    fn deinit(this: *TextDecoder) void {
        // Release any retained resources
        // Free the pointer at the end.
        bun.destroy(this);
    }

    // Finalize - called by the JS garbage collector. This should call deinit, or deref if reference counted.
    pub fn finalize(this: *TextDecoder) void {
        this.deinit();
    }
};
```

Key components in the Zig file:

- The struct containing native state
- `pub const js = JSC.Codegen.JS<ClassName>` to include generated code
- Constructor and methods using the `bun.JSError!JSValue` return type for proper error handling
- Consistent use of the `globalObject` parameter name instead of `ctx`
- Methods matching the JavaScript interface
- Getters/setters for properties
- Proper resource cleanup pattern with `deinit()` and `finalize()`

## Code Generation System

The binding generator produces C++ code that connects JavaScript and Zig:

1. **JSC Class Structure**: Creates C++ classes for the JS object, prototype, and constructor
2. **Memory Management**: Handles GC integration through JSC's WriteBarrier
3. **Method Binding**: Connects JS function calls to Zig implementations
4. **Type Conversion**: Converts between JS values and Zig types
5. **Property Caching**: Implements the caching system for properties

The generated C++ code includes:

- A JSC wrapper class (`JSTextDecoder`)
- A prototype class (`JSTextDecoderPrototype`)
- A constructor function (`JSTextDecoderConstructor`)
- Function bindings (`TextDecoderPrototype__decodeCallback`)
- Property getters/setters (`TextDecoderPrototype__encodingGetterWrap`)

## CallFrame Access

The `CallFrame` object provides access to the JavaScript execution context:

```zig
pub fn decode(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
    // Get arguments
    const input = callFrame.argument(0);
    const options = callFrame.argument(1);

    // Get this value
    const thisValue = callFrame.thisValue();

    // Implementation with error handling
    if (input.isUndefinedOrNull()) {
        return globalObject.throw("Input cannot be null or undefined", .{});
    }

    // Return value or throw error
    return JSC.JSValue.jsString(globalObject, "result");
}
```

CallFrame methods include the following (a short usage sketch follows the list):

- `argument(i)`: Get the i-th argument
- `argumentCount()`: Get the number of arguments
- `thisValue()`: Get the `this` value
- `callee()`: Get the function being called

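To make these helpers concrete, here is a minimal, hypothetical sketch rather than code from the generated bindings: the method name `firstArgument` and its behavior are made up; only `argumentCount()`, `argument(i)`, and `globalObject.throw` come from the patterns already shown in this document.

```zig
// Hypothetical prototype method; illustrative only.
pub fn firstArgument(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
    _ = this;

    // argumentCount() reports how many arguments the caller actually passed.
    if (callFrame.argumentCount() == 0) {
        return globalObject.throw("Expected at least one argument", .{});
    }

    // argument(i) fetches the i-th argument; thisValue() is also available here.
    return callFrame.argument(0);
}
```
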
## Property Caching and GC-Owned Values

The `cache: true` option in property definitions enables JSC's WriteBarrier to efficiently store values:

```typescript
encoding: {
  getter: true,
  cache: true, // Enable caching
}
```

### C++ Implementation

In the generated C++ code, caching uses JSC's WriteBarrier:

```cpp
JSC_DEFINE_CUSTOM_GETTER(TextDecoderPrototype__encodingGetterWrap, (...)) {
    auto& vm = JSC::getVM(lexicalGlobalObject);
    Zig::GlobalObject *globalObject = reinterpret_cast<Zig::GlobalObject*>(lexicalGlobalObject);
    auto throwScope = DECLARE_THROW_SCOPE(vm);
    JSTextDecoder* thisObject = jsCast<JSTextDecoder*>(JSValue::decode(encodedThisValue));
    JSC::EnsureStillAliveScope thisArg = JSC::EnsureStillAliveScope(thisObject);

    // Check for cached value and return if present
    if (JSValue cachedValue = thisObject->m_encoding.get())
        return JSValue::encode(cachedValue);

    // Get value from Zig implementation
    JSC::JSValue result = JSC::JSValue::decode(
        TextDecoderPrototype__getEncoding(thisObject->wrapped(), globalObject)
    );
    RETURN_IF_EXCEPTION(throwScope, {});

    // Store in cache for future access
    thisObject->m_encoding.set(vm, thisObject, result);
    RELEASE_AND_RETURN(throwScope, JSValue::encode(result));
}
```

### Zig Accessor Functions

For each cached property, the generator creates Zig accessor functions that allow Zig code to work with these GC-owned values:

```zig
// External function declarations
extern fn TextDecoderPrototype__encodingSetCachedValue(JSC.JSValue, *JSC.JSGlobalObject, JSC.JSValue) callconv(JSC.conv) void;
extern fn TextDecoderPrototype__encodingGetCachedValue(JSC.JSValue) callconv(JSC.conv) JSC.JSValue;

/// `TextDecoder.encoding` setter
/// This value will be visited by the garbage collector.
pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void {
    JSC.markBinding(@src());
    TextDecoderPrototype__encodingSetCachedValue(thisValue, globalObject, value);
}

/// `TextDecoder.encoding` getter
/// This value will be visited by the garbage collector.
pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue {
    JSC.markBinding(@src());
    const result = TextDecoderPrototype__encodingGetCachedValue(thisValue);
    if (result == .zero)
        return null;

    return result;
}
```

### Benefits of GC-Owned Values

This system provides several key benefits:

1. **Automatic Memory Management**: The JavaScriptCore GC tracks and manages these values
2. **Proper Garbage Collection**: The WriteBarrier ensures values are properly visited during GC
3. **Consistent Access**: Zig code can easily get/set these cached JS values
4. **Performance**: Cached values avoid repeated computation or serialization

### Use Cases

GC-owned cached values are particularly useful for:

1. **Computed Properties**: Store expensive computation results
2. **Lazily Created Objects**: Create objects only when needed, then cache them
3. **References to Other Objects**: Store references to other JS objects that need GC tracking
4. **Memoization**: Cache results based on input parameters

The WriteBarrier mechanism ensures that any JS values stored in this way are properly tracked by the garbage collector.

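As a rough sketch of the lazy-creation and memoization cases, the method below combines `callFrame.thisValue()` with the cached-value accessors from the previous section. It assumes those generated accessors are reachable through the `js` codegen namespace (`JSC.Codegen.JSTextDecoder`); the method name `cachedEncoding` and its exact behavior are hypothetical, not part of Bun's generated code.

```zig
// Hypothetical method on TextDecoder: return the cached `encoding` string,
// creating and caching it only on first access.
pub fn cachedEncoding(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
    const thisValue = callFrame.thisValue();

    // Fast path: the WriteBarrier already holds a value, so the GC keeps it
    // alive and we can return it without touching native state.
    if (js.encodingGetCached(thisValue)) |cached| {
        return cached;
    }

    // Slow path: build the JS value once, then store it in the cache so
    // future calls can reuse it.
    const value = JSC.JSValue.createStringFromUTF8(globalObject, this.encoding);
    js.encodingSetCached(thisValue, globalObject, value);
    return value;
}
```
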
## Memory Management and Finalization

The binding system handles memory management across the JavaScript/Zig boundary:

1. **Object Creation**: JavaScript `new TextDecoder()` creates both a JS wrapper and a Zig struct
2. **Reference Tracking**: JSC's GC tracks all JS references to the object
3. **Finalization**: When the JS object is collected, the finalizer releases Zig resources

Bun uses a consistent pattern for resource cleanup:

```zig
// Resource cleanup method - separate from finalization
pub fn deinit(this: *TextDecoder) void {
    // Release resources like strings
    this._encoding.deref(); // String deref pattern

    // Free any buffers
    if (this.buffer) |buffer| {
        bun.default_allocator.free(buffer);
    }
}

// Called by the GC when the object is collected
pub fn finalize(this: *TextDecoder) void {
    JSC.markBinding(@src()); // For debugging
    this.deinit(); // Clean up resources
    bun.default_allocator.destroy(this); // Free the object itself
}
```

Some objects that hold references to other JS objects use `.deref()` instead:

```zig
pub fn finalize(this: *SocketAddress) void {
    JSC.markBinding(@src());
    this._presentation.deref(); // Release references
    this.destroy();
}
```

## Error Handling with JSError

Bun uses the `bun.JSError!JSValue` return type for proper error handling:

```zig
pub fn decode(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
    // Throwing an error
    if (callFrame.argumentCount() < 1) {
        return globalObject.throw("Missing required argument", .{});
    }

    // Or returning a success value
    return JSC.JSValue.jsString(globalObject, "Success!");
}
```

This pattern allows Zig functions to:

1. Return JavaScript values on success
2. Throw JavaScript exceptions on error
3. Propagate errors automatically through the call stack (see the sketch below)

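To illustrate point 3, here is a minimal, hypothetical wrapper around the `decode` method shown above: a plain `try` is enough to re-raise any JavaScript exception to the caller, so errors propagate up the call stack without extra plumbing. The wrapper itself is not part of the generated bindings.

```zig
// Hypothetical helper that simply forwards to decode(); illustrative only.
pub fn callDecode(
    this: *TextDecoder,
    globalObject: *JSGlobalObject,
    callFrame: *JSC.CallFrame,
) bun.JSError!JSC.JSValue {
    // `try` returns the error to our caller if decode() throws, so the pending
    // JavaScript exception propagates automatically.
    const result = try this.decode(globalObject, callFrame);

    // On success, keep working with the returned JSValue.
    return result;
}
```
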
## Type Safety and Error Handling

The binding system includes robust error handling:

```cpp
// Example of type checking in generated code
JSTextDecoder* thisObject = jsDynamicCast<JSTextDecoder*>(callFrame->thisValue());
if (UNLIKELY(!thisObject)) {
    scope.throwException(lexicalGlobalObject,
        Bun::createInvalidThisError(lexicalGlobalObject, callFrame->thisValue(), "TextDecoder"_s));
    return {};
}
```

## Prototypal Inheritance

The binding system creates proper JavaScript prototype chains:

1. **Constructor**: `JSTextDecoderConstructor` with a standard `.prototype` property
2. **Prototype**: `JSTextDecoderPrototype` with methods and properties
3. **Instances**: Each `JSTextDecoder` instance with `__proto__` pointing to the prototype

This ensures JavaScript inheritance works as expected:

```cpp
// From generated code
void JSTextDecoderConstructor::finishCreation(VM& vm, JSC::JSGlobalObject* globalObject, JSTextDecoderPrototype* prototype)
{
    Base::finishCreation(vm, 0, "TextDecoder"_s, PropertyAdditionMode::WithoutStructureTransition);

    // Set up the prototype chain
    putDirectWithoutTransition(vm, vm.propertyNames->prototype, prototype, PropertyAttribute::DontEnum | PropertyAttribute::DontDelete | PropertyAttribute::ReadOnly);
    ASSERT(inherits(info()));
}
```

## Performance Considerations

The binding system is optimized for performance:

1. **Direct Pointer Access**: JavaScript objects maintain a direct pointer to Zig objects
2. **Property Caching**: WriteBarrier caching avoids repeated native calls for stable properties
3. **Memory Management**: JSC garbage collection integrated with Zig memory management
4. **Type Conversion**: Fast paths for common JavaScript/Zig type conversions

## Creating a New Class Binding

To create a new class binding in Bun:

1. **Define the class interface** in a `.classes.ts` file:

```typescript
define({
  name: "MyClass",
  constructor: true,
  finalize: true,
  proto: {
    myMethod: {
      args: 1,
    },
    myProperty: {
      getter: true,
      cache: true,
    }
  }
});
```

2. **Implement the native functionality** in a `.zig` file:

```zig
pub const MyClass = struct {
    // Generated bindings
    pub const js = JSC.Codegen.JSMyClass;
    pub const toJS = js.toJS;
    pub const fromJS = js.fromJS;
    pub const fromJSDirect = js.fromJSDirect;

    // State
    value: []const u8,

    pub const new = bun.TrivialNew(@This());

    // Constructor
    pub fn constructor(
        globalObject: *JSGlobalObject,
        callFrame: *JSC.CallFrame,
    ) bun.JSError!*MyClass {
        const arg = callFrame.argument(0);
        // Implementation
    }

    // Method
    pub fn myMethod(
        this: *MyClass,
        globalObject: *JSGlobalObject,
        callFrame: *JSC.CallFrame,
    ) bun.JSError!JSC.JSValue {
        // Implementation
    }

    // Getter
    pub fn getMyProperty(this: *MyClass, globalObject: *JSGlobalObject) JSC.JSValue {
        return JSC.JSValue.jsString(globalObject, this.value);
    }

    // Resource cleanup
    pub fn deinit(this: *MyClass) void {
        // Clean up resources
    }

    pub fn finalize(this: *MyClass) void {
        this.deinit();
        bun.destroy(this);
    }
};
```

3. **The binding generator** creates all necessary C++ and Zig glue code to connect JavaScript and Zig, including:

- C++ class definitions
- Method and property bindings
- Memory management utilities
- GC integration code

## Generated Code Structure

The binding generator produces several components:

### 1. C++ Classes

For each Zig class, the system generates:

- **`JS<Class>`**: Main wrapper that holds a pointer to the Zig object (`JSTextDecoder`)
- **`JS<Class>Prototype`**: Contains methods and properties (`JSTextDecoderPrototype`)
- **`JS<Class>Constructor`**: Implementation of the JavaScript constructor (`JSTextDecoderConstructor`)

### 2. C++ Methods and Properties

- **Method Callbacks**: `TextDecoderPrototype__decodeCallback`
- **Property Getters/Setters**: `TextDecoderPrototype__encodingGetterWrap`
- **Initialization Functions**: `finishCreation` methods for setting up the class

### 3. Zig Bindings

- **External Function Declarations**:

  ```zig
  extern fn TextDecoderPrototype__decode(*TextDecoder, *JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSC.EncodedJSValue;
  ```

- **Cached Value Accessors**:

  ```zig
  pub fn encodingGetCached(thisValue: JSC.JSValue) ?JSC.JSValue { ... }
  pub fn encodingSetCached(thisValue: JSC.JSValue, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { ... }
  ```

- **Constructor Helpers**:

  ```zig
  pub fn create(globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { ... }
  ```

### 4. GC Integration

- **Memory Cost Calculation**: `estimatedSize` method
- **Child Visitor Methods**: `visitChildrenImpl` and `visitAdditionalChildren`
- **Heap Analysis**: `analyzeHeap` for debugging memory issues

This architecture makes it possible to implement high-performance native functionality in Zig while exposing a clean, idiomatic JavaScript API to users.

@@ -1,7 +0,0 @@
# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv)
bench
vendor
*-fixture.{js,ts}
zig-cache
packages/bun-uws/fuzzing
build
@@ -1,21 +0,0 @@
**/*.a
**/*.o
**/.next
**/CMakeCache.txt
**/node_modules
.git
examples
node_modules
packages/**/bun
packages/**/bun-profile
src/bun.js/WebKit
src/bun.js/WebKit/LayoutTests
zig-build
.zig-cache
zig-out
build
vendor
node_modules
*.trace

packages/bun-uws/fuzzing
@@ -1,8 +0,0 @@
# https://EditorConfig.org
root = true

[*]
charset = utf-8
insert_final_newline = true
trim_trailing_whitespace = true
end_of_line = lf
@@ -1,8 +0,0 @@
# Add commits to ignore in `git blame`. This allows large stylistic refactors to
# avoid mucking up blames.
#
# To configure git to use this, run:
#
#   git config blame.ignoreRevsFile .git-blame-ignore-revs
#
4ec410e0d7c5f6a712c323444edbf56b48d432d8 # make @import("bun") work in zig (#19096)
.gitattributes (vendored, 57 lines)
@@ -1,57 +0,0 @@
|
||||
*.css text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.js text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.jsx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.tsx text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.ts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.c text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.cpp text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.cc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.yml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.toml text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.zig text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.rs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.h text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.json text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.lock text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.map text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.md text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mdc text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mjs text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
*.mts text eol=lf whitespace=blank-at-eol,-blank-at-eof,-space-before-tab,tab-in-indent,tabwidth=2
|
||||
|
||||
*.lockb binary diff=lockb
|
||||
|
||||
.vscode/launch.json linguist-generated
|
||||
src/api/schema.d.ts linguist-generated
|
||||
fixture.*.c linguist-generated
|
||||
src/api/schema.js linguist-generated
|
||||
*-fixture* linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedCode.h linguist-generated
|
||||
src/bun.js/bindings/ZigGeneratedCode.cpp linguist-generated
|
||||
src/bun.js/bindings/headers.h linguist-generated
|
||||
src/bun.js/bindings/headers.zig linguist-generated
|
||||
|
||||
packages/bun-uws/fuzzing/seed-corpus/**/* linguist-generated
|
||||
|
||||
src/bun.js/bindings/sqlite/sqlite3.c linguist-vendored
|
||||
src/bun.js/bindings/sqlite/sqlite3_local.h linguist-vendored
|
||||
src/bun.js/bindings/simdutf.cpp linguist-vendored
|
||||
src/bun.js/bindings/simdutf.h linguist-vendored
|
||||
|
||||
docs/**/* linguist-documentation
|
||||
|
||||
# Don't count tests in the language stats - https://github.com/github-linguist/linguist/blob/master/docs/overrides.md
|
||||
test/**/* linguist-documentation
|
||||
bench/**/* linguist-documentation
|
||||
examples/**/* linguist-documentation
|
||||
|
||||
vendor/*.c linguist-vendored
|
||||
vendor/brotli/** linguist-vendored
|
||||
|
||||
test/js/node/test/fixtures linguist-vendored
|
||||
test/js/node/test/common linguist-vendored
|
||||
|
||||
test/js/bun/css/files linguist-vendored
|
||||
|
||||
.vscode/*.json linguist-language=JSON-with-Comments
|
||||
src/cli/init/tsconfig.default.json linguist-language=JSON-with-Comments
|
||||
.github/CODEOWNERS (vendored, 18 lines)
@@ -1,18 +0,0 @@
# Project
.github/CODEOWNERS @Jarred-Sumner

# Build system
CMakeLists.txt @Electroid
cmake/ @Electroid
scripts/ @Electroid

# CI
.buildkite/ @Electroid
.github/workflows/ @Electroid

# Debugger protocol
packages/bun-inspector-protocol/ @Electroid
packages/bun-debug-adapter-protocol/ @Electroid

# Tests
test/expectations.txt @Jarred-Sumner
.github/ISSUE_TEMPLATE/2-bug-report.yml (vendored, 47 lines)
@@ -1,47 +0,0 @@
|
||||
name: 🐛 Bug Report
|
||||
description: Report an issue that should be fixed
|
||||
labels:
|
||||
- bug
|
||||
- needs triage
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for submitting a bug report. It helps make Bun better.
|
||||
|
||||
If you need help or support using Bun, and are not reporting a bug, please
|
||||
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
|
||||
|
||||
Make sure you are running the [latest](https://bun.sh/docs/installation#upgrading) version of Bun.
|
||||
The bug you are experiencing may already have been fixed.
|
||||
|
||||
Please try to include as much information as possible.
|
||||
|
||||
- type: input
|
||||
attributes:
|
||||
label: What version of Bun is running?
|
||||
description: Copy the output of `bun --revision`
|
||||
- type: input
|
||||
attributes:
|
||||
label: What platform is your computer?
|
||||
description: |
|
||||
For MacOS and Linux: copy the output of `uname -mprs`
|
||||
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What steps can reproduce the bug?
|
||||
description: Explain the bug and provide a code snippet that can reproduce it.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What is the expected behavior?
|
||||
description: If possible, please provide text instead of a screenshot.
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What do you see instead?
|
||||
description: If possible, please provide text instead of a screenshot.
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Additional information
|
||||
description: Is there anything else you think we should know?
|
||||
@@ -1,45 +0,0 @@
|
||||
name: 🇹 TypeScript Type Bug Report
|
||||
description: Report an issue with TypeScript types
|
||||
labels: [bug, types]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for submitting a bug report. It helps make Bun better.
|
||||
|
||||
If you need help or support using Bun, and are not reporting a bug, please
|
||||
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can ask questions in the [`#help`](https://discord.gg/32EtH6p7HN) forum.
|
||||
|
||||
Make sure you are running the [latest](https://bun.sh/docs/installation#upgrading) version of Bun.
|
||||
The bug you are experiencing may already have been fixed.
|
||||
|
||||
Please try to include as much information as possible.
|
||||
|
||||
- type: input
|
||||
attributes:
|
||||
label: What version of Bun is running?
|
||||
description: Copy the output of `bun --revision`
|
||||
- type: input
|
||||
attributes:
|
||||
label: What platform is your computer?
|
||||
description: |
|
||||
For MacOS and Linux: copy the output of `uname -mprs`
|
||||
For Windows: copy the output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in the PowerShell console
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What steps can reproduce the bug?
|
||||
description: Explain the bug and provide a code snippet that can reproduce it.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What is the expected behavior?
|
||||
description: If possible, please provide text instead of a screenshot.
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What do you see instead?
|
||||
description: If possible, please provide text instead of a screenshot.
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Additional information
|
||||
description: Is there anything else you think we should know?
|
||||
.github/ISSUE_TEMPLATE/4-feature-request.yml (vendored, 24 lines)
@@ -1,24 +0,0 @@
|
||||
name: 🚀 Feature Request
|
||||
description: Suggest an idea, feature, or enhancement
|
||||
labels: [enhancement]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for submitting an idea. It helps make Bun better.
|
||||
|
||||
If you want to discuss Bun, or learn how others are using Bun, please
|
||||
join our [Discord](https://discord.gg/CXdq2DP29u) server, where you can share in the [`#feedback`](https://discord.gg/unwUnHBNqy) channel.
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What is the problem this feature would solve?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What is the feature you are proposing to solve the problem?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What alternatives have you considered?
|
||||
.github/ISSUE_TEMPLATE/5-docs-issue.yml (vendored, 29 lines)
@@ -1,29 +0,0 @@
|
||||
name: 📗 Documentation Issue
|
||||
description: Tell us if there is missing or incorrect documentation
|
||||
labels: [docs]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for submitting a documentation request. It helps make Bun better.
|
||||
|
||||
We are working on moving documentation from the [README](https://github.com/oven-sh/bun#table-of-contents) to a documentation website. Please report as many issues or missing content requests as you can so we can incorporate that in the new documentation.
|
||||
- type: dropdown
|
||||
attributes:
|
||||
label: What is the type of issue?
|
||||
multiple: true
|
||||
options:
|
||||
- Documentation is missing
|
||||
- Documentation is incorrect
|
||||
- Documentation is confusing
|
||||
- Example code is not working
|
||||
- Something else
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: What is the issue?
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Where did you find it?
|
||||
description: If possible, please provide the URL(s) where you found this issue.
|
||||
.github/ISSUE_TEMPLATE/6-crash-report.yml (vendored, 27 lines)
@@ -1,27 +0,0 @@
|
||||
name: Prefilled crash report
|
||||
description: Report a crash in Bun
|
||||
labels:
|
||||
- crash
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
**Thank you so much** for submitting a crash report. You're helping us make Bun more reliable for everyone!
|
||||
- type: textarea
|
||||
id: code
|
||||
attributes:
|
||||
label: How can we reproduce the crash?
|
||||
description: Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun) or [CodeSandbox](https://codesandbox.io/templates/bun)
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant log output. This will be
|
||||
automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
- type: textarea
|
||||
id: remapped_trace
|
||||
attributes:
|
||||
label: Stack Trace (bun.report)
|
||||
validations:
|
||||
required: true
|
||||
@@ -1,34 +0,0 @@
|
||||
name: bun install crash report
|
||||
description: Report a crash in bun install
|
||||
labels:
|
||||
- npm
|
||||
- crash
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
**Thank you so much** for submitting a crash report. You're helping us make Bun more reliable for everyone!
|
||||
- type: textarea
|
||||
id: package_json
|
||||
attributes:
|
||||
label: "`package.json` file"
|
||||
description: "Can you upload your `package.json` file? This helps us reproduce the crash."
|
||||
render: json
|
||||
- type: textarea
|
||||
id: repro
|
||||
attributes:
|
||||
label: How can we reproduce the crash?
|
||||
description: Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun) or [CodeSandbox](https://codesandbox.io/templates/bun)
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant log output. This will be
|
||||
automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
- type: textarea
|
||||
id: remapped_trace
|
||||
attributes:
|
||||
label: Stack Trace (bun.report)
|
||||
validations:
|
||||
required: true
|
||||
.github/ISSUE_TEMPLATE/config.yml (vendored, 5 lines)
@@ -1,5 +0,0 @@
blank_issues_enabled: true
contact_links:
  - name: 💬 Ask a Question
    url: https://discord.com/invite/CXdq2DP29u
    about: Join our Discord server for questions, support requests, or just to chat.
.github/actions/bump/action.yml (vendored, 43 lines)
@@ -1,43 +0,0 @@
|
||||
name: Bump version
|
||||
description: Bump the version of Bun
|
||||
|
||||
inputs:
|
||||
version:
|
||||
description: The most recent version of Bun.
|
||||
required: true
|
||||
type: string
|
||||
token:
|
||||
description: The GitHub token to use for creating a pull request.
|
||||
required: true
|
||||
type: string
|
||||
default: ${{ github.token }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Run Bump
|
||||
shell: bash
|
||||
id: bump
|
||||
run: |
|
||||
set -euo pipefail
|
||||
MESSAGE=$(bun ./scripts/bump.ts patch --last-version=${{ inputs.version }})
|
||||
LATEST=$(cat LATEST)
|
||||
echo "version=$LATEST" >> $GITHUB_OUTPUT
|
||||
echo "message=$MESSAGE" >> $GITHUB_OUTPUT
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
add-paths: |
|
||||
CMakeLists.txt
|
||||
LATEST
|
||||
token: ${{ inputs.token }}
|
||||
commit-message: Bump version to ${{ steps.bump.outputs.version }}
|
||||
title: Bump to ${{ steps.bump.outputs.version }}
|
||||
delete-branch: true
|
||||
branch: github-actions/bump-version-${{ steps.bump.outputs.version }}--${{ github.run_id }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
${{ steps.bump.outputs.message }}
|
||||
|
||||
Auto-bumped by [this workflow](https://github.com/oven-sh/bun/actions/workflows/release.yml)
|
||||
.github/actions/setup-bun/action.yml (vendored, 51 lines)
@@ -1,51 +0,0 @@
|
||||
name: Setup Bun
|
||||
description: An internal version of the 'oven-sh/setup-bun' action.
|
||||
|
||||
inputs:
|
||||
bun-version:
|
||||
type: string
|
||||
description: "The version of bun to install: 'latest', 'canary', 'bun-v1.2.0', etc."
|
||||
default: latest
|
||||
required: false
|
||||
baseline:
|
||||
type: boolean
|
||||
description: "Whether to use the baseline version of bun."
|
||||
default: false
|
||||
required: false
|
||||
download-url:
|
||||
type: string
|
||||
description: "The base URL to download bun from."
|
||||
default: "https://pub-5e11e972747a44bf9aaf9394f185a982.r2.dev/releases"
|
||||
required: false
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Setup Bun
|
||||
shell: bash
|
||||
run: |
|
||||
case "$(uname -s)" in
|
||||
Linux*) os=linux;;
|
||||
Darwin*) os=darwin;;
|
||||
*) os=windows;;
|
||||
esac
|
||||
case "$(uname -m)" in
|
||||
arm64 | aarch64) arch=aarch64;;
|
||||
*) arch=x64;;
|
||||
esac
|
||||
case "${{ inputs.baseline }}" in
|
||||
true | 1) target="bun-${os}-${arch}-baseline";;
|
||||
*) target="bun-${os}-${arch}";;
|
||||
esac
|
||||
case "${{ inputs.bun-version }}" in
|
||||
latest) release="latest";;
|
||||
canary) release="canary";;
|
||||
*) release="bun-v${{ inputs.bun-version }}";;
|
||||
esac
|
||||
curl -LO "${{ inputs.download-url }}/${release}/${target}.zip" --retry 5
|
||||
unzip ${target}.zip
|
||||
mkdir -p ${{ runner.temp }}/.bun/bin
|
||||
mv ${target}/bun* ${{ runner.temp }}/.bun/bin/
|
||||
chmod +x ${{ runner.temp }}/.bun/bin/*
|
||||
ln -fs ${{ runner.temp }}/.bun/bin/bun ${{ runner.temp }}/.bun/bin/bunx
|
||||
echo "${{ runner.temp }}/.bun/bin" >> ${GITHUB_PATH}
|
||||
.github/pull_request_template.md (vendored, 50 lines)
@@ -1,50 +0,0 @@
|
||||
### What does this PR do?
|
||||
|
||||
<!-- **Please explain what your changes do**, example: -->
|
||||
|
||||
<!--
|
||||
|
||||
This adds a new flag --bail to bun test. When set, it will stop running tests after the first failure. This is useful for CI environments where you want to fail fast.
|
||||
|
||||
-->
|
||||
|
||||
- [ ] Documentation or TypeScript types (it's okay to leave the rest blank in this case)
|
||||
- [ ] Code changes
|
||||
|
||||
### How did you verify your code works?
|
||||
|
||||
<!-- **For code changes, please include automated tests**. Feel free to uncomment the line below -->
|
||||
|
||||
<!-- I wrote automated tests -->
|
||||
|
||||
<!-- If JavaScript/TypeScript modules or builtins changed:
|
||||
|
||||
- [ ] I included a test for the new code, or existing tests cover it
|
||||
- [ ] I ran my tests locally and they pass (`bun-debug test test-file-name.test`)
|
||||
|
||||
-->
|
||||
|
||||
<!-- If Zig files changed:
|
||||
|
||||
- [ ] I checked the lifetime of memory allocated to verify it's (1) freed and (2) only freed when it should be
|
||||
- [ ] I included a test for the new code, or an existing test covers it
|
||||
- [ ] JSValue used outside of the stack is either wrapped in a JSC.Strong or is JSValueProtect'ed
|
||||
- [ ] I wrote TypeScript/JavaScript tests and they pass locally (`bun-debug test test-file-name.test`)
|
||||
-->
|
||||
|
||||
<!-- If new methods, getters, or setters were added to a publicly exposed class:
|
||||
|
||||
- [ ] I added TypeScript types for the new methods, getters, or setters
|
||||
-->
|
||||
|
||||
<!-- If dependencies in tests changed:
|
||||
|
||||
- [ ] I made sure that specific versions of dependencies are used instead of ranged or tagged versions
|
||||
-->
|
||||
|
||||
<!-- If a new builtin ESM/CJS module was added:
|
||||
|
||||
- [ ] I updated Aliases in `module_loader.zig` to include the new module
|
||||
- [ ] I added a test that imports the module
|
||||
- [ ] I added a test that require() the module
|
||||
-->
|
||||
.github/workflows/comment-lint.yml (vendored, 85 lines)
@@ -1,85 +0,0 @@
|
||||
name: C++ Linter comment
|
||||
|
||||
permissions:
|
||||
actions: read
|
||||
pull-requests: write
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows:
|
||||
- lint-cpp
|
||||
types:
|
||||
- completed
|
||||
|
||||
jobs:
|
||||
comment-lint:
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
name: Comment
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download Comment
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: format.log
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: PR Number
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: pr-number.txt
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: Did Fail
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: did_fail.txt
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ github.event.workflow_run.id }}
|
||||
- name: Setup Environment
|
||||
id: env
|
||||
shell: bash
|
||||
run: |
|
||||
# Copy to outputs
|
||||
echo "pr-number=$(cat pr-number.txt)" >> $GITHUB_OUTPUT
|
||||
{
|
||||
echo 'text_output<<EOF'
|
||||
cat format.log
|
||||
echo EOF
|
||||
} >> "$GITHUB_OUTPUT"
|
||||
echo "did_fail=$(cat did_fail.txt)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Find Comment
|
||||
id: comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
comment-author: github-actions[bot]
|
||||
body-includes: <!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
|
||||
- name: Update Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
if: steps.env.outputs.did_fail != '0'
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
body: |
|
||||
@${{ github.actor }}, `clang-tidy` had something to share with you about your code:
|
||||
|
||||
```cpp
|
||||
${{ steps.env.outputs.text_output }}
|
||||
```
|
||||
|
||||
Commit: ${{ github.event.workflow_run.head_sha || github.sha }}
|
||||
|
||||
<!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
|
||||
edit-mode: replace
|
||||
- name: Update Previous Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
if: steps.env.outputs.did_fail == '0' && steps.comment.outputs.comment-id != ''
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ steps.env.outputs.pr-number }}
|
||||
body: |
|
||||
clang-tidy nits are fixed! Thank you.
|
||||
|
||||
<!-- generated-comment lint-cpp-workflow=${{ github.workflow }} -->
|
||||
edit-mode: replace
|
||||
.github/workflows/docs.yml (vendored, 22 lines)
@@ -1,22 +0,0 @@
|
||||
name: Docs
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- "docs/**"
|
||||
- "packages/bun-types/**.d.ts"
|
||||
- "CONTRIBUTING.md"
|
||||
branches:
|
||||
- main
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
name: Deploy
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
steps:
|
||||
# redeploy Vercel site when a file in `docs` changes
|
||||
# using VERCEL_DEPLOY_HOOK environment variable
|
||||
- name: Trigger Webhook
|
||||
run: |
|
||||
curl -v ${{ secrets.VERCEL_DEPLOY_HOOK }}
|
||||
.github/workflows/format.yml (vendored, 65 lines)
@@ -1,65 +0,0 @@
|
||||
name: format
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
merge_group:
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.2.11"
|
||||
LLVM_VERSION: "19.1.7"
|
||||
LLVM_VERSION_MAJOR: "19"
|
||||
|
||||
jobs:
|
||||
format:
|
||||
name: Format
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Configure Git
|
||||
run: |
|
||||
git config --global core.autocrlf true
|
||||
git config --global core.ignorecase true
|
||||
git config --global core.precomposeUnicode true
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Setup Dependencies
|
||||
run: |
|
||||
bun install
|
||||
- name: Install LLVM
|
||||
run: |
|
||||
curl -fsSL https://apt.llvm.org/llvm.sh | sudo bash -s -- ${{ env.LLVM_VERSION_MAJOR }} all
|
||||
- name: Setup Zig
|
||||
uses: mlugg/setup-zig@v1
|
||||
with:
|
||||
version: 0.14.0
|
||||
- name: Zig Format
|
||||
run: |
|
||||
zig fmt src/**.zig
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "`bun run zig-format`"
|
||||
- name: Prettier Format
|
||||
run: |
|
||||
bun run prettier
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "`bun run prettier`"
|
||||
- name: Clang Format
|
||||
run: |
|
||||
bun run clang-format
|
||||
- name: Commit
|
||||
uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: "`bun run clang-format`"
|
||||
.github/workflows/labeled.yml (vendored, 184 lines)
@@ -1,184 +0,0 @@
|
||||
name: Issue Labeled
|
||||
env:
|
||||
BUN_VERSION: 1.1.44
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [labeled]
|
||||
|
||||
jobs:
|
||||
# on-bug:
|
||||
# runs-on: ubuntu-latest
|
||||
# if: github.event.label.name == 'bug' || github.event.label.name == 'crash'
|
||||
# permissions:
|
||||
# issues: write
|
||||
# steps:
|
||||
# - name: Checkout
|
||||
# uses: actions/checkout@v4
|
||||
# with:
|
||||
# sparse-checkout: |
|
||||
# scripts
|
||||
# .github
|
||||
# CMakeLists.txt
|
||||
# - name: Setup Bun
|
||||
# uses: ./.github/actions/setup-bun
|
||||
# with:
|
||||
# bun-version: ${{ env.BUN_VERSION }}
|
||||
# - name: "categorize bug"
|
||||
# id: add-labels
|
||||
# env:
|
||||
# GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
# GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
# ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||
# shell: bash
|
||||
# run: |
|
||||
# echo '{"dependencies": { "@anthropic-ai/sdk": "latest" }}' > scripts/package.json && bun install --cwd=./scripts
|
||||
# LABELS=$(bun scripts/label-issue.ts)
|
||||
# echo "labels=$LABELS" >> $GITHUB_OUTPUT
|
||||
# - name: Add labels
|
||||
# uses: actions-cool/issues-helper@v3
|
||||
# if: steps.add-labels.outputs.labels != ''
|
||||
# with:
|
||||
# actions: "add-labels"
|
||||
# token: ${{ secrets.GITHUB_TOKEN }}
|
||||
# issue-number: ${{ github.event.issue.number }}
|
||||
# labels: ${{ steps.add-labels.outputs.labels }}
|
||||
on-labeled:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.label.name == 'crash' || github.event.label.name == 'needs repro'
|
||||
permissions:
|
||||
issues: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
scripts
|
||||
.github
|
||||
CMakeLists.txt
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: "add platform and command label"
|
||||
id: add-labels
|
||||
if: github.event.label.name == 'crash'
|
||||
env:
|
||||
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
GITHUB_ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
shell: bash
|
||||
run: |
|
||||
LABELS=$(bun scripts/read-issue.ts)
|
||||
bun scripts/is-outdated.ts
|
||||
|
||||
if [[ -f "is-outdated.txt" ]]; then
|
||||
echo "is-outdated=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "outdated.txt" ]]; then
|
||||
echo "outdated=$(cat outdated.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "is-very-outdated.txt" ]]; then
|
||||
echo "is-very-outdated=true" >> $GITHUB_OUTPUT
|
||||
LABELS="$LABELS,old-version"
|
||||
else
|
||||
echo "is-very-outdated=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
echo "latest=$(cat LATEST)" >> $GITHUB_OUTPUT
|
||||
echo "labels=$LABELS" >> $GITHUB_OUTPUT
|
||||
rm -rf is-outdated.txt outdated.txt latest.txt is-very-outdated.txt
|
||||
- name: Generate comment text with Sentry Link
|
||||
if: github.event.label.name == 'crash'
|
||||
# ignore if fail
|
||||
continue-on-error: true
|
||||
id: generate-comment-text
|
||||
env:
|
||||
GITHUB_ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_EVENTS_SECRET }}
|
||||
shell: bash
|
||||
run: |
|
||||
bun scripts/associate-issue-with-sentry.ts
|
||||
|
||||
if [[ -f "sentry-link.txt" ]]; then
|
||||
echo "sentry-link=$(cat sentry-link.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [[ -f "sentry-id.txt" ]]; then
|
||||
echo "sentry-id=$(cat sentry-id.txt)" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
- name: Remove old labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: github.event.label.name == 'crash' && steps.add-labels.outputs.is-very-outdated == 'false'
|
||||
with:
|
||||
actions: "remove-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
labels: old-version
|
||||
- name: Add labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: github.event.label.name == 'crash'
|
||||
with:
|
||||
actions: "add-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
labels: ${{ steps.add-labels.outputs.labels }}
|
||||
- name: Comment outdated
|
||||
if: steps.add-labels.outputs.is-outdated == 'true' && github.event.label.name == 'crash' && steps.generate-comment-text.outputs.sentry-link == ''
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, the latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
|
||||
|
||||
Are you able to reproduce this crash on the latest version of Bun?
|
||||
|
||||
```sh
|
||||
bun upgrade
|
||||
```
|
||||
- name: Comment with Sentry Link and outdated version
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated == 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
@${{ github.event.issue.user.login }}, thank you for reporting this crash. The latest version of Bun is v${{ steps.add-labels.outputs.latest }}, but this crash was reported on Bun v${{ steps.add-labels.outputs.outdated }}.
|
||||
|
||||
Are you able to reproduce this crash on the latest version of Bun?
|
||||
|
||||
```sh
|
||||
bun upgrade
|
||||
```
|
||||
|
||||
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
|
||||
|
||||
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
|
||||
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
|
||||
- name: Comment with Sentry Link
|
||||
if: steps.generate-comment-text.outputs.sentry-link != '' && github.event.label.name == 'crash' && steps.add-labels.outputs.is-outdated != 'true'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
Thank you for reporting this crash.
|
||||
|
||||
For Bun's internal tracking, this issue is [${{ steps.generate-comment-text.outputs.sentry-id }}](${{ steps.generate-comment-text.outputs.sentry-link }}).
|
||||
|
||||
<!-- sentry-id: ${{ steps.generate-comment-text.outputs.sentry-id }} -->
|
||||
<!-- sentry-link: ${{ steps.generate-comment-text.outputs.sentry-link }} -->
|
||||
- name: Comment needs repro
|
||||
if: github.event.label.name == 'needs repro'
|
||||
uses: actions-cool/issues-helper@v3
|
||||
with:
|
||||
actions: "create-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.issue.number }}
|
||||
body: |
|
||||
Hello @${{ github.event.issue.user.login }}. Please provide a [minimal reproduction](https://stackoverflow.com/help/minimal-reproducible-example) using a GitHub repository, [Replit](https://replit.com/@replit/Bun), [CodeSandbox](https://codesandbox.io/templates/bun), or provide a bulleted list of commands to run that reproduce this issue. Issues marked with `needs repro` will be closed if they have no activity within 3 days.
|
||||
.github/workflows/lint.yml (vendored, 21 lines)
@@ -1,21 +0,0 @@
|
||||
name: Lint
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
BUN_VERSION: "1.2.10"
|
||||
|
||||
jobs:
|
||||
lint-js:
|
||||
name: "Lint JavaScript"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: ${{ env.BUN_VERSION }}
|
||||
- name: Lint
|
||||
run: bun lint
|
||||
.github/workflows/on-submodule-update.yml (vendored, 89 lines)
@@ -1,89 +0,0 @@
|
||||
name: Comment on updated submodule
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- "src/generated_versions_list.zig"
|
||||
- ".github/workflows/on-submodule-update.yml"
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
name: Comment
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
steps:
|
||||
- name: Checkout current
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash generated versions list
|
||||
id: hash
|
||||
run: |
|
||||
echo "hash=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Checkout base
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.base_ref }}
|
||||
sparse-checkout: |
|
||||
src
|
||||
- name: Hash base
|
||||
id: base
|
||||
run: |
|
||||
echo "base=$(sha256sum src/generated_versions_list.zig | cut -d ' ' -f 1)" >> $GITHUB_OUTPUT
|
||||
- name: Compare
|
||||
id: compare
|
||||
run: |
|
||||
if [ "${{ steps.hash.outputs.hash }}" != "${{ steps.base.outputs.base }}" ]; then
|
||||
echo "changed=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "changed=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
- name: Find Comment
|
||||
id: comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: github-actions[bot]
|
||||
body-includes: <!-- generated-comment submodule-updated -->
|
||||
- name: Write Warning Comment
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
edit-mode: replace
|
||||
body: |
|
||||
⚠️ **Warning:** @${{ github.actor }}, this PR has changes to submodule versions.
|
||||
|
||||
If this change was intentional, please ignore this message. If not, please undo changes to submodules and rebase your branch.
|
||||
|
||||
<!-- generated-comment submodule-updated -->
|
||||
- name: Add labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'true'
|
||||
with:
|
||||
actions: "add-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Remove labels
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false'
|
||||
with:
|
||||
actions: "remove-labels"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
labels: "changed-submodules"
|
||||
- name: Delete outdated comment
|
||||
uses: actions-cool/issues-helper@v3
|
||||
if: steps.compare.outputs.changed == 'false' && steps.comment.outputs.comment-id != ''
|
||||
with:
|
||||
actions: "delete-comment"
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-id: ${{ steps.comment.outputs.comment-id }}
|
||||
.github/workflows/packages-ci.yml (vendored, 55 lines)
@@ -1,55 +0,0 @@
name: Packages CI

on:
  push:
    branches:
      - main
    paths:
      - "packages/**"
      - .prettierrc
      - .prettierignore
      - tsconfig.json
      - oxlint.json
      - "!**/*.md"
  pull_request:
    branches:
      - main
    paths:
      - "packages/**"
      - .prettierrc
      - .prettierignore
      - tsconfig.json
      - oxlint.json
      - "!**/*.md"

env:
  BUN_VERSION: "canary"

jobs:
  bun-plugin-svelte:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}

      - name: Install dependencies
        run: |
          bun install
          pushd ./packages/bun-plugin-svelte && bun install

      - name: Lint
        run: |
          bunx oxlint@0.15 --format github --deny-warnings
          bunx prettier --config ../../.prettierrc --check .
        working-directory: ./packages/bun-plugin-svelte

      - name: Check types
        run: bun check:types
        working-directory: ./packages/bun-plugin-svelte

      - name: Test
        run: bun test
        working-directory: ./packages/bun-plugin-svelte
368
.github/workflows/release.yml
vendored
@@ -1,368 +0,0 @@
|
||||
# TODO: Move this to bash scripts instead of GitHub Actions
|
||||
# so it can be run from Buildkite, see: .buildkite/scripts/release.sh
|
||||
|
||||
name: Release
|
||||
concurrency: release
|
||||
|
||||
env:
|
||||
BUN_VERSION: ${{ github.event.inputs.tag || github.event.release.tag_name || 'canary' }}
|
||||
BUN_LATEST: ${{ (github.event.inputs.is-latest || github.event.release.tag_name) && 'true' || 'false' }}
|
||||
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- published
|
||||
schedule:
|
||||
- cron: "0 14 * * *" # every day at 6am PST
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
is-latest:
|
||||
description: Is this the latest release?
|
||||
type: boolean
|
||||
default: false
|
||||
tag:
|
||||
type: string
|
||||
description: What is the release tag? (e.g. "1.0.2", "canary")
|
||||
required: true
|
||||
use-docker:
|
||||
description: Should Docker images be released?
|
||||
type: boolean
|
||||
default: false
|
||||
use-npm:
|
||||
description: Should npm packages be published?
|
||||
type: boolean
|
||||
default: false
|
||||
use-homebrew:
|
||||
description: Should binaries be released to Homebrew?
|
||||
type: boolean
|
||||
default: false
|
||||
use-s3:
|
||||
description: Should binaries be uploaded to S3?
|
||||
type: boolean
|
||||
default: false
|
||||
use-types:
|
||||
description: Should types be released to npm?
|
||||
type: boolean
|
||||
default: false
|
||||
use-definitelytyped:
|
||||
description: "Should types be PR'd to DefinitelyTyped?"
|
||||
type: boolean
|
||||
default: false
|
||||
|
||||
jobs:
|
||||
sign:
|
||||
name: Sign Release
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.repository_owner == 'oven-sh' }}
|
||||
permissions:
|
||||
contents: write
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup GPG
|
||||
uses: crazy-max/ghaction-import-gpg@v5
|
||||
with:
|
||||
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||
passphrase: ${{ secrets.GPG_PASSPHRASE }}
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.2.3"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Sign Release
|
||||
run: |
|
||||
echo "$GPG_PASSPHRASE" | bun upload-assets -- "${{ env.BUN_VERSION }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
|
||||
npm:
|
||||
name: Release to NPM
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-npm == 'true' }}
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
# To workaround issue
|
||||
ref: main
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.2.3"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Release
|
||||
run: bun upload-npm -- "${{ env.BUN_VERSION }}" publish
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||
npm-types:
|
||||
name: Release types to NPM
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-types == 'true' }}
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-types
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v3
|
||||
with:
|
||||
node-version: latest
|
||||
- name: Setup Bun
|
||||
if: ${{ env.BUN_VERSION != 'canary' }}
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.2.3"
|
||||
- name: Setup Bun
|
||||
if: ${{ env.BUN_VERSION == 'canary' }}
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "canary" # Must be 'canary' so tag is correct
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Setup Tag
|
||||
if: ${{ env.BUN_VERSION == 'canary' }}
|
||||
run: |
|
||||
VERSION=$(bun --version)
|
||||
TAG="${VERSION}-canary.$(date +'%Y%m%dT%H%M%S')"
|
||||
echo "Setup tag: ${TAG}"
|
||||
echo "TAG=${TAG}" >> ${GITHUB_ENV}
|
||||
- name: Build
|
||||
run: bun run build
|
||||
env:
|
||||
BUN_VERSION: ${{ env.TAG || env.BUN_VERSION }}
|
||||
- name: Release (canary)
|
||||
if: ${{ env.BUN_VERSION == 'canary' }}
|
||||
uses: JS-DevTools/npm-publish@v1
|
||||
with:
|
||||
package: packages/bun-types/package.json
|
||||
token: ${{ secrets.NPM_TOKEN }}
|
||||
tag: canary
|
||||
- name: Release (latest)
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
uses: JS-DevTools/npm-publish@v1
|
||||
with:
|
||||
package: packages/bun-types/package.json
|
||||
token: ${{ secrets.NPM_TOKEN }}
|
||||
definitelytyped:
|
||||
name: Make pr to DefinitelyTyped to update `bun-types` version
|
||||
runs-on: ubuntu-latest
|
||||
needs: npm-types
|
||||
if: ${{ github.event_name == 'release' || github.event.inputs.use-definitelytyped == 'true' }}
|
||||
permissions:
|
||||
contents: read
|
||||
steps:
|
||||
- name: Checkout (DefinitelyTyped)
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: DefinitelyTyped/DefinitelyTyped
|
||||
- name: Checkout (bun)
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
path: bun
|
||||
- name: Setup Bun
|
||||
uses: ./bun/.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.2.0"
|
||||
- id: bun-version
|
||||
run: echo "BUN_VERSION=${BUN_VERSION#bun-v}" >> "$GITHUB_OUTPUT"
|
||||
- name: Update bun-types version in package.json
|
||||
run: |
|
||||
bun -e '
|
||||
const file = Bun.file("./types/bun/package.json");
|
||||
const json = await file.json();
|
||||
const version = "${{ steps.bun-version.outputs.BUN_VERSION }}";
|
||||
json.dependencies["bun-types"] = version;
|
||||
json.version = version.slice(0, version.lastIndexOf(".")) + ".9999";
|
||||
await file.write(JSON.stringify(json, null, 4) + "\n");
|
||||
'
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
if: ${{ env.BUN_LATEST == 'true' && env.BUN_VERSION != 'canary'}}
|
||||
with:
|
||||
token: ${{ secrets.ROBOBUN_TOKEN }}
|
||||
add-paths: ./types/bun/package.json
|
||||
title: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
|
||||
commit-message: "[bun] update to ${{ steps.bun-version.outputs.BUN_VERSION }}"
|
||||
body: |
|
||||
Update `bun-types` version to ${{ steps.bun-version.outputs.BUN_VERSION }}
|
||||
|
||||
https://bun.sh/blog/${{ env.BUN_VERSION }}
|
||||
push-to-fork: oven-sh/DefinitelyTyped
|
||||
branch: ${{env.BUN_VERSION}}
|
||||
docker:
|
||||
name: Release to Dockerhub
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-docker == 'true' }}
|
||||
permissions:
|
||||
contents: read
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- variant: debian
|
||||
suffix: ""
|
||||
- variant: debian
|
||||
suffix: -debian
|
||||
- variant: slim
|
||||
suffix: -slim
|
||||
dir: debian-slim
|
||||
- variant: alpine
|
||||
suffix: -alpine
|
||||
- variant: distroless
|
||||
suffix: -distroless
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Docker emulator
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- id: buildx
|
||||
name: Setup Docker buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
platforms: linux/amd64,linux/arm64
|
||||
- id: metadata
|
||||
name: Setup Docker metadata
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: oven/bun
|
||||
flavor: |
|
||||
latest=false
|
||||
tags: |
|
||||
type=raw,value=latest,enable=${{ env.BUN_LATEST == 'true' && matrix.suffix == '' }}
|
||||
type=raw,value=${{ matrix.variant }},enable=${{ env.BUN_LATEST == 'true' }}
|
||||
type=match,pattern=(bun-v)?(canary|\d+.\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
|
||||
type=match,pattern=(bun-v)?(canary|\d+.\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
|
||||
type=match,pattern=(bun-v)?(canary|\d+),group=2,value=${{ env.BUN_VERSION }},suffix=${{ matrix.suffix }}
|
||||
- name: Login to Docker
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- name: Push to Docker
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
context: ./dockerhub/${{ matrix.dir || matrix.variant }}
|
||||
platforms: linux/amd64,linux/arm64
|
||||
builder: ${{ steps.buildx.outputs.name }}
|
||||
push: true
|
||||
tags: ${{ steps.metadata.outputs.tags }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
build-args: |
|
||||
BUN_VERSION=${{ env.BUN_VERSION }}
|
||||
homebrew:
|
||||
name: Release to Homebrew
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
permissions:
|
||||
contents: read
|
||||
if: ${{ github.event_name == 'release' || github.event.inputs.use-homebrew == 'true' }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: oven-sh/homebrew-bun
|
||||
token: ${{ secrets.ROBOBUN_TOKEN }}
|
||||
- id: gpg
|
||||
name: Setup GPG
|
||||
uses: crazy-max/ghaction-import-gpg@v5
|
||||
with:
|
||||
gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }}
|
||||
passphrase: ${{ secrets.GPG_PASSPHRASE }}
|
||||
- name: Setup Ruby
|
||||
uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
ruby-version: "2.6"
|
||||
- name: Update Tap
|
||||
run: ruby scripts/release.rb "${{ env.BUN_VERSION }}"
|
||||
- name: Commit Tap
|
||||
uses: stefanzweifel/git-auto-commit-action@v4
|
||||
with:
|
||||
commit_options: --gpg-sign=${{ steps.gpg.outputs.keyid }}
|
||||
commit_message: Release ${{ env.BUN_VERSION }}
|
||||
commit_user_name: robobun
|
||||
commit_user_email: robobun@oven.sh
|
||||
commit_author: robobun <robobun@oven.sh>
|
||||
s3:
|
||||
name: Upload to S3
|
||||
runs-on: ubuntu-latest
|
||||
needs: sign
|
||||
if: ${{ github.event_name != 'workflow_dispatch' || github.event.inputs.use-s3 == 'true' }}
|
||||
permissions:
|
||||
contents: read
|
||||
defaults:
|
||||
run:
|
||||
working-directory: packages/bun-release
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
with:
|
||||
bun-version: "1.2.0"
|
||||
- name: Install Dependencies
|
||||
run: bun install
|
||||
- name: Release
|
||||
run: bun upload-s3 -- "${{ env.BUN_VERSION }}"
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY}}
|
||||
AWS_ENDPOINT: ${{ secrets.AWS_ENDPOINT }}
|
||||
AWS_BUCKET: bun
|
||||
|
||||
notify-sentry:
|
||||
name: Notify Sentry
|
||||
runs-on: ubuntu-latest
|
||||
needs: s3
|
||||
steps:
|
||||
- name: Notify Sentry
|
||||
uses: getsentry/action-release@v1.7.0
|
||||
env:
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||
with:
|
||||
ignore_missing: true
|
||||
ignore_empty: true
|
||||
version: ${{ env.BUN_VERSION }}
|
||||
environment: production
|
||||
|
||||
bump:
|
||||
name: "Bump version"
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.event_name != 'schedule' }}
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: write
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
- name: Setup Bun
|
||||
uses: ./.github/actions/setup-bun
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
with:
|
||||
bun-version: "1.2.0"
|
||||
- name: Bump version
|
||||
uses: ./.github/actions/bump
|
||||
if: ${{ env.BUN_LATEST == 'true' }}
|
||||
with:
|
||||
version: ${{ env.BUN_VERSION }}
|
||||
token: ${{ github.token }}
|
||||
32
.github/workflows/run-lint.yml
vendored
@@ -1,32 +0,0 @@
name: Lint

permissions:
  contents: read
env:
  BUN_VERSION: "1.2.0"

on:
  workflow_call:

jobs:
  lint:
    name: Lint
    runs-on: ubuntu-latest
    outputs:
      text_output: ${{ steps.lint.outputs.text_output }}
      json_output: ${{ steps.lint.outputs.json_output }}
      count: ${{ steps.lint.outputs.count }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - name: Install Dependencies
        run: |
          bun --cwd=packages/bun-internal-test install
      - name: Lint
        id: lint
        run: |
          bun packages/bun-internal-test/src/linter.ts
30
.github/workflows/stale.yaml
vendored
@@ -1,30 +0,0 @@
name: Close inactive issues
on:
  # schedule:
  #   - cron: "15 * * * *"
  workflow_dispatch:

jobs:
  close-issues:
    runs-on: ubuntu-latest
    permissions:
      issues: write
      pull-requests: write
    steps:
      - uses: actions/stale@v5
        with:
          days-before-issue-close: 5
          any-of-issue-labels: "needs repro,waiting-for-author"
          exempt-issue-labels: "neverstale"
          exempt-pr-labels: "neverstale"
          remove-stale-when-updated: true
          stale-issue-label: "stale"
          stale-pr-label: "stale"
          stale-issue-message: "This issue is stale and may be closed due to inactivity. If you're still running into this, please leave a comment."
          close-issue-message: "This issue was closed because it has been inactive for 5 days since being marked as stale."
          days-before-pr-stale: 30
          days-before-pr-close: 14
          stale-pr-message: "This pull request is stale and may be closed due to inactivity."
          close-pr-message: "This pull request has been closed due to inactivity."
          repo-token: ${{ github.token }}
          operations-per-run: 1000
32
.github/workflows/test-bump.yml
vendored
@@ -1,32 +0,0 @@
name: Test Bump version

on:
  workflow_dispatch:
    inputs:
      version:
        type: string
        description: What is the release tag? (e.g. "1.0.2", "canary")
        required: true

env:
  BUN_VERSION: "1.2.0"

jobs:
  bump:
    name: "Bump version"
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
      contents: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Bun
        uses: ./.github/actions/setup-bun
        with:
          bun-version: ${{ env.BUN_VERSION }}
      - name: Bump version
        uses: ./.github/actions/bump
        with:
          version: ${{ inputs.version }}
          token: ${{ github.token }}
19
.github/workflows/typos.yml
vendored
@@ -1,19 +0,0 @@
name: Typos

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  docs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Spellcheck
        uses: crate-ci/typos@v1.29.4
        with:
          files: docs/**/*
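To run the same spellcheck locally before pushing docs changes, a minimal sketch (assuming the `typos` CLI from crate-ci/typos is installed, for example via `cargo install typos-cli`):

```bash
# Check only the docs tree, matching the workflow's `files: docs/**/*` scope.
typos docs/
```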
99
.github/workflows/update-cares.yml
vendored
@@ -1,99 +0,0 @@
|
||||
name: Update c-ares
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 4 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check c-ares version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildCares.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildCares.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildCares.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/c-ares/c-ares/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/c-ares/c-ares/git/ref/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildCares.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildCares.cmake
|
||||
commit-message: "deps: update c-ares to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update c-ares to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-cares-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates c-ares to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/c-ares/c-ares/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-cares.yml)
|
||||
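The `awk` and `sed` one-liners above assume the pinned hash sits on the line after a bare `COMMIT` keyword in `cmake/targets/BuildCares.cmake`. A minimal sketch of that extraction against a made-up snippet (the CMake excerpt and hash below are illustrative, not the real file contents):

```bash
#!/usr/bin/env bash
set -euo pipefail

# Hypothetical excerpt mirroring the two-line COMMIT layout the workflow parses.
cat > /tmp/BuildCares.cmake <<'EOF'
register_repository(
  NAME
    cares
  COMMIT
    d3a507e920e7af18a5efb7f9f1d8044ed4750013
)
EOF

# Same extraction as the workflow: take the line after COMMIT, trim whitespace.
awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' \
  /tmp/BuildCares.cmake
# prints: d3a507e920e7af18a5efb7f9f1d8044ed4750013
```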
99
.github/workflows/update-libarchive.yml
vendored
@@ -1,99 +0,0 @@
|
||||
name: Update libarchive
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 3 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check libarchive version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibArchive.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildLibArchive.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildLibArchive.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/libarchive/libarchive/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/libarchive/libarchive/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibArchive.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildLibArchive.cmake
|
||||
commit-message: "deps: update libarchive to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update libarchive to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-libarchive-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates libarchive to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/libarchive/libarchive/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libarchive.yml)
|
||||
99
.github/workflows/update-libdeflate.yml
vendored
@@ -1,99 +0,0 @@
|
||||
name: Update libdeflate
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 2 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check libdeflate version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLibDeflate.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildLibDeflate.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildLibDeflate.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/ebiggers/libdeflate/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/ebiggers/libdeflate/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLibDeflate.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildLibDeflate.cmake
|
||||
commit-message: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update libdeflate to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-libdeflate-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates libdeflate to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/ebiggers/libdeflate/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-libdeflate.yml)
|
||||
99
.github/workflows/update-lolhtml.yml
vendored
@@ -1,99 +0,0 @@
|
||||
name: Update lolhtml
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 1 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check lolhtml version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLolHtml.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildLolHtml.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildLolHtml.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/cloudflare/lol-html/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/cloudflare/lol-html/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLolHtml.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildLolHtml.cmake
|
||||
commit-message: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update lolhtml to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-lolhtml-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates lolhtml to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/cloudflare/lol-html/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lolhtml.yml)
|
||||
99
.github/workflows/update-lshpack.yml
vendored
@@ -1,99 +0,0 @@
|
||||
name: Update lshpack
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 5 * * 0"
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check lshpack version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Extract the commit hash from the line after COMMIT
|
||||
CURRENT_VERSION=$(awk '/[[:space:]]*COMMIT[[:space:]]*$/{getline; gsub(/^[[:space:]]+|[[:space:]]+$/,"",$0); print}' cmake/targets/BuildLshpack.cmake)
|
||||
|
||||
if [ -z "$CURRENT_VERSION" ]; then
|
||||
echo "Error: Could not find COMMIT line in BuildLshpack.cmake"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate that it looks like a git hash
|
||||
if ! [[ $CURRENT_VERSION =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid git hash format in BuildLshpack.cmake"
|
||||
echo "Found: $CURRENT_VERSION"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
LATEST_RELEASE=$(curl -sL https://api.github.com/repos/litespeedtech/ls-hpack/releases/latest)
|
||||
if [ -z "$LATEST_RELEASE" ]; then
|
||||
echo "Error: Failed to fetch latest release from GitHub API"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG=$(echo "$LATEST_RELEASE" | jq -r '.tag_name')
|
||||
if [ -z "$LATEST_TAG" ] || [ "$LATEST_TAG" = "null" ]; then
|
||||
echo "Error: Could not extract tag name from GitHub API response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
LATEST_TAG_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/ref/tags/$LATEST_TAG" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_TAG_SHA" ] || [ "$LATEST_TAG_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG"
|
||||
exit 1
|
||||
fi
|
||||
LATEST_SHA=$(curl -sL "https://api.github.com/repos/litespeedtech/ls-hpack/git/tags/$LATEST_TAG_SHA" | jq -r '.object.sha')
|
||||
if [ -z "$LATEST_SHA" ] || [ "$LATEST_SHA" = "null" ]; then
|
||||
echo "Error: Could not fetch SHA for tag $LATEST_TAG @ $LATEST_TAG_SHA"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [[ $LATEST_SHA =~ ^[0-9a-f]{40}$ ]]; then
|
||||
echo "Error: Invalid SHA format received from GitHub"
|
||||
echo "Found: $LATEST_SHA"
|
||||
echo "Expected: 40 character hexadecimal string"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "latest=$LATEST_SHA" >> $GITHUB_OUTPUT
|
||||
echo "tag=$LATEST_TAG" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update version if needed
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# Handle multi-line format where COMMIT and its value are on separate lines
|
||||
sed -i -E '/[[:space:]]*COMMIT[[:space:]]*$/{n;s/[[:space:]]*([0-9a-f]+)[[:space:]]*$/ ${{ steps.check-version.outputs.latest }}/}' cmake/targets/BuildLshpack.cmake
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current != steps.check-version.outputs.latest
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
cmake/targets/BuildLshpack.cmake
|
||||
commit-message: "deps: update lshpack to ${{ steps.check-version.outputs.tag }} (${{ steps.check-version.outputs.latest }})"
|
||||
title: "deps: update lshpack to ${{ steps.check-version.outputs.tag }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-lshpack-${{ github.run_number }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates lshpack to version ${{ steps.check-version.outputs.tag }}
|
||||
|
||||
Compare: https://github.com/litespeedtech/ls-hpack/compare/${{ steps.check-version.outputs.current }}...${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-lshpack.yml)
|
||||
82
.github/workflows/update-root-certs.yml
vendored
@@ -1,82 +0,0 @@
|
||||
name: Daily Root Certs Update Check
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 0 * * *" # Runs at 00:00 UTC every day
|
||||
workflow_dispatch: # Allows manual trigger
|
||||
|
||||
jobs:
|
||||
check-and-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v1
|
||||
with:
|
||||
bun-version: latest
|
||||
|
||||
- name: Generate root certs and capture output
|
||||
id: generate-certs
|
||||
run: |
|
||||
cd packages/bun-usockets/
|
||||
OUTPUT=$(bun generate-root-certs.mjs -v)
|
||||
echo "cert_output<<EOF" >> $GITHUB_ENV
|
||||
echo "$OUTPUT" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
|
||||
- name: Check for changes and stage files
|
||||
id: check-changes
|
||||
run: |
|
||||
if [[ -n "$(git status --porcelain)" ]]; then
|
||||
echo "Found changes, staging modified files..."
|
||||
git config --global user.name "github-actions[bot]"
|
||||
git config --global user.email "github-actions[bot]@users.noreply.github.com"
|
||||
|
||||
# Get list of modified files and add them
|
||||
git status --porcelain | while read -r status file; do
|
||||
# Remove leading status and whitespace
|
||||
file=$(echo "$file" | sed 's/^.* //')
|
||||
echo "Adding changed file: $file"
|
||||
git add "$file"
|
||||
done
|
||||
|
||||
echo "changes=true" >> $GITHUB_OUTPUT
|
||||
|
||||
# Store the list of changed files
|
||||
CHANGED_FILES=$(git status --porcelain)
|
||||
echo "changed_files<<EOF" >> $GITHUB_ENV
|
||||
echo "$CHANGED_FILES" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
else
|
||||
echo "No changes detected"
|
||||
echo "changes=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Create Pull Request
|
||||
if: steps.check-changes.outputs.changes == 'true'
|
||||
uses: peter-evans/create-pull-request@v5
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
commit-message: "update(root_certs): Update root certificates $(date +'%Y-%m-%d')"
|
||||
title: "update(root_certs) $(date +'%Y-%m-%d')"
|
||||
body: |
|
||||
Automated root certificates update
|
||||
|
||||
${{ env.cert_output }}
|
||||
|
||||
## Changed Files:
|
||||
```
|
||||
${{ env.changed_files }}
|
||||
```
|
||||
branch: certs/update-root-certs-${{ github.run_number }}
|
||||
base: main
|
||||
delete-branch: true
|
||||
labels:
|
||||
- "automation"
|
||||
- "root-certs"
|
||||
111
.github/workflows/update-sqlite3.yml
vendored
@@ -1,111 +0,0 @@
|
||||
name: Update SQLite3
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "0 6 * * 0" # Run weekly
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check-update:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check SQLite version
|
||||
id: check-version
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Get current version from the header file using SQLITE_VERSION_NUMBER
|
||||
CURRENT_VERSION_NUM=$(grep -o '#define SQLITE_VERSION_NUMBER [0-9]\+' src/bun.js/bindings/sqlite/sqlite3_local.h | awk '{print $3}' | tr -d '\n\r')
|
||||
if [ -z "$CURRENT_VERSION_NUM" ]; then
|
||||
echo "Error: Could not find SQLITE_VERSION_NUMBER in sqlite3_local.h"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Convert numeric version to semantic version for display
|
||||
CURRENT_MAJOR=$((CURRENT_VERSION_NUM / 1000000))
|
||||
CURRENT_MINOR=$((($CURRENT_VERSION_NUM / 1000) % 1000))
|
||||
CURRENT_PATCH=$((CURRENT_VERSION_NUM % 1000))
|
||||
CURRENT_VERSION="$CURRENT_MAJOR.$CURRENT_MINOR.$CURRENT_PATCH"
|
||||
|
||||
echo "current=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
echo "current_num=$CURRENT_VERSION_NUM" >> $GITHUB_OUTPUT
|
||||
|
||||
# Fetch SQLite download page
|
||||
DOWNLOAD_PAGE=$(curl -sL https://sqlite.org/download.html)
|
||||
if [ -z "$DOWNLOAD_PAGE" ]; then
|
||||
echo "Error: Failed to fetch SQLite download page"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Extract latest version and year from the amalgamation link
|
||||
LATEST_INFO=$(echo "$DOWNLOAD_PAGE" | grep -o 'sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1)
|
||||
LATEST_YEAR=$(echo "$DOWNLOAD_PAGE" | grep -o '[0-9]\{4\}/sqlite-amalgamation-[0-9]\{7\}.zip' | head -n1 | cut -d'/' -f1 | tr -d '\n\r')
|
||||
LATEST_VERSION_NUM=$(echo "$LATEST_INFO" | grep -o '[0-9]\{7\}' | tr -d '\n\r')
|
||||
|
||||
if [ -z "$LATEST_VERSION_NUM" ] || [ -z "$LATEST_YEAR" ]; then
|
||||
echo "Error: Could not extract latest version info"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Convert numeric version to semantic version for display
|
||||
LATEST_MAJOR=$((10#$LATEST_VERSION_NUM / 1000000))
|
||||
LATEST_MINOR=$((($LATEST_VERSION_NUM / 10000) % 100))
|
||||
LATEST_PATCH=$((10#$LATEST_VERSION_NUM % 1000))
|
||||
LATEST_VERSION="$LATEST_MAJOR.$LATEST_MINOR.$LATEST_PATCH"
|
||||
|
||||
echo "latest=$LATEST_VERSION" >> $GITHUB_OUTPUT
|
||||
echo "latest_year=$LATEST_YEAR" >> $GITHUB_OUTPUT
|
||||
echo "latest_num=$LATEST_VERSION_NUM" >> $GITHUB_OUTPUT
|
||||
|
||||
# Debug output
|
||||
echo "Current version: $CURRENT_VERSION ($CURRENT_VERSION_NUM)"
|
||||
echo "Latest version: $LATEST_VERSION ($LATEST_VERSION_NUM)"
|
||||
|
||||
- name: Update SQLite if needed
|
||||
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
TEMP_DIR=$(mktemp -d)
|
||||
cd $TEMP_DIR
|
||||
|
||||
echo "Downloading from: https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
|
||||
|
||||
# Download and extract latest version
|
||||
wget "https://sqlite.org/${{ steps.check-version.outputs.latest_year }}/sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
|
||||
unzip "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}.zip"
|
||||
cd "sqlite-amalgamation-${{ steps.check-version.outputs.latest_num }}"
|
||||
|
||||
# Add header comment and copy files
|
||||
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
|
||||
cat sqlite3.c >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3.c
|
||||
|
||||
echo "// clang-format off" > $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
|
||||
cat sqlite3.h >> $GITHUB_WORKSPACE/src/bun.js/bindings/sqlite/sqlite3_local.h
|
||||
|
||||
- name: Create Pull Request
|
||||
if: success() && steps.check-version.outputs.current_num < steps.check-version.outputs.latest_num
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
add-paths: |
|
||||
src/bun.js/bindings/sqlite/sqlite3.c
|
||||
src/bun.js/bindings/sqlite/sqlite3_local.h
|
||||
commit-message: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
|
||||
title: "deps: update sqlite to ${{ steps.check-version.outputs.latest }}"
|
||||
delete-branch: true
|
||||
branch: deps/update-sqlite-${{ steps.check-version.outputs.latest }}
|
||||
body: |
|
||||
## What does this PR do?
|
||||
|
||||
Updates SQLite to version ${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Compare: https://sqlite.org/src/vdiff?from=${{ steps.check-version.outputs.current }}&to=${{ steps.check-version.outputs.latest }}
|
||||
|
||||
Auto-updated by [this workflow](https://github.com/oven-sh/bun/actions/workflows/update-sqlite3.yml)
|
||||
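As a quick sanity check of the SQLITE_VERSION_NUMBER arithmetic used for the current version above, a minimal sketch with a hypothetical value (the number is illustrative):

```bash
NUM=3047000  # hypothetical SQLITE_VERSION_NUMBER encoding 3.47.0
MAJOR=$((NUM / 1000000))
MINOR=$(((NUM / 1000) % 1000))
PATCH=$((NUM % 1000))
echo "$MAJOR.$MINOR.$PATCH"  # prints 3.47.0
```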
212
.gitignore
vendored
@@ -1,186 +1,48 @@
|
||||
.DS_Store
|
||||
.env
|
||||
.envrc
|
||||
.eslintcache
|
||||
.idea
|
||||
.next
|
||||
.ninja_deps
|
||||
.ninja_log
|
||||
.npm
|
||||
.npm.gz
|
||||
.parcel-cache
|
||||
.swcrc
|
||||
.trace
|
||||
.uuid
|
||||
.vs
|
||||
.vscode/clang*
|
||||
.vscode/cpp*
|
||||
.zig-cache
|
||||
.bake-debug
|
||||
*.a
|
||||
*.bc
|
||||
*.big
|
||||
*.blob
|
||||
*.bun
|
||||
*.crash
|
||||
*.database
|
||||
*.db
|
||||
*.dmg
|
||||
*.dSYM
|
||||
*.generated.ts
|
||||
*.jsb
|
||||
*.lib
|
||||
*.log
|
||||
zig-cache
|
||||
*.wasm
|
||||
|
||||
*.o
|
||||
*.a
|
||||
profile.json
|
||||
|
||||
/package.json
|
||||
node_modules
|
||||
.swcrc
|
||||
yarn.lock
|
||||
dist
|
||||
*.log
|
||||
*.out.js
|
||||
*.out.refresh.js
|
||||
*.pdb
|
||||
*.sqlite
|
||||
*.tmp
|
||||
*.trace
|
||||
*.wat
|
||||
*.zip
|
||||
**/.verdaccio-db.json
|
||||
**/*.dir
|
||||
**/*.pdb
|
||||
**/*.sln*
|
||||
**/*.vcxproj*
|
||||
**/package-lock.json
|
||||
/.cache
|
||||
/.webkit-cache
|
||||
/build-*/
|
||||
/bun-webkit
|
||||
/kcov-out
|
||||
/test-report.json
|
||||
/test-report.md
|
||||
/test.js
|
||||
/test.ts
|
||||
/test.zig
|
||||
/testdir
|
||||
/package-lock.json
|
||||
build
|
||||
build.ninja
|
||||
bun-binary
|
||||
bun-mimalloc
|
||||
bun-nomimalloc
|
||||
bun-singlehtreaded
|
||||
bun-test-scratch
|
||||
bun-zigld
|
||||
cmake_install.cmake
|
||||
CMakeCache.txt
|
||||
CMakeFiles
|
||||
cold-jsc-start
|
||||
cold-jsc-start.d
|
||||
compile_commands.json
|
||||
*.wat
|
||||
zig-out
|
||||
pnpm-lock.yaml
|
||||
README.md.template
|
||||
src/deps/zig-clap/example
|
||||
src/deps/zig-clap/README.md
|
||||
src/deps/zig-clap/.github
|
||||
src/deps/zig-clap/.gitattributes
|
||||
out
|
||||
outdir
|
||||
|
||||
.trace
|
||||
cover
|
||||
coverage
|
||||
coverv
|
||||
dist
|
||||
esbuilddir
|
||||
examples/lotta-modules/bun-nofscache
|
||||
examples/lotta-modules/bun-old
|
||||
examples/lotta-modules/bun-yday
|
||||
failing-tests.txt
|
||||
*.trace
|
||||
bench
|
||||
github
|
||||
make-dev-stats.csv
|
||||
misctools/fetch
|
||||
misctools/machbench
|
||||
misctools/sha
|
||||
myscript.sh
|
||||
node_modules
|
||||
node_modules_*
|
||||
out
|
||||
out.*
|
||||
outcss
|
||||
outdir
|
||||
outdir/
|
||||
packages/*/*.wasm
|
||||
packages/bun-*/*.o
|
||||
packages/bun-*/bun
|
||||
packages/bun-*/bun-profile
|
||||
packages/bun-*/debug-bun
|
||||
packages/bun-cli/bin/*
|
||||
packages/bun-cli/postinstall.js
|
||||
packages/bun-wasm/*.cjs
|
||||
packages/bun-wasm/*.d.cts
|
||||
packages/bun-wasm/*.d.mts
|
||||
packages/bun-wasm/*.d.ts
|
||||
packages/bun-wasm/*.js
|
||||
packages/bun-wasm/*.map
|
||||
packages/bun-wasm/*.mjs
|
||||
packages/debug-*
|
||||
out
|
||||
.parcel-cache
|
||||
esbuilddir
|
||||
*.jsb
|
||||
parceldist
|
||||
pnpm-lock.yaml
|
||||
profile.json
|
||||
README.md.template
|
||||
release/
|
||||
scripts/env.local
|
||||
sign.*.json
|
||||
sign.json
|
||||
src/bake/generated.ts
|
||||
src/generated_enum_extractor.zig
|
||||
src/bun.js/bindings-obj
|
||||
src/bun.js/bindings/GeneratedJS2Native.zig
|
||||
src/bun.js/bindings/GeneratedBindings.zig
|
||||
src/bun.js/debug-bindings-obj
|
||||
src/deps/zig-clap/.gitattributes
|
||||
src/deps/zig-clap/.github
|
||||
src/deps/zig-clap/example
|
||||
src/deps/zig-clap/README.md
|
||||
src/fallback.version
|
||||
src/js/out/DebugPath.h
|
||||
src/js/out/functions*
|
||||
src/js/out/modules*
|
||||
src/js/out/tmp
|
||||
src/node-fallbacks/node_modules
|
||||
src/node-fallbacks/out/*
|
||||
src/runtime.version
|
||||
src/tests.zig
|
||||
test.txt
|
||||
test/js/bun/glob/fixtures
|
||||
test/node.js/upstream
|
||||
tsconfig.tsbuildinfo
|
||||
esbuilddir
|
||||
outdir/
|
||||
outcss
|
||||
.next
|
||||
txt.js
|
||||
x64
|
||||
yarn.lock
|
||||
zig-cache
|
||||
zig-out
|
||||
test/node.js/upstream
|
||||
.zig-cache
|
||||
scripts/env.local
|
||||
*.generated.ts
|
||||
src/bake/generated.ts
|
||||
test/cli/install/registry/packages/publish-pkg-*
|
||||
test/cli/install/registry/packages/@secret/publish-pkg-8
|
||||
test/js/third_party/prisma/prisma/sqlite/dev.db-journal
|
||||
tmp
|
||||
codegen-for-zig-team.tar.gz
|
||||
|
||||
# Dependencies
|
||||
/vendor
|
||||
|
||||
# Dependencies (before CMake)
|
||||
# These can be removed in the far future
|
||||
/src/bun.js/WebKit
|
||||
/src/deps/boringssl
|
||||
/src/deps/brotli
|
||||
/src/deps/c*ares
|
||||
/src/deps/libarchive
|
||||
/src/deps/libdeflate
|
||||
/src/deps/libuv
|
||||
/src/deps/lol*html
|
||||
/src/deps/ls*hpack
|
||||
/src/deps/mimalloc
|
||||
/src/deps/picohttpparser
|
||||
/src/deps/tinycc
|
||||
/src/deps/WebKit
|
||||
/src/deps/zig
|
||||
/src/deps/zlib
|
||||
/src/deps/zstd
|
||||
|
||||
# Generated files
|
||||
|
||||
.buildkite/ci.yml
|
||||
*.sock
|
||||
scratch*.{js,ts,tsx,cjs,mjs}
|
||||
|
||||
*.bun-build
|
||||
.idea
|
||||
.vscode/cpp*
|
||||
9
.gitmodules
vendored
Normal file
@@ -0,0 +1,9 @@
# [submodule "src/deps/zig-clap"]
#   path = src/deps/zig-clap
#   url = https://github.com/Hejsil/zig-clap
[submodule "src/deps/picohttpparser"]
  path = src/deps/picohttpparser
  url = https://github.com/h2o/picohttpparser/
[submodule "src/javascript/jsc/WebKit"]
  path = src/javascript/jsc/WebKit
  url = git@github.com:/Jarred-Sumner/WebKit
16
.lldbinit
@@ -1,16 +0,0 @@
# Tell LLDB what to do when the debugged process receives SIGPWR: pass it through to the process
# (-p), but do not stop the process (-s) or notify the user (-n).
#
# JSC's garbage collector sends this signal (as configured by Bun WebKit in
# Thread::initializePlatformThreading() in ThreadingPOSIX.cpp) to the JS thread to suspend or resume
# it. So stopping the process would just create noise when debugging any long-running script.
process handle -p true -s false -n false SIGPWR

command script import misctools/lldb/lldb_pretty_printers.py
type category enable zig.lang
type category enable zig.std

command script import misctools/lldb/lldb_webkit.py

command script delete btjs
command alias btjs p {printf("gathering btjs trace...\n");printf("%s\n", (char*)dumpBtjsTrace())}
2
.mailmap
@@ -1,2 +0,0 @@
# To learn more about git's mailmap: https://ntietz.com/blog/git-mailmap-for-name-changes
chloe caruso <git@paperclover.net> <me@paperdave.net>
@@ -1,10 +0,0 @@
src/bun.js/WebKit
vendor
test/snapshots
test/js/deno
test/node.js
src/react-refresh.js
*.min.js
test/snippets
test/js/node/test
bun.lock
24
.prettierrc
@@ -1,24 +0,0 @@
{
  "arrowParens": "avoid",
  "printWidth": 120,
  "trailingComma": "all",
  "useTabs": false,
  "quoteProps": "preserve",
  "overrides": [
    {
      "files": [".vscode/*.json"],
      "options": {
        "parser": "jsonc",
        "quoteProps": "preserve",
        "singleQuote": false,
        "trailingComma": "all"
      }
    },
    {
      "files": ["*.md"],
      "options": {
        "printWidth": 80
      }
    }
  ]
}
@@ -1,2 +0,0 @@
[type.md]
extend-ignore-words-re = ["^ba"]
122
.vscode/c_cpp_properties.json
vendored
@@ -1,39 +1,30 @@
|
||||
{
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Debug",
|
||||
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
|
||||
"compileCommands": "${workspaceFolder}/build/debug/compile_commands.json",
|
||||
"name": "Mac",
|
||||
"forcedInclude": [
|
||||
"${workspaceFolder}/src/javascript/jsc/bindings/root.h"
|
||||
],
|
||||
"includePath": [
|
||||
"${workspaceFolder}/build/bun-webkit/include",
|
||||
"${workspaceFolder}/build/debug/codegen",
|
||||
"${workspaceFolder}/src/bun.js/bindings/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcore/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
|
||||
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
|
||||
"${workspaceFolder}/src/bun.js/modules/",
|
||||
"${workspaceFolder}/src/js/builtins/",
|
||||
"${workspaceFolder}/vendor/boringssl/include/",
|
||||
"${workspaceFolder}/vendor",
|
||||
"${workspaceFolder}/src/napi/*",
|
||||
"${workspaceFolder}/packages/bun-usockets/src",
|
"${workspaceFolder}/packages/",
"${workspaceFolder}/src/JavaScript/jsc/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/",
"${workspaceFolder}/src/JavaScript/jsc/WebKit/WebKitBuild/Release/WTF/Headers",
"${workspaceFolder}/src/JavaScript/jsc/WebKit/WebKitBuild/Release/*",
"${workspaceFolder}/src/JavaScript/jsc/bindings/",
"${workspaceFolder}/src/JavaScript/jsc/WebKit/Source/bmalloc/",
"${workspaceFolder}/src/javascript/jsc/WebKit/WebKitBuild/Release/ICU/Headers/"
],
"browse": {
"path": [
"${workspaceFolder}/build/bun-webkit/include",
"${workspaceFolder}/src/bun.js/bindings",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/src/js/builtins/*",
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/vendor/*",
"${workspaceFolder}/vendor/boringssl/include/*",
"${workspaceFolder}/packages/bun-usockets/*",
"${workspaceFolder}/packages/bun-uws/*",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/src/javascript/jsc/bindings/*",
"${workspaceFolder}/src/JavaScript/jsc/WebKit/WebKitBuild/Release/JavaScriptCore/PrivateHeaders/",
"${workspaceFolder}/src/JavaScript/jsc/WebKit/WebKitBuild/Release/WTF/Headers/**",
"${workspaceFolder}/src/JavaScript/jsc/WebKit/WebKitBuild/Release/*",
"${workspaceFolder}/src/JavaScript/jsc/bindings/**",
"${workspaceFolder}/src/JavaScript/jsc/WebKit/Source/bmalloc/**",
"${workspaceFolder}/src/javascript/jsc/WebKit/WebKitBuild/Release/ICU/Headers/"
],
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": ".vscode/cppdb",
"databaseFilename": ".vscode/cppdb"
},
"defines": [
"STATICALLY_LINKED_WITH_JavaScriptCore=1",
@@ -43,78 +34,15 @@
"ENABLE_INSPECTOR_ALTERNATE_DISPATCHERS=0",
"BUILDING_JSCONLY__",
"USE_FOUNDATION=1",
"ASSERT_ENABLED=1",
"DU_DISABLE_RENAMING=1",
"ASSERT_ENABLED=0",
"DU_DISABLE_RENAMING=1"
],
"macFrameworkPath": [],
"compilerPath": "${workspaceFolder}/.vscode/clang++",
"compilerPath": "/usr/local/opt/llvm/bin/clang",
"cStandard": "c17",
"cppStandard": "c++20",
},
{
"name": "BunWithJSCDebug",
"forcedInclude": ["${workspaceFolder}/src/bun.js/bindings/root.h"],
"includePath": [
"${workspaceFolder}/build/debug/codegen",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/ICU/Headers/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/WTF/Headers",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/bmalloc/Headers/",
"${workspaceFolder}/src/bun.js/bindings/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/modules/",
"${workspaceFolder}/src/js/builtins/",
"${workspaceFolder}/src/js/out",
"${workspaceFolder}/vendor/boringssl/include/",
"${workspaceFolder}/vendor",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/packages/bun-usockets/src",
"${workspaceFolder}/packages/",
],
"browse": {
"path": [
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/ICU/Headers/",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/PrivateHeaders/**",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/WTF/Headers/**",
"${workspaceFolder}/vendor/WebKit/WebKitBuild/Debug/bmalloc/Headers/**",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/bun.js/bindings/*",
"${workspaceFolder}/src/napi/*",
"${workspaceFolder}/src/bun.js/bindings/sqlite/",
"${workspaceFolder}/src/bun.js/bindings/webcrypto/",
"${workspaceFolder}/src/bun.js/bindings/webcore/",
"${workspaceFolder}/src/js/builtins/*",
"${workspaceFolder}/src/js/out/*",
"${workspaceFolder}/src/bun.js/modules/*",
"${workspaceFolder}/vendor",
"${workspaceFolder}/vendor/boringssl/include/",
"${workspaceFolder}/packages/bun-usockets/",
"${workspaceFolder}/packages/bun-uws/",
"${workspaceFolder}/src/napi",
],
"limitSymbolsToIncludedHeaders": true,
"databaseFilename": ".vscode/cppdb_debug",
},
"defines": [
"STATICALLY_LINKED_WITH_JavaScriptCore=1",
"STATICALLY_LINKED_WITH_WTF=1",
"BUILDING_WITH_CMAKE=1",
"NOMINMAX",
"ENABLE_INSPECTOR_ALTERNATE_DISPATCHERS=0",
"BUILDING_JSCONLY__",
"USE_FOUNDATION=1",
"ASSERT_ENABLED=1",
"DU_DISABLE_RENAMING=1",
],
"macFrameworkPath": [],
"compilerPath": "${workspaceFolder}/.vscode/clang++",
"cStandard": "c17",
"cppStandard": "c++20",
},
"cppStandard": "c++11",
"intelliSenseMode": "macos-clang-x64"
}
],
"version": 4,
"version": 4
}

33  .vscode/extensions.json (vendored)
@@ -1,33 +0,0 @@
{
"recommendations": [
// Zig
"ziglang.vscode-zig",

// C/C++
"clang.clangd",
"ms-vscode.cmake-tools",
"xaver.clang-format",
"vadimcn.vscode-lldb",

// JavaScript
"oven.bun-vscode",
"esbenp.prettier-vscode",

// TypeScript
"better-ts-errors.better-ts-errors",
"MylesMurphy.prettify-ts",

// Markdown
"bierner.markdown-preview-github-styles",
"bierner.markdown-emoji",
"bierner.emojisense",
"bierner.markdown-checkbox",
"bierner.jsdoc-markdown-highlighting",

// TOML
"tamasfe.even-better-toml",

// Other
"bierner.comment-tagged-templates",
],
}

1357  .vscode/launch.json (vendored)
(File diff suppressed because it is too large.)

200  .vscode/settings.json (vendored)
@@ -1,171 +1,53 @@
{
// Editor
"editor.tabSize": 2,
"editor.insertSpaces": true,
"editor.formatOnSave": true,
"editor.formatOnSaveMode": "file",

// Search
"search.quickOpen.includeSymbols": false,
"git.autoRepositoryDetection": "openEditors",
"search.quickOpen.includeSymbols": true,
"search.seedWithNearestWord": true,
"search.smartCase": true,
"search.exclude": {
"node_modules": true,
".git": true,
"vendor/*/**": true,
"test/node.js/upstream": true,
// This will fill up your whole search history.
"test/js/node/test/fixtures": true,
"test/js/node/test/common": true,
"src/javascript/jsc/WebKit/**/*": true
},
"search.followSymlinks": false,
"search.useIgnoreFiles": true,

// Git
"git.autoRepositoryDetection": "openEditors",
"git.ignoreSubmodules": true,
"git.ignoreLimitWarning": true,

// Zig
"zig.initialSetupDone": true,
"zig.buildOption": "build",
"zig.zls.zigLibPath": "${workspaceFolder}/vendor/zig/lib",
"zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen", "--watch", "-fincremental"],
"zig.zls.buildOnSaveStep": "check",
// "zig.zls.enableBuildOnSave": true,
// "zig.buildOnSave": true,
"zig.buildFilePath": "${workspaceFolder}/build.zig",
"zig.path": "${workspaceFolder}/vendor/zig/zig.exe",
"zig.zls.path": "${workspaceFolder}/vendor/zig/zls.exe",
"zig.formattingProvider": "zls",
"zig.zls.enableInlayHints": false,
"[zig]": {
"editor.tabSize": 4,
"editor.useTabStops": false,
"editor.defaultFormatter": "ziglang.vscode-zig",
"editor.codeActionsOnSave": {
"source.organizeImports": "never",
},
},

// lldb
"lldb.launch.initCommands": ["command source ${workspaceFolder}/.lldbinit"],
"lldb.verboseLogging": false,

// C++
"cmake.configureOnOpen": false,
"C_Cpp.errorSquiggles": "enabled",
"[cpp]": {
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"[c]": {
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"[h]": {
"editor.tabSize": 4,
"editor.defaultFormatter": "xaver.clang-format",
},
"clangd.arguments": ["--header-insertion=never"],

// JavaScript
"prettier.enable": true,
"prettier.configPath": ".prettierrc",
"eslint.workingDirectories": ["${workspaceFolder}/packages/bun-types"],
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},
"[javascriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},
"prettier.prettierPath": "./node_modules/prettier",

// TypeScript
"typescript.tsdk": "node_modules/typescript/lib",
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},

// JSON
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},
"[jsonc]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},

// Markdown
"[markdown]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
"editor.unicodeHighlight.ambiguousCharacters": true,
"editor.unicodeHighlight.invisibleCharacters": true,
"diffEditor.ignoreTrimWhitespace": false,
"editor.wordWrap": "on",
"editor.quickSuggestions": {
"comments": "off",
"strings": "off",
"other": "off",
},
},

// TOML
"[toml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},

// YAML
"[yaml]": {
"editor.defaultFormatter": "esbenp.prettier-vscode",
},

// Docker
"[dockerfile]": {
"editor.formatOnSave": false,
},

// Files
"files.exclude": {
"**/.git": true,
"**/.svn": true,
"**/.hg": true,
"**/CVS": true,
"**/.DS_Store": true,
"**/Thumbs.db": true,
"**/*.xcworkspacedata": true,
"**/*.xcscheme": true,
"**/*.xcodeproj": true,
"**/*.i": true,
},
"files.associations": {
"*.css": "tailwindcss",
"*.idl": "cpp",
"*.mdc": "markdown",
"array": "cpp",
"ios": "cpp",
"oxlint.json": "jsonc",
"bun.lock": "jsonc",
},
"C_Cpp.files.exclude": {
"**/.vscode": true,
"WebKit/JSTests": true,
"WebKit/Tools": true,
"WebKit/WebDriverTests": true,
"WebKit/WebKit.xcworkspace": true,
"WebKit/WebKitLibraries": true,
"WebKit/Websites": true,
"WebKit/resources": true,
"WebKit/LayoutTests": true,
"WebKit/ManualTests": true,
"WebKit/PerformanceTests": true,
"WebKit/WebKitLegacy": true,
"WebKit/WebCore": true,
"WebKit/WebDriver": true,
"WebKit/WebKitBuild": true,
"WebKit/WebInspectorUI": true,
"src/javascript/jsc/WebKit/JSTests": true,
"src/javascript/jsc/WebKit/Tools": true,
"src/javascript/jsc/WebKit/WebDriverTests": true,
"src/javascript/jsc/WebKit/WebKit.xcworkspace": true,
"src/javascript/jsc/WebKit/WebKitLibraries": true,
"src/javascript/jsc/WebKit/Websites": true,
"src/javascript/jsc/WebKit/resources": true,
"src/javascript/jsc/WebKit/LayoutTests": true,
"src/javascript/jsc/WebKit/ManualTests": true,
"src/javascript/jsc/WebKit/PerformanceTests": true,
"src/javascript/jsc/WebKit/WebKitLegacy": true,
"src/javascript/jsc/WebKit/WebCore": true,
"src/javascript/jsc/WebKit/WebDriver": true,
"src/javascript/jsc/WebKit/WebKitBuild": true,
"src/javascript/jsc/WebKit/WebInspectorUI": true
},
"git.detectSubmodules": false,
"files.associations": {
"*.idl": "cpp",
"memory": "cpp",
"iostream": "cpp",
"algorithm": "cpp",
"random": "cpp",
"ios": "cpp",
"filesystem": "cpp",
"__locale": "cpp",
"type_traits": "cpp",
"__mutex_base": "cpp",
"__string": "cpp",
"string": "cpp",
"string_view": "cpp",
"typeinfo": "cpp",
"__config": "cpp",
"__nullptr": "cpp",
"exception": "cpp",
"__bit_reference": "cpp",
"atomic": "cpp",
"utility": "cpp",
"sstream": "cpp"
}
}

102  .vscode/tasks.json (vendored)
@@ -2,58 +2,58 @@
"version": "2.0.0",
"tasks": [
{
"label": "Build Bun",
"type": "shell",
"command": "bun run build",
"label": "build",
"type": "process",
"command": "zig",
"args": ["build"],
"presentation": {
"echo": true,
"reveal": "silent",
"focus": false,
"panel": "shared",
"showReuseMessage": false,
"clear": false
},
"group": {
"kind": "build",
"isDefault": true,
},
"problemMatcher": [
{
"owner": "zig",
"fileLocation": ["relative", "${workspaceFolder}"],
"pattern": [
{
"regexp": "^(.+?):(\\d+):(\\d+): (error|warning|note): (.+)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5,
},
{
"regexp": "^\\s+(.+)$",
"message": 1,
"loop": true,
},
],
},
{
"owner": "clang",
"fileLocation": ["relative", "${workspaceFolder}"],
"pattern": [
{
"regexp": "^([^:]+):(\\d+):(\\d+):\\s+(warning|error|note|remark):\\s+(.*)$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"message": 5,
},
{
"regexp": "^\\s*(.*)$",
"message": 1,
"loop": true,
},
],
},
],
"presentation": {
"reveal": "always",
"panel": "shared",
"clear": true,
},
"isDefault": true
}
},
],
{
"label": "run",
"type": "process",
"command": "zig",
"args": ["run", "${file}"],
"group": "build",
"presentation": {
"showReuseMessage": false,
"clear": true
}
},
{
"label": "test",
"type": "shell",
"command": "zig",
"args": [
"test",
"${file}",
"--main-pkg-path",
"${workspaceFolder}",
"-femit-bin=${workspaceFolder}/zig-out/bin/test",
";",
"true"
],

"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"showReuseMessage": false,
"clear": true,
"panel": "new",
"reveal": "always"
}
}
]
}

@@ -1,57 +0,0 @@
cmake_minimum_required(VERSION 3.24)
message(STATUS "Configuring Bun")

list(APPEND CMAKE_MODULE_PATH
${CMAKE_SOURCE_DIR}/cmake
${CMAKE_SOURCE_DIR}/cmake/targets
${CMAKE_SOURCE_DIR}/cmake/tools
${CMAKE_SOURCE_DIR}/cmake/analysis
${CMAKE_SOURCE_DIR}/cmake/scripts
)

include(Policies)
include(Globals)

if (CMAKE_HOST_WIN32)
# Workaround for TLS certificate verification issue on Windows when downloading from GitHub
# Remove this once we've bumped the CI machines build image
set(CMAKE_TLS_VERIFY 0)
endif()

# --- Compilers ---

if(CMAKE_HOST_APPLE)
include(SetupMacSDK)
endif()
include(SetupLLVM)
include(SetupCcache)

# --- Project ---

parse_package_json(VERSION_VARIABLE DEFAULT_VERSION)
optionx(VERSION STRING "The version of Bun" DEFAULT ${DEFAULT_VERSION})
project(Bun VERSION ${VERSION})
include(Options)
include(CompilerFlags)

# --- Tools ---

include(SetupGit)
include(SetupBuildkite)
include(SetupBun)
include(SetupEsbuild)
include(SetupZig)
include(SetupRust)

# --- Targets ---

include(BuildBun)

# --- Analysis ---

if(ENABLE_ANALYSIS)
include(RunClangFormat)
include(RunClangTidy)
include(RunZigFormat)
include(RunPrettier)
endif()

@@ -1,12 +0,0 @@
## Code of conduct

- We are committed to providing a friendly, safe and welcoming environment for all, regardless of level of experience, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, nationality, or other similar characteristic.
- Please avoid using overtly sexual aliases or other nicknames that might detract from a friendly, safe and welcoming environment for all.
- Please be kind and courteous. There’s no need to be mean or rude.
- Respect that people have differences of opinion and that every design or implementation choice carries a trade-off and numerous costs. There is seldom a right answer.
- Please keep unstructured critique to a minimum. If you have solid ideas you want to experiment with, make a fork and see how it works.
- We will exclude you from interaction if you insult, demean or harass anyone. That is not welcome behavior. We interpret the term “harassment” as including the definition in the [Citizen Code of Conduct](https://github.com/stumpsyn/policies/blob/master/citizen_code_of_conduct.md); if you have any lack of clarity about what might be included in that concept, please read their definition. In particular, we don’t tolerate behavior that excludes people in socially marginalized groups.
- Private harassment is also unacceptable. No matter who you are, if you feel you have been or are being harassed or made uncomfortable by a community member, please contact one of the channel ops or an employee of Oven immediately. Whether you’re a regular contributor or a newcomer, we care about making this community a safe place for you and we’ve got your back.
- Likewise any spamming, trolling, flaming, baiting or other attention-stealing behavior is not welcome.

This code of conduct is adapted from the [Rust Code of Conduct](https://www.rust-lang.org/policies/code-of-conduct).

329  CONTRIBUTING.md
@@ -1,329 +0,0 @@
Configuring a development environment for Bun can take 10-30 minutes depending on your internet connection and computer speed. You will need ~10GB of free disk space for the repository and build artifacts.

If you are using Windows, please refer to [this guide](https://bun.sh/docs/project/building-windows).

## Install Dependencies

Using your system's package manager, install Bun's dependencies:

{% codetabs group="os" %}

```bash#macOS (Homebrew)
$ brew install automake ccache cmake coreutils gnu-sed go icu4c libiconv libtool ninja pkg-config rust ruby
```

```bash#Ubuntu/Debian
$ sudo apt install curl wget lsb-release software-properties-common cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby-full xz-utils
```

```bash#Arch
$ sudo pacman -S base-devel ccache cmake git go libiconv libtool make ninja pkg-config python rust sed unzip ruby
```

```bash#Fedora
$ sudo dnf install cargo ccache cmake git golang libtool ninja-build pkg-config rustc ruby libatomic-static libstdc++-static sed unzip which libicu-devel 'perl(Math::BigInt)'
```

```bash#openSUSE Tumbleweed
$ sudo zypper install go cmake ninja automake git icu rustup && rustup toolchain install stable
```

{% /codetabs %}

> **Note**: The Zig compiler is automatically installed and updated by the build scripts. Manual installation is not required.

Before starting, you will need to already have a release build of Bun installed, as we use our bundler to transpile and minify our code, as well as for code generation scripts.

{% codetabs %}

```bash#Native
$ curl -fsSL https://bun.sh/install | bash
```

```bash#npm
$ npm install -g bun
```

```bash#Homebrew
$ brew tap oven-sh/bun
$ brew install bun
```

{% /codetabs %}

## Install LLVM

Bun requires LLVM 19 (`clang` is part of LLVM). This version requirement is to match WebKit (precompiled), as mismatching versions will cause memory allocation failures at runtime. In most cases, you can install LLVM through your system package manager:

{% codetabs group="os" %}

```bash#macOS (Homebrew)
$ brew install llvm@19
```

```bash#Ubuntu/Debian
$ # LLVM has an automatic installation script that is compatible with all versions of Ubuntu
$ wget https://apt.llvm.org/llvm.sh -O - | sudo bash -s -- 19 all
```

```bash#Arch
$ sudo pacman -S llvm clang lld
```

```bash#Fedora
$ sudo dnf install llvm clang lld-devel
```

```bash#openSUSE Tumbleweed
$ sudo zypper install clang19 lld19 llvm19
```

{% /codetabs %}

If none of the above solutions apply, you will have to install it [manually](https://github.com/llvm/llvm-project/releases/tag/llvmorg-19.1.7).

Make sure Clang/LLVM 19 is in your path:

```bash
$ which clang-19
```

If not, run this to manually add it:

{% codetabs group="os" %}

```bash#macOS (Homebrew)
# use fish_add_path if you're using fish
# use path+="$(brew --prefix llvm@19)/bin" if you are using zsh
$ export PATH="$(brew --prefix llvm@19)/bin:$PATH"
```

```bash#Arch
# use fish_add_path if you're using fish
$ export PATH="$PATH:/usr/lib/llvm19/bin"
```

{% /codetabs %}

> ⚠️ Ubuntu distributions (<= 20.04) may require installation of the C++ standard library independently. See the [troubleshooting section](#span-file-not-found-on-ubuntu) for more information.

## Building Bun

After cloning the repository, run the following command to build. This may take a while as it will clone submodules and build dependencies.

```bash
$ bun run build
```

The binary will be located at `./build/debug/bun-debug`. It is recommended to add this to your `$PATH`. To verify the build worked, let's print the version number on the development build of Bun.

```bash
$ build/debug/bun-debug --version
x.y.z_debug
```

## VSCode

VSCode is the recommended IDE for working on Bun, as the repository ships with a working configuration for it. Once you open the project, you can run `Extensions: Show Recommended Extensions` to install the recommended extensions for Zig and C++. ZLS is automatically configured.

If you use a different editor, make sure that you tell ZLS to use the automatically installed Zig compiler, which is located at `./vendor/zig/zig.exe`. The filename is `zig.exe` so that it works as expected on Windows, but it still works on macOS/Linux (it just has a surprising file extension).

We recommend adding `./build/debug` to your `$PATH` so that you can run `bun-debug` in your terminal:

```sh
$ bun-debug
```

## Running debug builds

The `bd` package.json script compiles and runs a debug build of Bun, only printing the output of the build process if it fails.

```sh
$ bun bd <args>
$ bun bd test foo.test.ts
$ bun bd ./foo.ts
```

## Code generation scripts

Several code generation scripts are used during Bun's build process. These are run automatically when changes are made to certain files.

In particular, these are:

- `./src/codegen/generate-jssink.ts` -- Generates `build/debug/codegen/JSSink.cpp`, `build/debug/codegen/JSSink.h` which implement various classes for interfacing with `ReadableStream`. This is internally how `FileSink`, `ArrayBufferSink`, `"type": "direct"` streams and other code related to streams works.
- `./src/codegen/generate-classes.ts` -- Generates `build/debug/codegen/ZigGeneratedClasses*`, which generates Zig & C++ bindings for JavaScriptCore classes implemented in Zig. In `**/*.classes.ts` files, we define the interfaces for various classes, methods, prototypes, getters/setters etc., which the code generator reads to generate boilerplate code implementing the JavaScript objects in C++ and wiring them up to Zig.
- `./src/codegen/bundle-modules.ts` -- Bundles built-in modules like `node:fs`, `bun:ffi` into files we can include in the final binary. In development, these can be reloaded without rebuilding Zig (you still need to run `bun run build`, but it re-reads the transpiled files from disk afterwards). In release builds, these are embedded into the binary.
- `./src/codegen/bundle-functions.ts` -- Bundles globally-accessible functions implemented in JavaScript/TypeScript like `ReadableStream`, `WritableStream`, and a handful more. These are used similarly to the builtin modules, but the output more closely aligns with what WebKit/Safari does for Safari's built-in functions so that we can copy-paste the implementations from WebKit as a starting point.

## Modifying ESM modules

Certain modules like `node:fs`, `node:stream`, `bun:sqlite`, and `ws` are implemented in JavaScript. These live in `src/js/{node,bun,thirdparty}` files and are pre-bundled using Bun.

## Release build

To compile a release build of Bun, run:

```bash
$ bun run build:release
```

The binary will be located at `./build/release/bun` and `./build/release/bun-profile`.

### Download release build from pull requests

To save you time spent building a release build locally, we provide a way to run release builds from pull requests. This is useful for manually testing changes in a release build before they are merged.

To run a release build from a pull request, you can use the `bun-pr` npm package:

```sh
bunx bun-pr <pr-number>
bunx bun-pr <branch-name>
bunx bun-pr "https://github.com/oven-sh/bun/pull/1234566"
```

This will download the release build from the pull request and add it to `$PATH` as `bun-${pr-number}`. You can then run the build with `bun-${pr-number}`.

```sh
bun-1234566 --version
```

This works by downloading the release build from the GitHub Actions artifacts on the linked pull request. You may need the `gh` CLI installed to authenticate with GitHub.

## Valgrind

On Linux, valgrind can help find memory issues.

Keep in mind:

- JavaScriptCore doesn't support valgrind. It will report spurious errors.
- Valgrind is slow.
- Mimalloc will sometimes cause spurious errors when a debug build is enabled.

You'll need a very recent version of Valgrind due to DWARF 5 debug symbols. You may need to manually compile Valgrind instead of using it from your Linux package manager.

`--fair-sched=try` is necessary if running multithreaded code in Bun (such as the bundler). Otherwise it will hang.

```bash
$ valgrind --fair-sched=try --track-origins=yes bun-debug <args>
```

## Building WebKit locally + Debug mode of JSC

WebKit is not cloned by default (to save time and disk space). To clone and build WebKit locally, run:

```bash
# Clone WebKit into ./vendor/WebKit
$ git clone https://github.com/oven-sh/WebKit vendor/WebKit

# Check out the commit hash specified in `set(WEBKIT_VERSION <commit_hash>)` in cmake/tools/SetupWebKit.cmake
$ git -C vendor/WebKit checkout <commit_hash>

# Make a debug build of JSC. This will output build artifacts in ./vendor/WebKit/WebKitBuild/Debug
# Optionally, you can use `make jsc` for a release build
$ make jsc-debug && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h

# After an initial run of `make jsc-debug`, you can rebuild JSC with:
$ cmake --build vendor/WebKit/WebKitBuild/Debug --target jsc && rm vendor/WebKit/WebKitBuild/Debug/JavaScriptCore/DerivedSources/inspector/InspectorProtocolObjects.h

# Build bun with the local JSC build
$ bun run build:local
```

Using `bun run build:local` will build Bun in the `./build/debug-local` directory (instead of `./build/debug`); you'll have to change a couple of places to use this new directory:

- The first line in [`src/js/builtins.d.ts`](/src/js/builtins.d.ts)
- The `CompilationDatabase` line in [`.clangd` config](/.clangd) should be `CompilationDatabase: build/debug-local`
- In [`build.zig`](/build.zig), the `codegen_path` option should be `build/debug-local/codegen` (instead of `build/debug/codegen`)
- In [`.vscode/launch.json`](/.vscode/launch.json), many configurations use `./build/debug/`; change them as you see fit

Note that the WebKit folder, including build artifacts, is 8GB+ in size.

If you are using a JSC debug build and using VS Code, make sure to run the `C/C++: Select a Configuration` command to configure IntelliSense to find the debug headers.

Note that if you make changes to our [WebKit fork](https://github.com/oven-sh/WebKit), you will also have to change [`SetupWebKit.cmake`](/cmake/tools/SetupWebKit.cmake) to point to the new commit hash.

## Troubleshooting

### 'span' file not found on Ubuntu

> ⚠️ Please note that the instructions below are specific to issues occurring on Ubuntu. It is unlikely that the same issues will occur on other Linux distributions.

The Clang compiler typically uses the `libstdc++` C++ standard library by default. `libstdc++` is the default C++ Standard Library implementation provided by the GNU Compiler Collection (GCC). While Clang may link against the `libc++` library, this requires explicitly providing the `-stdlib` flag when running Clang.

Bun relies on C++20 features like `std::span`, which are not available in GCC versions lower than 11. GCC 10 doesn't have all of the C++20 features implemented. As a result, running `make setup` may fail with the following error:

```
fatal error: 'span' file not found
#include <span>
         ^~~~~~
```

When initially running `bun setup`, the issue may manifest as Clang being unable to compile a simple program:

```
The C++ compiler

"/usr/bin/clang++-19"

is not able to compile a simple test program.
```

To fix the error, we need to update the GCC version to 11. To do this, we'll need to check if the latest version is available in the distribution's official repositories or use a third-party repository that provides GCC 11 packages. Here are general steps:

```bash
$ sudo apt update
$ sudo apt install gcc-11 g++-11
# If the above command fails with `Unable to locate package gcc-11` we need
# to add the APT repository
$ sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
# Now run `apt install` again
$ sudo apt install gcc-11 g++-11
```

Now, we need to set GCC 11 as the default compiler:

```bash
$ sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-11 100
$ sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-11 100
```

### libarchive

If you see an error on macOS when compiling `libarchive`, run:

```bash
$ brew install pkg-config
```

### macOS `library not found for -lSystem`

If you see this error when compiling, run:

```bash
$ xcode-select --install
```

### Cannot find `libatomic.a`

Bun defaults to linking `libatomic` statically, as not all systems have it. If you are building on a distro that does not have a static libatomic available, you can run the following command to enable dynamic linking:

```bash
$ bun run build -DUSE_STATIC_LIBATOMIC=OFF
```

The built version of Bun may not work on other systems if compiled this way.

### ccache conflicts with building TinyCC on macOS

If you run into issues with `ccache` when building TinyCC, try reinstalling ccache:

```bash
brew uninstall ccache
brew install ccache
```

## Using bun-debug

- Disable logging: `BUN_DEBUG_QUIET_LOGS=1 bun-debug ...` (to disable all debug logging).
- Enable logging for a specific Zig scope: `BUN_DEBUG_EventLoop=1 bun-debug ...` (to allow `std.log.scoped(.EventLoop)`).
- Bun transpiles every file it runs. To see the actual executed source in a debug build, find it in `/tmp/bun-debug-src/...path/to/file`; for example, the transpiled version of `/home/bun/index.ts` would be in `/tmp/bun-debug-src/home/bun/index.ts`.

73  LICENSE.md
@@ -1,73 +0,0 @@
Bun itself is MIT-licensed.

## JavaScriptCore

Bun statically links JavaScriptCore (and WebKit) which is LGPL-2 licensed. WebCore files from WebKit are also licensed under LGPL2. Per LGPL2:

> (1) If you statically link against an LGPL’d library, you must also provide your application in an object (not necessarily source) format, so that a user has the opportunity to modify the library and relink the application.

You can find the patched version of WebKit used by Bun here: <https://github.com/oven-sh/webkit>. If you would like to relink Bun with changes:

- `git submodule update --init --recursive`
- `make jsc`
- `zig build`

This compiles JavaScriptCore, compiles Bun’s `.cpp` bindings for JavaScriptCore (which are the object files using JavaScriptCore) and outputs a new `bun` binary with your changes.

## Linked libraries

Bun statically links these libraries:

| Library | License |
|---------|---------|
| [`boringssl`](https://boringssl.googlesource.com/boringssl/) | [several licenses](https://boringssl.googlesource.com/boringssl/+/refs/heads/master/LICENSE) |
| [`brotli`](https://github.com/google/brotli) | MIT |
| [`libarchive`](https://github.com/libarchive/libarchive) | [several licenses](https://github.com/libarchive/libarchive/blob/master/COPYING) |
| [`lol-html`](https://github.com/cloudflare/lol-html/tree/master/c-api) | BSD 3-Clause |
| [`mimalloc`](https://github.com/microsoft/mimalloc) | MIT |
| [`picohttp`](https://github.com/h2o/picohttpparser) | dual-licensed under the Perl License or the MIT License |
| [`zstd`](https://github.com/facebook/zstd) | dual-licensed under the BSD License or GPLv2 license |
| [`simdutf`](https://github.com/simdutf/simdutf) | Apache 2.0 |
| [`tinycc`](https://github.com/tinycc/tinycc) | LGPL v2.1 |
| [`uSockets`](https://github.com/uNetworking/uSockets) | Apache 2.0 |
| [`zlib-cloudflare`](https://github.com/cloudflare/zlib) | zlib |
| [`c-ares`](https://github.com/c-ares/c-ares) | MIT licensed |
| [`libicu`](https://github.com/unicode-org/icu) 72 | [license here](https://github.com/unicode-org/icu/blob/main/icu4c/LICENSE) |
| [`libbase64`](https://github.com/aklomp/base64/blob/master/LICENSE) | BSD 2-Clause |
| [`libuv`](https://github.com/libuv/libuv) (on Windows) | MIT |
| [`libdeflate`](https://github.com/ebiggers/libdeflate) | MIT |
| A fork of [`uWebsockets`](https://github.com/jarred-sumner/uwebsockets) | Apache 2.0 licensed |
| Parts of [Tigerbeetle's IO code](https://github.com/tigerbeetle/tigerbeetle/blob/532c8b70b9142c17e07737ab6d3da68d7500cbca/src/io/windows.zig#L1) | Apache 2.0 licensed |

## Polyfills

For compatibility reasons, the following packages are embedded into Bun's binary and injected if imported.

| Package | License |
|---------|---------|
| [`assert`](https://npmjs.com/package/assert) | MIT |
| [`browserify-zlib`](https://npmjs.com/package/browserify-zlib) | MIT |
| [`buffer`](https://npmjs.com/package/buffer) | MIT |
| [`constants-browserify`](https://npmjs.com/package/constants-browserify) | MIT |
| [`crypto-browserify`](https://npmjs.com/package/crypto-browserify) | MIT |
| [`domain-browser`](https://npmjs.com/package/domain-browser) | MIT |
| [`events`](https://npmjs.com/package/events) | MIT |
| [`https-browserify`](https://npmjs.com/package/https-browserify) | MIT |
| [`os-browserify`](https://npmjs.com/package/os-browserify) | MIT |
| [`path-browserify`](https://npmjs.com/package/path-browserify) | MIT |
| [`process`](https://npmjs.com/package/process) | MIT |
| [`punycode`](https://npmjs.com/package/punycode) | MIT |
| [`querystring-es3`](https://npmjs.com/package/querystring-es3) | MIT |
| [`stream-browserify`](https://npmjs.com/package/stream-browserify) | MIT |
| [`stream-http`](https://npmjs.com/package/stream-http) | MIT |
| [`string_decoder`](https://npmjs.com/package/string_decoder) | MIT |
| [`timers-browserify`](https://npmjs.com/package/timers-browserify) | MIT |
| [`tty-browserify`](https://npmjs.com/package/tty-browserify) | MIT |
| [`url`](https://npmjs.com/package/url) | MIT |
| [`util`](https://npmjs.com/package/util) | MIT |
| [`vm-browserify`](https://npmjs.com/package/vm-browserify) | MIT |

## Additional credits

- Bun's JS transpiler, CSS lexer, and Node.js module resolver source code is a Zig port of [@evanw](https://github.com/evanw)’s [esbuild](https://github.com/evanw/esbuild) project.
- Credit to [@kipply](https://github.com/kipply) for the name "Bun"!

607  README.md
@@ -1,435 +1,236 @@
<p align="center">
<a href="https://bun.sh"><img src="https://github.com/user-attachments/assets/50282090-adfd-4ddb-9e27-c30753c6b161" alt="Logo" height=170></a>
</p>
<h1 align="center">Bun</h1>
# Speedy - a fast web bundler & JavaScript runtime environment

<p align="center">
<a href="https://bun.sh/discord" target="_blank"><img height=20 src="https://img.shields.io/discord/876711213126520882" /></a>
<img src="https://img.shields.io/github/stars/oven-sh/bun" alt="stars">
<a href="https://twitter.com/jarredsumner/status/1542824445810642946"><img src="https://img.shields.io/static/v1?label=speed&message=fast&color=success" alt="Bun speed" /></a>
</p>
Speedy bundles & transpiles JavaScript, TypeScript, and CSS. Speedy is probably the fastest bundler out today.

<div align="center">
<a href="https://bun.sh/docs">Documentation</a>
<span> • </span>
<a href="https://discord.com/invite/CXdq2DP29u">Discord</a>
<span> • </span>
<a href="https://github.com/oven-sh/bun/issues/new">Issues</a>
<span> • </span>
<a href="https://github.com/oven-sh/bun/issues/159">Roadmap</a>
<br />
</div>
### Speed hacking

### [Read the docs →](https://bun.sh/docs)
Here are some techniques Speedy uses to make your builds shockingly fast. Most are small wins. Some are big.

## What is Bun?
#### Compare comptime-known strings by nearest `(u64 || u32 || u16 || u8)`-sized integer

Bun is an all-in-one toolkit for JavaScript and TypeScript apps. It ships as a single executable called `bun`.
Parsers & lexers search source code for many tokens. For JavaScript, some of these include:

At its core is the _Bun runtime_, a fast JavaScript runtime designed as **a drop-in replacement for Node.js**. It's written in Zig and powered by JavaScriptCore under the hood, dramatically reducing startup times and memory usage.
- `yield`
- `await`
- `for`
- `of`
- `in`
- `while`

You get the idea.

When you know the string you're looking for ahead of time, it's faster to compare multiple characters at a time than a single character.

<details>

<summary>Here's a function that does this. This is used in many places throughout the code.</summary>

```zig
pub fn eqlComptime(self: string, comptime alt: anytype) bool {
    switch (comptime alt.len) {
        0 => {
            @compileError("Invalid size passed to eqlComptime");
        },
        2 => {
            const check = std.mem.readIntNative(u16, alt[0..alt.len]);
            return self.len == alt.len and std.mem.readIntNative(u16, self[0..2]) == check;
        },
        1, 3 => {
            if (alt.len != self.len) {
                return false;
            }

            inline for (alt) |c, i| {
                if (self[i] != c) return false;
            }
            return true;
        },
        4 => {
            const check = std.mem.readIntNative(u32, alt[0..alt.len]);
            return self.len == alt.len and std.mem.readIntNative(u32, self[0..4]) == check;
        },
        6 => {
            const first = std.mem.readIntNative(u32, alt[0..4]);
            const second = std.mem.readIntNative(u16, alt[4..6]);

            return self.len == alt.len and first == std.mem.readIntNative(u32, self[0..4]) and
                second == std.mem.readIntNative(u16, self[4..6]);
        },
        5, 7 => {
            const check = std.mem.readIntNative(u32, alt[0..4]);
            if (self.len != alt.len or std.mem.readIntNative(u32, self[0..4]) != check) {
                return false;
            }
            const remainder = self[4..];
            inline for (alt[4..]) |c, i| {
                if (remainder[i] != c) return false;
            }
            return true;
        },
        8 => {
            const check = std.mem.readIntNative(u64, alt[0..alt.len]);
            return self.len == alt.len and std.mem.readIntNative(u64, self[0..8]) == check;
        },
        9...11 => {
            const first = std.mem.readIntNative(u64, alt[0..8]);

            if (self.len != alt.len or first != std.mem.readIntNative(u64, self[0..8])) {
                return false;
            }

            inline for (alt[8..]) |c, i| {
                if (self[i + 8] != c) return false;
            }
            return true;
        },
        12 => {
            const first = std.mem.readIntNative(u64, alt[0..8]);
            const second = std.mem.readIntNative(u32, alt[8..12]);
            return (self.len == alt.len) and first == std.mem.readIntNative(u64, self[0..8]) and second == std.mem.readIntNative(u32, self[8..12]);
        },
        13...15 => {
            const first = std.mem.readIntNative(u64, alt[0..8]);
            const second = std.mem.readIntNative(u32, alt[8..12]);

            if (self.len != alt.len or first != std.mem.readIntNative(u64, self[0..8]) or second != std.mem.readIntNative(u32, self[8..12])) {
                return false;
            }

            inline for (alt[13..]) |c, i| {
                if (self[i + 13] != c) return false;
            }

            return true;
        },
        16 => {
            const first = std.mem.readIntNative(u64, alt[0..8]);
            const second = std.mem.readIntNative(u64, alt[8..15]);
            return (self.len == alt.len) and first == std.mem.readIntNative(u64, self[0..8]) and second == std.mem.readIntNative(u64, self[8..16]);
        },
        else => {
            @compileError(alt ++ " is too long.");
        },
    }
}
```

</details>

#### Skip decoding UTF-16 when safe

JavaScript engines represent strings as UTF-16 byte arrays. That means every character in a string occupies at least 2 bytes of memory. Most applications (and documents) use UTF-8, which uses at least 1 byte of memory.

Most JavaScript bundlers store JavaScript strings as UTF-16, either because the bundler is written in JavaScript, or to simplify the code.

It's much faster to reuse the memory from reading the contents of the JavaScript source and store the byte offset + length into the source file, than allocating a new string for each JavaScript string. This is safe when the string doesn't have a codepoint > 127, which mostly means `A-Za-z0-9` and punctuation. Most JavaScript strings don't use lots of emoji, so this avoids many tiny allocations.
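
As an illustration of the idea (not Speedy's actual code, which is Zig and operates on the source bytes directly), here is a hedged TypeScript sketch: if every byte of a string literal is ASCII, keep a `{ offset, length }` reference into the original source buffer instead of decoding and allocating a new string.

```typescript
// Hypothetical helper names for illustration only.
interface StringRef {
  offset: number; // byte offset of the literal in the source buffer
  length: number; // byte length of the literal
}

function isAsciiOnly(source: Uint8Array, offset: number, length: number): boolean {
  for (let i = offset; i < offset + length; i++) {
    if (source[i] > 127) return false; // any non-ASCII byte forces a real decode
  }
  return true;
}

function referenceOrDecode(source: Uint8Array, offset: number, length: number): StringRef | string {
  return isAsciiOnly(source, offset, length)
    ? { offset, length } // zero-copy: printing later just slices the source
    : new TextDecoder().decode(source.subarray(offset, offset + length));
}
```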

#### CSS ~Parser~ Scanner

Speedy (currently) does not have a CSS Parser. But, it still processes CSS.

Most CSS processors work something like this:

1. Copy the CSS source code to a byte array
2. Iterate through every unicode codepoint, generating tokens (lexing)
3. Convert each token into an AST node (parsing)
4. Perform 1 or more passes over the AST. For tools like PostCSS, every plugin typically adds 1+ passes over the AST. (visiting)
5. Print the source code (printing)

Speedy's CSS Scanner scans, rewrites, and prints CSS in a single pass without generating an AST. It works like this (a sketch follows this list):

1. Copy the CSS source code to a byte array
2. Iterate through every unicode codepoint, searching for lines starting with `@import` or property values with `url(`
3. For each url or import:
   1. Flush everything before the url or import
   2. Resolve the import URL
   3. Write the import URL
4. When end of file is reached, flush to disk.
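
A minimal TypeScript sketch of that single-pass flush-and-rewrite loop, assuming a caller-provided `resolve` function; the real scanner works on bytes and handles quoting, escapes, and edge cases this regex ignores:

```typescript
function rewriteCss(source: string, resolve: (url: string) => string): string {
  let out = "";
  let flushed = 0;
  // Match `@import "..."` statements and `url(...)` property values.
  const pattern = /@import\s+["']([^"']+)["']|url\(\s*["']?([^"')]+)["']?\s*\)/g;
  for (const match of source.matchAll(pattern)) {
    const url = match[1] ?? match[2];
    out += source.slice(flushed, match.index!);   // 3.1 flush everything before the url/import
    out += match[0].replace(url, resolve(url));   // 3.2–3.3 resolve and write the rewritten url
    flushed = match.index! + match[0].length;
  }
  return out + source.slice(flushed);             // 4. flush the rest at end of file
}
```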

Speedy's CSS Scanner is about 56x faster than PostCSS with the `postcss-import` and `postcss-url` plugins enabled (and sourcemaps disabled). On the other hand, auto-prefixing and minification won't work. Minifying whitespace is possible with some modifications, but minifying CSS syntax correctly needs an AST.

This approach is fast, but not without tradeoffs!

Speedy's CSS Scanner is based on esbuild's CSS Lexer. Thank you @evanwallace.

#### Compile-time generated JavaScript Parsers

At the time of writing, there are 8 different comptime-generated variations of Speedy's JavaScript parser.

```zig
pub fn NewParser(
    comptime is_typescript_enabled: bool,
    comptime is_jsx_enabled: bool,
    comptime only_scan_imports_and_do_not_visit: bool,
) type {
```

When this is `false`, branches that only apply to parsing TypeScript are removed.

```zig
comptime is_typescript_enabled: bool,
```

**Performance impact: +2%?**

```bash
bun run index.tsx # TS and JSX supported out-of-the-box
❯ hyperfine "../../build/macos-x86_64/esdev node_modules/react-dom/cjs/react-dom.development.js --resolve=disable" "../../esdev.before-comptime-js-parser node_modules/react-dom/cjs/react-dom.development.js --resolve=disable" --min-runs=500
Benchmark #1: ../../build/macos-x86_64/esdev node_modules/react-dom/cjs/react-dom.development.js --resolve=disable
  Time (mean ± σ):      25.1 ms ±  1.1 ms    [User: 20.4 ms, System: 3.1 ms]
  Range (min … max):    23.5 ms … 31.7 ms    500 runs

Benchmark #2: ../../esdev.before-comptime-js-parser node_modules/react-dom/cjs/react-dom.development.js --resolve=disable
  Time (mean ± σ):      25.6 ms ±  1.3 ms    [User: 20.9 ms, System: 3.1 ms]
  Range (min … max):    24.1 ms … 39.7 ms    500 runs
'../../build/macos-x86_64/esdev node_modules/react-dom/cjs/react-dom.development.js --resolve=disable' ran
  1.02 ± 0.07 times faster than '../../esdev.before-comptime-js-parser node_modules/react-dom/cjs/react-dom.development.js --resolve=disable'
```

The `bun` command-line tool also implements a test runner, script runner, and Node.js-compatible package manager. Instead of 1,000 node_modules for development, you only need `bun`. Bun's built-in tools are significantly faster than existing options and usable in existing Node.js projects with little to no changes.
When this is `false`, branches that only apply to parsing JSX are removed.

```bash
bun test # run tests
bun run start # run the `start` script in `package.json`
bun install <pkg> # install a package
bunx cowsay 'Hello, world!' # execute a package
```

```zig
comptime is_jsx_enabled: bool,
```

## Install
This is only used for application code when generating `node_modules.jsb`. This skips the visiting pass. It reduces parsing time by about 30%, but the source code cannot be printed without visiting. It's only useful for scanning `import` and `require`.

Bun supports Linux (x64 & arm64), macOS (x64 & Apple Silicon) and Windows (x64).

> **Linux users** — Kernel version 5.6 or higher is strongly recommended, but the minimum is 5.1.

```sh
# with install script (recommended)
curl -fsSL https://bun.sh/install | bash

# on windows
powershell -c "irm bun.sh/install.ps1 | iex"

# with npm
npm install -g bun

# with Homebrew
brew tap oven-sh/bun
brew install bun

# with Docker
docker pull oven/bun
docker run --rm --init --ulimit memlock=-1:-1 oven/bun
```

```zig
comptime only_scan_imports_and_do_not_visit: bool,
```

### Upgrade
At runtime, Speedy chooses the appropriate JavaScript parser to use based on the `loader`. In practical terms, this moves all the branches checking whether a parsing step should be run from inside several tight loops to just once, before parsing starts.

To upgrade to the latest version of Bun, run:
#### Max out per-process file handle limit automatically, leave file handles open

```sh
bun upgrade
```
**Performance impact: +5%**

Bun automatically releases a canary build on every commit to `main`. To upgrade to the latest canary build, run:
It turns out, lots of time is spent opening and closing file handles. This is feature-flagged off on Windows.

```sh
bun upgrade --canary
```
This also enabled a kqueue-based file system watcher on macOS. FSEvents, the more common macOS file system watcher, uses kqueue internally to watch directories. Watching file handles is faster than watching directories. It was surprising to learn that none of the popular filesystem watchers for Node.js adjust the process ulimit, leaving many developers to deal with "too many open file handles" errors.

[View canary build](https://github.com/oven-sh/bun/releases/tag/canary)
### Architecture

## Quick links
#### The Speedy Bundle Format

- Intro
TODO: document

  - [What is Bun?](https://bun.sh/docs/index)
  - [Installation](https://bun.sh/docs/installation)
  - [Quickstart](https://bun.sh/docs/quickstart)
  - [TypeScript](https://bun.sh/docs/typescript)
### Hot Module Reloading

- Templating
Speedy's Hot Module Reloader uses a custom binary protocol that's around 8x more space efficient than other bundlers.

  - [`bun init`](https://bun.sh/docs/cli/init)
  - [`bun create`](https://bun.sh/docs/cli/bun-create)
- File change notifications cost 9 bytes.
- Build metadata costs 13 bytes + length of the module path that was rebuilt + size of the built file.

- CLI
For comparison, Vite's HMR implementation uses 104 bytes + length of the module path that was rebuilt (at the time of writing).
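
The wire layout of those messages isn't spelled out here, so the following TypeScript sketch only illustrates how a fixed 9-byte file-change notification could be packed (an assumed layout of a 1-byte message tag, a 4-byte file id, and a 4-byte timestamp) rather than the actual protocol:

```typescript
// Assumed, illustrative layout — not the documented Speedy/Bun HMR format.
function encodeFileChange(fileId: number, timestamp: number): Uint8Array {
  const buf = new Uint8Array(9);
  const view = new DataView(buf.buffer);
  view.setUint8(0, 1);               // message kind: "file changed" (assumed tag)
  view.setUint32(1, fileId, true);   // little-endian file id
  view.setUint32(5, timestamp, true); // little-endian timestamp (seconds)
  return buf;
}
```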

  - [`bun upgrade`](https://bun.sh/docs/cli/bun-upgrade)
#### Instant CSS

- Runtime
When using `<link rel="stylesheet">`, Speedy HMR "just works", with zero configuration and without modifying HTML.

  - [`bun run`](https://bun.sh/docs/cli/run)
  - [File types (Loaders)](https://bun.sh/docs/runtime/loaders)
  - [TypeScript](https://bun.sh/docs/runtime/typescript)
  - [JSX](https://bun.sh/docs/runtime/jsx)
  - [Environment variables](https://bun.sh/docs/runtime/env)
  - [Bun APIs](https://bun.sh/docs/runtime/bun-apis)
  - [Web APIs](https://bun.sh/docs/runtime/web-apis)
  - [Node.js compatibility](https://bun.sh/docs/runtime/nodejs-apis)
  - [Single-file executable](https://bun.sh/docs/bundler/executables)
  - [Plugins](https://bun.sh/docs/runtime/plugins)
  - [Watch mode / Hot Reloading](https://bun.sh/docs/runtime/hot)
  - [Module resolution](https://bun.sh/docs/runtime/modules)
  - [Auto-install](https://bun.sh/docs/runtime/autoimport)
  - [bunfig.toml](https://bun.sh/docs/runtime/bunfig)
  - [Debugger](https://bun.sh/docs/runtime/debugger)
  - [$ Shell](https://bun.sh/docs/runtime/shell)
Here's how:

- Package manager
- On page load, CSS files are built per request
- When you make a change to a local CSS file, a file change notification is pushed over the websocket connection to the browser (HMR client)
- For the first update, instead of asking for a new file to build, it asks for a list of files that the file within the `<link rel="stylesheet">` imports, and any that those `@import`, recursively. If `index.css` imports `link.css` and `link.css` imports `colors.css`, that list will include `index.css`, `link.css`, and `colors.css`.
- Preserving import order, the link tags are replaced in a single DOM update. This time, an additional query string flag, `?noimport`, is added, which tells the Speedy CSS Scanner to remove any `@import` statements from the built CSS file. (A sketch of this step follows the list.)
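
A hedged TypeScript sketch of that client-side step: swap the page's stylesheet `<link>` tags for the flattened import list in one DOM update, appending `?noimport` to each URL. The `files` array stands in for the import-order list the HMR client received; the real client code may differ.

```typescript
function replaceStylesheets(files: string[]): void {
  const fragment = document.createDocumentFragment();
  for (const file of files) {
    const link = document.createElement("link");
    link.rel = "stylesheet";
    link.href = `${file}?noimport`; // dev server strips @import from each built file
    fragment.appendChild(link);
  }
  // Remove the old tags and insert the new ones in import order, in a single update.
  document.querySelectorAll('link[rel="stylesheet"]').forEach((el) => el.remove());
  document.head.appendChild(fragment);
}
```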
||||
|
||||
- [`bun install`](https://bun.sh/docs/cli/install)
|
||||
- [`bun add`](https://bun.sh/docs/cli/add)
|
||||
- [`bun remove`](https://bun.sh/docs/cli/remove)
|
||||
- [`bun update`](https://bun.sh/docs/cli/update)
|
||||
- [`bun link`](https://bun.sh/docs/cli/link)
|
||||
- [`bun unlink`](https://bun.sh/docs/cli/unlink)
|
||||
- [`bun pm`](https://bun.sh/docs/cli/pm)
|
||||
- [`bun outdated`](https://bun.sh/docs/cli/outdated)
|
||||
- [`bun publish`](https://bun.sh/docs/cli/publish)
|
||||
- [`bun patch`](https://bun.sh/docs/install/patch)
|
||||
- [`bun patch-commit`](https://bun.sh/docs/cli/patch-commit)
|
||||
- [Global cache](https://bun.sh/docs/install/cache)
|
||||
- [Workspaces](https://bun.sh/docs/install/workspaces)
|
||||
- [Lifecycle scripts](https://bun.sh/docs/install/lifecycle)
|
||||
- [Filter](https://bun.sh/docs/cli/filter)
|
||||
- [Lockfile](https://bun.sh/docs/install/lockfile)
|
||||
- [Scopes and registries](https://bun.sh/docs/install/registries)
|
||||
- [Overrides and resolutions](https://bun.sh/docs/install/overrides)
|
||||
- [`.npmrc`](https://bun.sh/docs/install/npmrc)
|
||||
|
||||
- Bundler
|
||||
|
||||
- [`Bun.build`](https://bun.sh/docs/bundler)
|
||||
- [Loaders](https://bun.sh/docs/bundler/loaders)
|
||||
- [Plugins](https://bun.sh/docs/bundler/plugins)
|
||||
- [Macros](https://bun.sh/docs/bundler/macros)
|
||||
- [vs esbuild](https://bun.sh/docs/bundler/vs-esbuild)
|
||||
- [Single-file executable](https://bun.sh/docs/bundler/executables)
|
||||
- [CSS](https://bun.sh/docs/bundler/css)
|
||||
- [HTML](https://bun.sh/docs/bundler/html)
|
||||
- [Hot Module Replacement (HMR)](https://bun.sh/docs/bundler/hmr)
|
||||
- [Full-stack with HTML imports](https://bun.sh/docs/bundler/fullstack)
|
||||
|
||||
- Test runner
|
||||
|
||||
- [`bun test`](https://bun.sh/docs/cli/test)
|
||||
- [Writing tests](https://bun.sh/docs/test/writing)
|
||||
- [Watch mode](https://bun.sh/docs/test/hot)
|
||||
- [Lifecycle hooks](https://bun.sh/docs/test/lifecycle)
|
||||
- [Mocks](https://bun.sh/docs/test/mocks)
|
||||
- [Snapshots](https://bun.sh/docs/test/snapshots)
|
||||
- [Dates and times](https://bun.sh/docs/test/time)
|
||||
- [DOM testing](https://bun.sh/docs/test/dom)
|
||||
- [Code coverage](https://bun.sh/docs/test/coverage)
|
||||
- [Configuration](https://bun.sh/docs/test/configuration)
|
||||
- [Discovery](https://bun.sh/docs/test/discovery)
|
||||
- [Reporters](https://bun.sh/docs/test/reporters)
|
||||
- [Runtime Behavior](https://bun.sh/docs/test/runtime-behavior)
|
||||
|
||||
- Package runner
|
||||
|
||||
- [`bunx`](https://bun.sh/docs/cli/bunx)
|
||||
|
||||
- API
|
||||
|
||||
- [HTTP server (`Bun.serve`)](https://bun.sh/docs/api/http)
|
||||
- [WebSockets](https://bun.sh/docs/api/websockets)
|
||||
- [Workers](https://bun.sh/docs/api/workers)
|
||||
- [Binary data](https://bun.sh/docs/api/binary-data)
|
||||
- [Streams](https://bun.sh/docs/api/streams)
|
||||
- [File I/O (`Bun.file`)](https://bun.sh/docs/api/file-io)
|
||||
- [import.meta](https://bun.sh/docs/api/import-meta)
|
||||
- [SQLite (`bun:sqlite`)](https://bun.sh/docs/api/sqlite)
|
||||
- [PostgreSQL (`Bun.sql`)](https://bun.sh/docs/api/sql)
|
||||
- [Redis (`Bun.redis`)](https://bun.sh/docs/api/redis)
|
||||
- [S3 Client (`Bun.s3`)](https://bun.sh/docs/api/s3)
|
||||
- [FileSystemRouter](https://bun.sh/docs/api/file-system-router)
|
||||
- [TCP sockets](https://bun.sh/docs/api/tcp)
|
||||
- [UDP sockets](https://bun.sh/docs/api/udp)
|
||||
- [Globals](https://bun.sh/docs/api/globals)
|
||||
- [$ Shell](https://bun.sh/docs/runtime/shell)
|
||||
- [Child processes (spawn)](https://bun.sh/docs/api/spawn)
|
||||
- [Transpiler (`Bun.Transpiler`)](https://bun.sh/docs/api/transpiler)
|
||||
- [Hashing](https://bun.sh/docs/api/hashing)
|
||||
- [Colors (`Bun.color`)](https://bun.sh/docs/api/color)
|
||||
- [Console](https://bun.sh/docs/api/console)
|
||||
- [FFI (`bun:ffi`)](https://bun.sh/docs/api/ffi)
|
||||
- [C Compiler (`bun:ffi` cc)](https://bun.sh/docs/api/cc)
|
||||
- [HTMLRewriter](https://bun.sh/docs/api/html-rewriter)
|
||||
- [Testing (`bun:test`)](https://bun.sh/docs/api/test)
|
||||
- [Cookies (`Bun.Cookie`)](https://bun.sh/docs/api/cookie)
|
||||
- [Utils](https://bun.sh/docs/api/utils)
|
||||
- [Node-API](https://bun.sh/docs/api/node-api)
|
||||
- [Glob (`Bun.Glob`)](https://bun.sh/docs/api/glob)
|
||||
- [Semver (`Bun.semver`)](https://bun.sh/docs/api/semver)
|
||||
- [DNS](https://bun.sh/docs/api/dns)
|
||||
- [fetch API extensions](https://bun.sh/docs/api/fetch)
|
||||
|
||||
## Guides

- Binary
  - [Convert a Blob to a string](https://bun.sh/guides/binary/blob-to-string)
  - [Convert a Buffer to a blob](https://bun.sh/guides/binary/buffer-to-blob)
  - [Convert a Blob to a DataView](https://bun.sh/guides/binary/blob-to-dataview)
  - [Convert a Buffer to a string](https://bun.sh/guides/binary/buffer-to-string)
  - [Convert a Blob to a ReadableStream](https://bun.sh/guides/binary/blob-to-stream)
  - [Convert a Blob to a Uint8Array](https://bun.sh/guides/binary/blob-to-typedarray)
  - [Convert a DataView to a string](https://bun.sh/guides/binary/dataview-to-string)
  - [Convert a Uint8Array to a Blob](https://bun.sh/guides/binary/typedarray-to-blob)
  - [Convert a Blob to an ArrayBuffer](https://bun.sh/guides/binary/blob-to-arraybuffer)
  - [Convert an ArrayBuffer to a Blob](https://bun.sh/guides/binary/arraybuffer-to-blob)
  - [Convert a Buffer to a Uint8Array](https://bun.sh/guides/binary/buffer-to-typedarray)
  - [Convert a Uint8Array to a Buffer](https://bun.sh/guides/binary/typedarray-to-buffer)
  - [Convert a Uint8Array to a string](https://bun.sh/guides/binary/typedarray-to-string)
  - [Convert a Buffer to an ArrayBuffer](https://bun.sh/guides/binary/buffer-to-arraybuffer)
  - [Convert an ArrayBuffer to a Buffer](https://bun.sh/guides/binary/arraybuffer-to-buffer)
  - [Convert an ArrayBuffer to a string](https://bun.sh/guides/binary/arraybuffer-to-string)
  - [Convert a Uint8Array to a DataView](https://bun.sh/guides/binary/typedarray-to-dataview)
  - [Convert a Buffer to a ReadableStream](https://bun.sh/guides/binary/buffer-to-readablestream)
  - [Convert a Uint8Array to an ArrayBuffer](https://bun.sh/guides/binary/typedarray-to-arraybuffer)
  - [Convert an ArrayBuffer to a Uint8Array](https://bun.sh/guides/binary/arraybuffer-to-typedarray)
  - [Convert an ArrayBuffer to an array of numbers](https://bun.sh/guides/binary/arraybuffer-to-array)
  - [Convert a Uint8Array to a ReadableStream](https://bun.sh/guides/binary/typedarray-to-readablestream)

- Ecosystem
  - [Use React and JSX](https://bun.sh/guides/ecosystem/react)
  - [Use EdgeDB with Bun](https://bun.sh/guides/ecosystem/edgedb)
  - [Use Prisma with Bun](https://bun.sh/guides/ecosystem/prisma)
  - [Add Sentry to a Bun app](https://bun.sh/guides/ecosystem/sentry)
  - [Create a Discord bot](https://bun.sh/guides/ecosystem/discordjs)
  - [Run Bun as a daemon with PM2](https://bun.sh/guides/ecosystem/pm2)
  - [Use Drizzle ORM with Bun](https://bun.sh/guides/ecosystem/drizzle)
  - [Build an app with Nuxt and Bun](https://bun.sh/guides/ecosystem/nuxt)
  - [Build an app with Qwik and Bun](https://bun.sh/guides/ecosystem/qwik)
  - [Build an app with Astro and Bun](https://bun.sh/guides/ecosystem/astro)
  - [Build an app with Remix and Bun](https://bun.sh/guides/ecosystem/remix)
  - [Build a frontend using Vite and Bun](https://bun.sh/guides/ecosystem/vite)
  - [Build an app with Next.js and Bun](https://bun.sh/guides/ecosystem/nextjs)
  - [Run Bun as a daemon with systemd](https://bun.sh/guides/ecosystem/systemd)
  - [Deploy a Bun application on Render](https://bun.sh/guides/ecosystem/render)
  - [Build an HTTP server using Hono and Bun](https://bun.sh/guides/ecosystem/hono)
  - [Build an app with SvelteKit and Bun](https://bun.sh/guides/ecosystem/sveltekit)
  - [Build an app with SolidStart and Bun](https://bun.sh/guides/ecosystem/solidstart)
  - [Build an HTTP server using Elysia and Bun](https://bun.sh/guides/ecosystem/elysia)
  - [Build an HTTP server using StricJS and Bun](https://bun.sh/guides/ecosystem/stric)
  - [Containerize a Bun application with Docker](https://bun.sh/guides/ecosystem/docker)
  - [Build an HTTP server using Express and Bun](https://bun.sh/guides/ecosystem/express)
  - [Use Neon Postgres through Drizzle ORM](https://bun.sh/guides/ecosystem/neon-drizzle)
  - [Server-side render (SSR) a React component](https://bun.sh/guides/ecosystem/ssr-react)
  - [Read and write data to MongoDB using Mongoose and Bun](https://bun.sh/guides/ecosystem/mongoose)
  - [Use Neon's Serverless Postgres with Bun](https://bun.sh/guides/ecosystem/neon-serverless-postgres)

- HTMLRewriter
  - [Extract links from a webpage using HTMLRewriter](https://bun.sh/guides/html-rewriter/extract-links)
  - [Extract social share images and Open Graph tags](https://bun.sh/guides/html-rewriter/extract-social-meta)

- HTTP
  - [Hot reload an HTTP server](https://bun.sh/guides/http/hot)
  - [Common HTTP server usage](https://bun.sh/guides/http/server)
  - [Write a simple HTTP server](https://bun.sh/guides/http/simple)
  - [Configure TLS on an HTTP server](https://bun.sh/guides/http/tls)
  - [Send an HTTP request using fetch](https://bun.sh/guides/http/fetch)
  - [Proxy HTTP requests using fetch()](https://bun.sh/guides/http/proxy)
  - [Start a cluster of HTTP servers](https://bun.sh/guides/http/cluster)
  - [Stream a file as an HTTP Response](https://bun.sh/guides/http/stream-file)
  - [fetch with unix domain sockets in Bun](https://bun.sh/guides/http/fetch-unix)
  - [Upload files via HTTP using FormData](https://bun.sh/guides/http/file-uploads)
  - [Streaming HTTP Server with Async Iterators](https://bun.sh/guides/http/stream-iterator)
  - [Streaming HTTP Server with Node.js Streams](https://bun.sh/guides/http/stream-node-streams-in-bun)

- Install
  - [Add a dependency](https://bun.sh/guides/install/add)
  - [Add a Git dependency](https://bun.sh/guides/install/add-git)
  - [Add a peer dependency](https://bun.sh/guides/install/add-peer)
  - [Add a trusted dependency](https://bun.sh/guides/install/trusted)
  - [Add a development dependency](https://bun.sh/guides/install/add-dev)
  - [Add a tarball dependency](https://bun.sh/guides/install/add-tarball)
  - [Add an optional dependency](https://bun.sh/guides/install/add-optional)
  - [Generate a yarn-compatible lockfile](https://bun.sh/guides/install/yarnlock)
  - [Configuring a monorepo using workspaces](https://bun.sh/guides/install/workspaces)
  - [Install a package under a different name](https://bun.sh/guides/install/npm-alias)
  - [Install dependencies with Bun in GitHub Actions](https://bun.sh/guides/install/cicd)
  - [Using bun install with Artifactory](https://bun.sh/guides/install/jfrog-artifactory)
  - [Configure git to diff Bun's lockb lockfile](https://bun.sh/guides/install/git-diff-bun-lockfile)
  - [Override the default npm registry for bun install](https://bun.sh/guides/install/custom-registry)
  - [Using bun install with an Azure Artifacts npm registry](https://bun.sh/guides/install/azure-artifacts)
  - [Migrate from npm install to bun install](https://bun.sh/guides/install/from-npm-install-to-bun-install)
  - [Configure a private registry for an organization scope with bun install](https://bun.sh/guides/install/registry-scope)

- Process
  - [Read from stdin](https://bun.sh/guides/process/stdin)
  - [Listen for CTRL+C](https://bun.sh/guides/process/ctrl-c)
  - [Spawn a child process](https://bun.sh/guides/process/spawn)
  - [Listen to OS signals](https://bun.sh/guides/process/os-signals)
  - [Parse command-line arguments](https://bun.sh/guides/process/argv)
  - [Read stderr from a child process](https://bun.sh/guides/process/spawn-stderr)
  - [Read stdout from a child process](https://bun.sh/guides/process/spawn-stdout)
  - [Get the process uptime in nanoseconds](https://bun.sh/guides/process/nanoseconds)
  - [Spawn a child process and communicate using IPC](https://bun.sh/guides/process/ipc)

- Read file
  - [Read a JSON file](https://bun.sh/guides/read-file/json)
  - [Check if a file exists](https://bun.sh/guides/read-file/exists)
  - [Read a file as a string](https://bun.sh/guides/read-file/string)
  - [Read a file to a Buffer](https://bun.sh/guides/read-file/buffer)
  - [Get the MIME type of a file](https://bun.sh/guides/read-file/mime)
  - [Watch a directory for changes](https://bun.sh/guides/read-file/watch)
  - [Read a file as a ReadableStream](https://bun.sh/guides/read-file/stream)
  - [Read a file to a Uint8Array](https://bun.sh/guides/read-file/uint8array)
  - [Read a file to an ArrayBuffer](https://bun.sh/guides/read-file/arraybuffer)

- Runtime
  - [Delete files](https://bun.sh/guides/runtime/delete-file)
  - [Run a Shell Command](https://bun.sh/guides/runtime/shell)
  - [Import a JSON file](https://bun.sh/guides/runtime/import-json)
  - [Import a TOML file](https://bun.sh/guides/runtime/import-toml)
  - [Set a time zone in Bun](https://bun.sh/guides/runtime/timezone)
  - [Set environment variables](https://bun.sh/guides/runtime/set-env)
  - [Re-map import paths](https://bun.sh/guides/runtime/tsconfig-paths)
  - [Delete directories](https://bun.sh/guides/runtime/delete-directory)
  - [Read environment variables](https://bun.sh/guides/runtime/read-env)
  - [Import a HTML file as text](https://bun.sh/guides/runtime/import-html)
  - [Install and run Bun in GitHub Actions](https://bun.sh/guides/runtime/cicd)
  - [Debugging Bun with the web debugger](https://bun.sh/guides/runtime/web-debugger)
  - [Install TypeScript declarations for Bun](https://bun.sh/guides/runtime/typescript)
  - [Debugging Bun with the VS Code extension](https://bun.sh/guides/runtime/vscode-debugger)
  - [Inspect memory usage using V8 heap snapshots](https://bun.sh/guides/runtime/heap-snapshot)
  - [Define and replace static globals & constants](https://bun.sh/guides/runtime/define-constant)
  - [Codesign a single-file JavaScript executable on macOS](https://bun.sh/guides/runtime/codesign-macos-executable)

- Streams
  - [Convert a ReadableStream to JSON](https://bun.sh/guides/streams/to-json)
  - [Convert a ReadableStream to a Blob](https://bun.sh/guides/streams/to-blob)
  - [Convert a ReadableStream to a Buffer](https://bun.sh/guides/streams/to-buffer)
  - [Convert a ReadableStream to a string](https://bun.sh/guides/streams/to-string)
  - [Convert a ReadableStream to a Uint8Array](https://bun.sh/guides/streams/to-typedarray)
  - [Convert a ReadableStream to an array of chunks](https://bun.sh/guides/streams/to-array)
  - [Convert a Node.js Readable to JSON](https://bun.sh/guides/streams/node-readable-to-json)
  - [Convert a ReadableStream to an ArrayBuffer](https://bun.sh/guides/streams/to-arraybuffer)
  - [Convert a Node.js Readable to a Blob](https://bun.sh/guides/streams/node-readable-to-blob)
  - [Convert a Node.js Readable to a string](https://bun.sh/guides/streams/node-readable-to-string)
  - [Convert a Node.js Readable to an Uint8Array](https://bun.sh/guides/streams/node-readable-to-uint8array)
  - [Convert a Node.js Readable to an ArrayBuffer](https://bun.sh/guides/streams/node-readable-to-arraybuffer)

- Test
  - [Spy on methods in `bun test`](https://bun.sh/guides/test/spy-on)
  - [Bail early with the Bun test runner](https://bun.sh/guides/test/bail)
  - [Mock functions in `bun test`](https://bun.sh/guides/test/mock-functions)
  - [Run tests in watch mode with Bun](https://bun.sh/guides/test/watch-mode)
  - [Use snapshot testing in `bun test`](https://bun.sh/guides/test/snapshot)
  - [Skip tests with the Bun test runner](https://bun.sh/guides/test/skip-tests)
  - [Using Testing Library with Bun](https://bun.sh/guides/test/testing-library)
  - [Update snapshots in `bun test`](https://bun.sh/guides/test/update-snapshots)
  - [Run your tests with the Bun test runner](https://bun.sh/guides/test/run-tests)
  - [Set the system time in Bun's test runner](https://bun.sh/guides/test/mock-clock)
  - [Set a per-test timeout with the Bun test runner](https://bun.sh/guides/test/timeout)
  - [Migrate from Jest to Bun's test runner](https://bun.sh/guides/test/migrate-from-jest)
  - [Write browser DOM tests with Bun and happy-dom](https://bun.sh/guides/test/happy-dom)
  - [Mark a test as a "todo" with the Bun test runner](https://bun.sh/guides/test/todo-tests)
  - [Re-run tests multiple times with the Bun test runner](https://bun.sh/guides/test/rerun-each)
  - [Generate code coverage reports with the Bun test runner](https://bun.sh/guides/test/coverage)
  - [import, require, and test Svelte components with bun test](https://bun.sh/guides/test/svelte-test)
  - [Set a code coverage threshold with the Bun test runner](https://bun.sh/guides/test/coverage-threshold)

- Util
  - [Generate a UUID](https://bun.sh/guides/util/javascript-uuid)
  - [Hash a password](https://bun.sh/guides/util/hash-a-password)
  - [Escape an HTML string](https://bun.sh/guides/util/escape-html)
  - [Get the current Bun version](https://bun.sh/guides/util/version)
  - [Encode and decode base64 strings](https://bun.sh/guides/util/base64)
  - [Compress and decompress data with gzip](https://bun.sh/guides/util/gzip)
  - [Sleep for a fixed number of milliseconds](https://bun.sh/guides/util/sleep)
  - [Detect when code is executed with Bun](https://bun.sh/guides/util/detect-bun)
  - [Check if two objects are deeply equal](https://bun.sh/guides/util/deep-equals)
  - [Compress and decompress data with DEFLATE](https://bun.sh/guides/util/deflate)
  - [Get the absolute path to the current entrypoint](https://bun.sh/guides/util/main)
  - [Get the directory of the current file](https://bun.sh/guides/util/import-meta-dir)
  - [Check if the current file is the entrypoint](https://bun.sh/guides/util/entrypoint)
  - [Get the file name of the current file](https://bun.sh/guides/util/import-meta-file)
  - [Convert a file URL to an absolute path](https://bun.sh/guides/util/file-url-to-path)
  - [Convert an absolute path to a file URL](https://bun.sh/guides/util/path-to-file-url)
  - [Get the absolute path of the current file](https://bun.sh/guides/util/import-meta-path)
  - [Get the path to an executable bin file](https://bun.sh/guides/util/which-path-to-executable-bin)

- WebSocket
  - [Build a publish-subscribe WebSocket server](https://bun.sh/guides/websocket/pubsub)
  - [Build a simple WebSocket server](https://bun.sh/guides/websocket/simple)
  - [Enable compression for WebSocket messages](https://bun.sh/guides/websocket/compression)
  - [Set per-socket contextual data on a WebSocket](https://bun.sh/guides/websocket/context)

- Write file
  - [Delete a file](https://bun.sh/guides/write-file/unlink)
  - [Write to stdout](https://bun.sh/guides/write-file/stdout)
  - [Write a file to stdout](https://bun.sh/guides/write-file/cat)
  - [Write a Blob to a file](https://bun.sh/guides/write-file/blob)
  - [Write a string to a file](https://bun.sh/guides/write-file/basic)
  - [Append content to a file](https://bun.sh/guides/write-file/append)
  - [Write a file incrementally](https://bun.sh/guides/write-file/filesink)
  - [Write a Response to a file](https://bun.sh/guides/write-file/response)
  - [Copy a file to another location](https://bun.sh/guides/write-file/file-cp)
  - [Write a ReadableStream to a file](https://bun.sh/guides/write-file/stream)

## Contributing

Refer to the [Project > Contributing](https://bun.sh/docs/project/contributing) guide to start contributing to Bun.

## License

Refer to the [Project > License](https://bun.sh/docs/project/licensing) page for information about Bun's licensing.

While this approach is fast today, there are more scalable alternatives worth considering for large codebases (such as a bundling format that supports loading individual files unbundled). This may change in the near future.

12 SECURITY.md
@@ -1,12 +0,0 @@

# Security Policy

## Supported Versions

| Version | Supported          |
| ------- | ------------------ |
| 1.x.x   | :white_check_mark: |

## Reporting a Vulnerability

Report any discovered vulnerabilities to the Bun team by emailing `security@bun.sh`. Your report will be acknowledged within 5 days, and a team member will be assigned as the primary handler. To the greatest extent possible, the security team will endeavor to keep you informed of the progress being made towards a fix and full announcement, and may ask for additional information or guidance surrounding the reported issue.

@@ -1,3 +0,0 @@

BUN=bun
DENO=deno
NODE=node

2 bench/.gitignore vendored
@@ -1,2 +0,0 @@

ffi/src/target
ffi/src/*.node

@@ -1,13 +0,0 @@

```bash
npm install

bun run ffi
bun run log
bun run gzip
bun run async
bun run sqlite

# to use custom version of bun/deno/node binary
BUN=path/to/bun bun run ffi
# or edit .env file
```

@@ -1,3 +0,0 @@

BUN=bun
DENO=deno
NODE=node

@@ -1,30 +0,0 @@

// https://github.com/nodejs/node/issues/34493
import { AsyncLocalStorage } from "async_hooks";
const asyncLocalStorage = new AsyncLocalStorage();

// let fn = () => Promise.resolve(2).then(() => new Promise(resolve => queueMicrotask(resolve)));
let fn = () => /test/.test("test");

let runWithExpiry = async (expiry, fn) => {
  let iterations = 0;
  while (Date.now() < expiry) {
    await fn();
    iterations++;
  }
  return iterations;
};

console.log(`Performed ${await runWithExpiry(Date.now() + 1000, fn)} iterations to warmup`);

let withAls;
await asyncLocalStorage.run(123, async () => {
  withAls = await runWithExpiry(Date.now() + 45000, fn);
  console.log(`Performed ${withAls} iterations (with ALS enabled)`);
});

asyncLocalStorage.disable();

let withoutAls = await runWithExpiry(Date.now() + 45000, fn);
console.log(`Performed ${withoutAls} iterations (with ALS disabled)`);

console.log("ALS penalty: " + Math.round((1 - withAls / withoutAls) * 10000) / 100 + "%");

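The last line of this benchmark turns the two iteration counts into a relative slowdown. A minimal sketch with made-up numbers shows how that percentage falls out of the formula (the values below are illustrative, not measured results):

```js
// Illustrative numbers only — not measured results.
const withAls = 9_500_000; // iterations completed while asyncLocalStorage.run() is active
const withoutAls = 10_000_000; // iterations completed after asyncLocalStorage.disable()

// Same formula as the benchmark's final line: fraction of throughput lost,
// expressed as a percentage rounded to two decimal places.
const penalty = Math.round((1 - withAls / withoutAls) * 10000) / 100;
console.log(`ALS penalty: ${penalty}%`); // => "ALS penalty: 5%"
```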
@@ -1,7 +0,0 @@

import { bench, run } from "../runner.mjs";

bench("sync", () => {});
bench("async", async () => {});
bench("await 1", async () => await 1);

await run();

@@ -1,7 +0,0 @@

import { bench, run } from "../runner.mjs";

bench("sync", () => {});
bench("async", async () => {});
bench("await 1", async () => await 1);

await run();

@@ -1,7 +0,0 @@

import { bench, run } from "../runner.mjs";

bench("sync", () => {});
bench("async", async () => {});
bench("await 1", async () => await 1);

await run();

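The three runtime-specific entry points above all import `bench` and `run` from a shared `../runner.mjs`, which is not included in this diff. Since the bench workspace depends on `mitata` (see the lockfile below), one plausible shape for that helper is a thin re-export; this is a sketch under that assumption, not the actual file:

```js
// runner.mjs — hypothetical sketch; the real helper is not shown in this diff.
// Assumes it simply re-exports mitata's benchmarking API, which the bench
// package already declares as a dependency.
export { bench, run } from "mitata";
```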
@@ -1,11 +0,0 @@

{
  "name": "bench",
  "scripts": {
    "deps": "exit 0",
    "build": "exit 0",
    "bench:bun": "bun bun.js",
    "bench:node": "node node.mjs",
    "bench:deno": "deno run -A --unstable deno.js",
    "bench": "bun run bench:bun && bun run bench:node && bun run bench:deno"
  }
}

416 bench/bun.lock
@@ -1,416 +0,0 @@

{
  "lockfileVersion": 1,
  "workspaces": {
    "": {
      "name": "bench",
      "dependencies": {
        "@babel/core": "^7.16.10",
        "@babel/preset-react": "^7.16.7",
        "@babel/standalone": "^7.24.7",
        "@swc/core": "^1.2.133",
        "benchmark": "^2.1.4",
        "braces": "^3.0.2",
        "color": "^4.2.3",
        "esbuild": "^0.14.12",
        "eventemitter3": "^5.0.0",
        "execa": "^8.0.1",
        "fast-glob": "3.3.1",
        "fdir": "^6.1.0",
        "mitata": "^1.0.25",
        "react": "^18.3.1",
        "react-dom": "^18.3.1",
        "string-width": "7.1.0",
        "tinycolor2": "^1.6.0",
        "zx": "^7.2.3",
      },
      "devDependencies": {
        "fast-deep-equal": "^3.1.3",
      },
    },
  },
  "packages": {
"@ampproject/remapping": ["@ampproject/remapping@2.2.0", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.1.0", "@jridgewell/trace-mapping": "^0.3.9" } }, "sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w=="],
|
||||
|
||||
"@babel/code-frame": ["@babel/code-frame@7.18.6", "", { "dependencies": { "@babel/highlight": "^7.18.6" } }, "sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q=="],
|
||||
|
||||
"@babel/compat-data": ["@babel/compat-data@7.20.14", "", {}, "sha512-0YpKHD6ImkWMEINCyDAD0HLLUH/lPCefG8ld9it8DJB2wnApraKuhgYTvTY1z7UFIfBTGy5LwncZ+5HWWGbhFw=="],
|
||||
|
||||
"@babel/core": ["@babel/core@7.20.12", "", { "dependencies": { "@ampproject/remapping": "^2.1.0", "@babel/code-frame": "^7.18.6", "@babel/generator": "^7.20.7", "@babel/helper-compilation-targets": "^7.20.7", "@babel/helper-module-transforms": "^7.20.11", "@babel/helpers": "^7.20.7", "@babel/parser": "^7.20.7", "@babel/template": "^7.20.7", "@babel/traverse": "^7.20.12", "@babel/types": "^7.20.7", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.2", "semver": "^6.3.0" } }, "sha512-XsMfHovsUYHFMdrIHkZphTN/2Hzzi78R08NuHfDBehym2VsPDL6Zn/JAD/JQdnRvbSsbQc4mVaU1m6JgtTEElg=="],
|
||||
|
||||
"@babel/generator": ["@babel/generator@7.20.14", "", { "dependencies": { "@babel/types": "^7.20.7", "@jridgewell/gen-mapping": "^0.3.2", "jsesc": "^2.5.1" } }, "sha512-AEmuXHdcD3A52HHXxaTmYlb8q/xMEhoRP67B3T4Oq7lbmSoqroMZzjnGj3+i1io3pdnF8iBYVu4Ilj+c4hBxYg=="],
|
||||
|
||||
"@babel/helper-annotate-as-pure": ["@babel/helper-annotate-as-pure@7.18.6", "", { "dependencies": { "@babel/types": "^7.18.6" } }, "sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA=="],
|
||||
|
||||
"@babel/helper-compilation-targets": ["@babel/helper-compilation-targets@7.20.7", "", { "dependencies": { "@babel/compat-data": "^7.20.5", "@babel/helper-validator-option": "^7.18.6", "browserslist": "^4.21.3", "lru-cache": "^5.1.1", "semver": "^6.3.0" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "sha512-4tGORmfQcrc+bvrjb5y3dG9Mx1IOZjsHqQVUz7XCNHO+iTmqxWnVg3KRygjGmpRLJGdQSKuvFinbIb0CnZwHAQ=="],
|
||||
|
||||
"@babel/helper-environment-visitor": ["@babel/helper-environment-visitor@7.18.9", "", {}, "sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg=="],
|
||||
|
||||
"@babel/helper-function-name": ["@babel/helper-function-name@7.19.0", "", { "dependencies": { "@babel/template": "^7.18.10", "@babel/types": "^7.19.0" } }, "sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w=="],
|
||||
|
||||
"@babel/helper-hoist-variables": ["@babel/helper-hoist-variables@7.18.6", "", { "dependencies": { "@babel/types": "^7.18.6" } }, "sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q=="],
|
||||
|
||||
"@babel/helper-module-imports": ["@babel/helper-module-imports@7.18.6", "", { "dependencies": { "@babel/types": "^7.18.6" } }, "sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA=="],
|
||||
|
||||
"@babel/helper-module-transforms": ["@babel/helper-module-transforms@7.20.11", "", { "dependencies": { "@babel/helper-environment-visitor": "^7.18.9", "@babel/helper-module-imports": "^7.18.6", "@babel/helper-simple-access": "^7.20.2", "@babel/helper-split-export-declaration": "^7.18.6", "@babel/helper-validator-identifier": "^7.19.1", "@babel/template": "^7.20.7", "@babel/traverse": "^7.20.10", "@babel/types": "^7.20.7" } }, "sha512-uRy78kN4psmji1s2QtbtcCSaj/LILFDp0f/ymhpQH5QY3nljUZCaNWz9X1dEj/8MBdBEFECs7yRhKn8i7NjZgg=="],
|
||||
|
||||
"@babel/helper-plugin-utils": ["@babel/helper-plugin-utils@7.20.2", "", {}, "sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ=="],
|
||||
|
||||
"@babel/helper-simple-access": ["@babel/helper-simple-access@7.20.2", "", { "dependencies": { "@babel/types": "^7.20.2" } }, "sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA=="],
|
||||
|
||||
"@babel/helper-split-export-declaration": ["@babel/helper-split-export-declaration@7.18.6", "", { "dependencies": { "@babel/types": "^7.18.6" } }, "sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA=="],
|
||||
|
||||
"@babel/helper-string-parser": ["@babel/helper-string-parser@7.19.4", "", {}, "sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw=="],
|
||||
|
||||
"@babel/helper-validator-identifier": ["@babel/helper-validator-identifier@7.19.1", "", {}, "sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w=="],
|
||||
|
||||
"@babel/helper-validator-option": ["@babel/helper-validator-option@7.18.6", "", {}, "sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw=="],
|
||||
|
||||
"@babel/helpers": ["@babel/helpers@7.20.13", "", { "dependencies": { "@babel/template": "^7.20.7", "@babel/traverse": "^7.20.13", "@babel/types": "^7.20.7" } }, "sha512-nzJ0DWCL3gB5RCXbUO3KIMMsBY2Eqbx8mBpKGE/02PgyRQFcPQLbkQ1vyy596mZLaP+dAfD+R4ckASzNVmW3jg=="],
|
||||
|
||||
"@babel/highlight": ["@babel/highlight@7.18.6", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.18.6", "chalk": "^2.0.0", "js-tokens": "^4.0.0" } }, "sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g=="],
|
||||
|
||||
"@babel/parser": ["@babel/parser@7.20.15", "", { "bin": "./bin/babel-parser.js" }, "sha512-DI4a1oZuf8wC+oAJA9RW6ga3Zbe8RZFt7kD9i4qAspz3I/yHet1VvC3DiSy/fsUvv5pvJuNPh0LPOdCcqinDPg=="],
|
||||
|
||||
"@babel/plugin-syntax-jsx": ["@babel/plugin-syntax-jsx@7.18.6", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q=="],
|
||||
|
||||
"@babel/plugin-transform-react-display-name": ["@babel/plugin-transform-react-display-name@7.18.6", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.18.6" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-TV4sQ+T013n61uMoygyMRm+xf04Bd5oqFpv2jAEQwSZ8NwQA7zeRPg1LMVg2PWi3zWBz+CLKD+v5bcpZ/BS0aA=="],
|
||||
|
||||
"@babel/plugin-transform-react-jsx": ["@babel/plugin-transform-react-jsx@7.20.13", "", { "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-module-imports": "^7.18.6", "@babel/helper-plugin-utils": "^7.20.2", "@babel/plugin-syntax-jsx": "^7.18.6", "@babel/types": "^7.20.7" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-MmTZx/bkUrfJhhYAYt3Urjm+h8DQGrPrnKQ94jLo7NLuOU+T89a7IByhKmrb8SKhrIYIQ0FN0CHMbnFRen4qNw=="],
|
||||
|
||||
"@babel/plugin-transform-react-jsx-development": ["@babel/plugin-transform-react-jsx-development@7.18.6", "", { "dependencies": { "@babel/plugin-transform-react-jsx": "^7.18.6" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-SA6HEjwYFKF7WDjWcMcMGUimmw/nhNRDWxr+KaLSCrkD/LMDBvWRmHAYgE1HDeF8KUuI8OAu+RT6EOtKxSW2qA=="],
|
||||
|
||||
"@babel/plugin-transform-react-pure-annotations": ["@babel/plugin-transform-react-pure-annotations@7.18.6", "", { "dependencies": { "@babel/helper-annotate-as-pure": "^7.18.6", "@babel/helper-plugin-utils": "^7.18.6" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-I8VfEPg9r2TRDdvnHgPepTKvuRomzA8+u+nhY7qSI1fR2hRNebasZEETLyM5mAUr0Ku56OkXJ0I7NHJnO6cJiQ=="],
|
||||
|
||||
"@babel/preset-react": ["@babel/preset-react@7.18.6", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.18.6", "@babel/helper-validator-option": "^7.18.6", "@babel/plugin-transform-react-display-name": "^7.18.6", "@babel/plugin-transform-react-jsx": "^7.18.6", "@babel/plugin-transform-react-jsx-development": "^7.18.6", "@babel/plugin-transform-react-pure-annotations": "^7.18.6" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg=="],
|
||||
|
||||
"@babel/standalone": ["@babel/standalone@7.24.7", "", {}, "sha512-QRIRMJ2KTeN+vt4l9OjYlxDVXEpcor1Z6V7OeYzeBOw6Q8ew9oMTHjzTx8s6ClsZO7wVf6JgTRutihatN6K0yA=="],
|
||||
|
||||
"@babel/template": ["@babel/template@7.20.7", "", { "dependencies": { "@babel/code-frame": "^7.18.6", "@babel/parser": "^7.20.7", "@babel/types": "^7.20.7" } }, "sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw=="],
|
||||
|
||||
"@babel/traverse": ["@babel/traverse@7.20.13", "", { "dependencies": { "@babel/code-frame": "^7.18.6", "@babel/generator": "^7.20.7", "@babel/helper-environment-visitor": "^7.18.9", "@babel/helper-function-name": "^7.19.0", "@babel/helper-hoist-variables": "^7.18.6", "@babel/helper-split-export-declaration": "^7.18.6", "@babel/parser": "^7.20.13", "@babel/types": "^7.20.7", "debug": "^4.1.0", "globals": "^11.1.0" } }, "sha512-kMJXfF0T6DIS9E8cgdLCSAL+cuCK+YEZHWiLK0SXpTo8YRj5lpJu3CDNKiIBCne4m9hhTIqUg6SYTAI39tAiVQ=="],
|
||||
|
||||
"@babel/types": ["@babel/types@7.20.7", "", { "dependencies": { "@babel/helper-string-parser": "^7.19.4", "@babel/helper-validator-identifier": "^7.19.1", "to-fast-properties": "^2.0.0" } }, "sha512-69OnhBxSSgK0OzTJai4kyPDiKTIe3j+ctaHdIGVbRahTLAT7L3R9oeXHC2aVSuGYt3cVnoAMDmOCgJ2yaiLMvg=="],
|
||||
|
||||
"@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.14.54", "", { "os": "linux", "cpu": "none" }, "sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw=="],
|
||||
|
||||
"@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.1.1", "", { "dependencies": { "@jridgewell/set-array": "^1.0.0", "@jridgewell/sourcemap-codec": "^1.4.10" } }, "sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w=="],
|
||||
|
||||
"@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.0", "", {}, "sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w=="],
|
||||
|
||||
"@jridgewell/set-array": ["@jridgewell/set-array@1.1.2", "", {}, "sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw=="],
|
||||
|
||||
"@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.4.14", "", {}, "sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw=="],
|
||||
|
||||
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.17", "", { "dependencies": { "@jridgewell/resolve-uri": "3.1.0", "@jridgewell/sourcemap-codec": "1.4.14" } }, "sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g=="],
|
||||
|
||||
"@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="],
|
||||
|
||||
"@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="],
|
||||
|
||||
"@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="],
|
||||
|
||||
"@swc/core": ["@swc/core@1.3.35", "", { "dependencies": { "@swc/core-darwin-arm64": "1.3.35", "@swc/core-darwin-x64": "1.3.35", "@swc/core-linux-arm-gnueabihf": "1.3.35", "@swc/core-linux-arm64-gnu": "1.3.35", "@swc/core-linux-arm64-musl": "1.3.35", "@swc/core-linux-x64-gnu": "1.3.35", "@swc/core-linux-x64-musl": "1.3.35", "@swc/core-win32-arm64-msvc": "1.3.35", "@swc/core-win32-ia32-msvc": "1.3.35", "@swc/core-win32-x64-msvc": "1.3.35" } }, "sha512-KmiBin0XSVzJhzX19zTiCqmLslZ40Cl7zqskJcTDeIrRhfgKdiAsxzYUanJgMJIRjYtl9Kcg1V/Ip2o2wL8v3w=="],
|
||||
|
||||
"@swc/core-darwin-arm64": ["@swc/core-darwin-arm64@1.3.35", "", { "os": "darwin", "cpu": "arm64" }, "sha512-zQUFkHx4gZpu0uo2IspvPnKsz8bsdXd5bC33xwjtoAI1cpLerDyqo4v2zIahEp+FdKZjyVsLHtfJiQiA1Qka3A=="],
|
||||
|
||||
"@swc/core-darwin-x64": ["@swc/core-darwin-x64@1.3.35", "", { "os": "darwin", "cpu": "x64" }, "sha512-oOSkSGWtALovaw22lNevKD434OQTPf8X+dVPvPMrJXJpJ34dWDlFWpLntoc+arvKLNZ7LQmTuk8rR1hkrAY7cw=="],
|
||||
|
||||
"@swc/core-linux-arm-gnueabihf": ["@swc/core-linux-arm-gnueabihf@1.3.35", "", { "os": "linux", "cpu": "arm" }, "sha512-Yie8k00O6O8BCATS/xeKStquV4OYSskUGRDXBQVDw1FrE23PHaSeHCgg4q6iNZjJzXCOJbaTCKnYoIDn9DMf7A=="],
|
||||
|
||||
"@swc/core-linux-arm64-gnu": ["@swc/core-linux-arm64-gnu@1.3.35", "", { "os": "linux", "cpu": "arm64" }, "sha512-Zlv3WHa/4x2p51HSvjUWXHfSe1Gl2prqImUZJc8NZOlj75BFzVuR0auhQ+LbwvIQ3gaA1LODX9lyS9wXL3yjxA=="],
|
||||
|
||||
"@swc/core-linux-arm64-musl": ["@swc/core-linux-arm64-musl@1.3.35", "", { "os": "linux", "cpu": "arm64" }, "sha512-u6tCYsrSyZ8U+4jLMA/O82veBfLy2aUpn51WxQaeH7wqZGy9TGSJXoO8vWxARQ6b72vjsnKDJHP4MD8hFwcctg=="],
|
||||
|
||||
"@swc/core-linux-x64-gnu": ["@swc/core-linux-x64-gnu@1.3.35", "", { "os": "linux", "cpu": "x64" }, "sha512-Dtxf2IbeH7XlNhP1Qt2/MvUPkpEbn7hhGfpSRs4ot8D3Vf5QEX4S/QtC1OsFWuciiYgHAT1Ybjt4xZic9DSkmA=="],
|
||||
|
||||
"@swc/core-linux-x64-musl": ["@swc/core-linux-x64-musl@1.3.35", "", { "os": "linux", "cpu": "x64" }, "sha512-4XavNJ60GprjpTiESCu5daJUnmErixPAqDitJSMu4TV32LNIE8G00S9pDLXinDTW1rgcGtQdq1NLkNRmwwovtg=="],
|
||||
|
||||
"@swc/core-win32-arm64-msvc": ["@swc/core-win32-arm64-msvc@1.3.35", "", { "os": "win32", "cpu": "arm64" }, "sha512-dNGfKCUSX2M4qVyaS80Lyos0FkXyHRCvrdQ2Y4Hrg3FVokiuw3yY6fLohpUfQ5ws3n2A39dh7jGDeh34+l0sGA=="],
|
||||
|
||||
"@swc/core-win32-ia32-msvc": ["@swc/core-win32-ia32-msvc@1.3.35", "", { "os": "win32", "cpu": "ia32" }, "sha512-ChuPSrDR+JBf7S7dEKPicnG8A3bM0uWPsW2vG+V2wH4iNfNxKVemESHosmYVeEZXqMpomNMvLyeHep1rjRsc0Q=="],
|
||||
|
||||
"@swc/core-win32-x64-msvc": ["@swc/core-win32-x64-msvc@1.3.35", "", { "os": "win32", "cpu": "x64" }, "sha512-/RvphT4WfuGfIK84Ha0dovdPrKB1bW/mc+dtdmhv2E3EGkNc5FoueNwYmXWRimxnU7X0X7IkcRhyKB4G5DeAmg=="],
|
||||
|
||||
"@types/fs-extra": ["@types/fs-extra@11.0.4", "", { "dependencies": { "@types/jsonfile": "*", "@types/node": "*" } }, "sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ=="],
|
||||
|
||||
"@types/jsonfile": ["@types/jsonfile@6.1.4", "", { "dependencies": { "@types/node": "*" } }, "sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ=="],
|
||||
|
||||
"@types/minimist": ["@types/minimist@1.2.5", "", {}, "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag=="],
|
||||
|
||||
"@types/node": ["@types/node@18.19.8", "", { "dependencies": { "undici-types": "~5.26.4" } }, "sha512-g1pZtPhsvGVTwmeVoexWZLTQaOvXwoSq//pTL0DHeNzUDrFnir4fgETdhjhIxjVnN+hKOuh98+E1eMLnUXstFg=="],
|
||||
|
||||
"@types/ps-tree": ["@types/ps-tree@1.1.6", "", {}, "sha512-PtrlVaOaI44/3pl3cvnlK+GxOM3re2526TJvPvh7W+keHIXdV4TE0ylpPBAcvFQCbGitaTXwL9u+RF7qtVeazQ=="],
|
||||
|
||||
"@types/which": ["@types/which@3.0.3", "", {}, "sha512-2C1+XoY0huExTbs8MQv1DuS5FS86+SEjdM9F/+GS61gg5Hqbtj8ZiDSx8MfWcyei907fIPbfPGCOrNUTnVHY1g=="],
|
||||
|
||||
"ansi-regex": ["ansi-regex@6.0.1", "", {}, "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA=="],
|
||||
|
||||
"ansi-styles": ["ansi-styles@3.2.1", "", { "dependencies": { "color-convert": "^1.9.0" } }, "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA=="],
|
||||
|
||||
"benchmark": ["benchmark@2.1.4", "", { "dependencies": { "lodash": "^4.17.4", "platform": "^1.3.3" } }, "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ=="],
|
||||
|
||||
"braces": ["braces@3.0.2", "", { "dependencies": { "fill-range": "^7.0.1" } }, "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A=="],
|
||||
|
||||
"browserslist": ["browserslist@4.21.5", "", { "dependencies": { "caniuse-lite": "^1.0.30001449", "electron-to-chromium": "^1.4.284", "node-releases": "^2.0.8", "update-browserslist-db": "^1.0.10" }, "bin": { "browserslist": "cli.js" } }, "sha512-tUkiguQGW7S3IhB7N+c2MV/HZPSCPAAiYBZXLsBhFB/PCy6ZKKsZrmBayHV9fdGV/ARIfJ14NkxKzRDjvp7L6w=="],
|
||||
|
||||
"caniuse-lite": ["caniuse-lite@1.0.30001456", "", {}, "sha512-XFHJY5dUgmpMV25UqaD4kVq2LsiaU5rS8fb0f17pCoXQiQslzmFgnfOxfvo1bTpTqf7dwG/N/05CnLCnOEKmzA=="],
|
||||
|
||||
"chalk": ["chalk@5.3.0", "", {}, "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w=="],
|
||||
|
||||
"color": ["color@4.2.3", "", { "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" } }, "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A=="],
|
||||
|
||||
"color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
|
||||
|
||||
"color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],
|
||||
|
||||
"color-string": ["color-string@1.9.1", "", { "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg=="],
|
||||
|
||||
"convert-source-map": ["convert-source-map@1.9.0", "", {}, "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A=="],
|
||||
|
||||
"cross-spawn": ["cross-spawn@7.0.3", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w=="],
|
||||
|
||||
"data-uri-to-buffer": ["data-uri-to-buffer@4.0.1", "", {}, "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A=="],
|
||||
|
||||
"debug": ["debug@4.3.4", "", { "dependencies": { "ms": "2.1.2" } }, "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ=="],
|
||||
|
||||
"dir-glob": ["dir-glob@3.0.1", "", { "dependencies": { "path-type": "^4.0.0" } }, "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA=="],
|
||||
|
||||
"duplexer": ["duplexer@0.1.2", "", {}, "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg=="],
|
||||
|
||||
"electron-to-chromium": ["electron-to-chromium@1.4.302", "", {}, "sha512-Uk7C+7aPBryUR1Fwvk9VmipBcN9fVsqBO57jV2ZjTm+IZ6BMNqu7EDVEg2HxCNufk6QcWlFsBkhQyQroB2VWKw=="],
|
||||
|
||||
"emoji-regex": ["emoji-regex@10.3.0", "", {}, "sha512-QpLs9D9v9kArv4lfDEgg1X/gN5XLnf/A6l9cs8SPZLRZR3ZkY9+kwIQTxm+fsSej5UMYGE8fdoaZVIBlqG0XTw=="],
|
||||
|
||||
"esbuild": ["esbuild@0.14.54", "", { "dependencies": { "@esbuild/linux-loong64": "0.14.54", "esbuild-android-64": "0.14.54", "esbuild-android-arm64": "0.14.54", "esbuild-darwin-64": "0.14.54", "esbuild-darwin-arm64": "0.14.54", "esbuild-freebsd-64": "0.14.54", "esbuild-freebsd-arm64": "0.14.54", "esbuild-linux-32": "0.14.54", "esbuild-linux-64": "0.14.54", "esbuild-linux-arm": "0.14.54", "esbuild-linux-arm64": "0.14.54", "esbuild-linux-mips64le": "0.14.54", "esbuild-linux-ppc64le": "0.14.54", "esbuild-linux-riscv64": "0.14.54", "esbuild-linux-s390x": "0.14.54", "esbuild-netbsd-64": "0.14.54", "esbuild-openbsd-64": "0.14.54", "esbuild-sunos-64": "0.14.54", "esbuild-windows-32": "0.14.54", "esbuild-windows-64": "0.14.54", "esbuild-windows-arm64": "0.14.54" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-Cy9llcy8DvET5uznocPyqL3BFRrFXSVqbgpMJ9Wz8oVjZlh/zUSNbPRbov0VX7VxN2JH1Oa0uNxZ7eLRb62pJA=="],
|
||||
|
||||
"esbuild-android-64": ["esbuild-android-64@0.14.54", "", { "os": "android", "cpu": "x64" }, "sha512-Tz2++Aqqz0rJ7kYBfz+iqyE3QMycD4vk7LBRyWaAVFgFtQ/O8EJOnVmTOiDWYZ/uYzB4kvP+bqejYdVKzE5lAQ=="],
|
||||
|
||||
"esbuild-android-arm64": ["esbuild-android-arm64@0.14.54", "", { "os": "android", "cpu": "arm64" }, "sha512-F9E+/QDi9sSkLaClO8SOV6etqPd+5DgJje1F9lOWoNncDdOBL2YF59IhsWATSt0TLZbYCf3pNlTHvVV5VfHdvg=="],
|
||||
|
||||
"esbuild-darwin-64": ["esbuild-darwin-64@0.14.54", "", { "os": "darwin", "cpu": "x64" }, "sha512-jtdKWV3nBviOd5v4hOpkVmpxsBy90CGzebpbO9beiqUYVMBtSc0AL9zGftFuBon7PNDcdvNCEuQqw2x0wP9yug=="],
|
||||
|
||||
"esbuild-darwin-arm64": ["esbuild-darwin-arm64@0.14.54", "", { "os": "darwin", "cpu": "arm64" }, "sha512-OPafJHD2oUPyvJMrsCvDGkRrVCar5aVyHfWGQzY1dWnzErjrDuSETxwA2HSsyg2jORLY8yBfzc1MIpUkXlctmw=="],
|
||||
|
||||
"esbuild-freebsd-64": ["esbuild-freebsd-64@0.14.54", "", { "os": "freebsd", "cpu": "x64" }, "sha512-OKwd4gmwHqOTp4mOGZKe/XUlbDJ4Q9TjX0hMPIDBUWWu/kwhBAudJdBoxnjNf9ocIB6GN6CPowYpR/hRCbSYAg=="],
|
||||
|
||||
"esbuild-freebsd-arm64": ["esbuild-freebsd-arm64@0.14.54", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-sFwueGr7OvIFiQT6WeG0jRLjkjdqWWSrfbVwZp8iMP+8UHEHRBvlaxL6IuKNDwAozNUmbb8nIMXa7oAOARGs1Q=="],
|
||||
|
||||
"esbuild-linux-32": ["esbuild-linux-32@0.14.54", "", { "os": "linux", "cpu": "ia32" }, "sha512-1ZuY+JDI//WmklKlBgJnglpUL1owm2OX+8E1syCD6UAxcMM/XoWd76OHSjl/0MR0LisSAXDqgjT3uJqT67O3qw=="],
|
||||
|
||||
"esbuild-linux-64": ["esbuild-linux-64@0.14.54", "", { "os": "linux", "cpu": "x64" }, "sha512-EgjAgH5HwTbtNsTqQOXWApBaPVdDn7XcK+/PtJwZLT1UmpLoznPd8c5CxqsH2dQK3j05YsB3L17T8vE7cp4cCg=="],
|
||||
|
||||
"esbuild-linux-arm": ["esbuild-linux-arm@0.14.54", "", { "os": "linux", "cpu": "arm" }, "sha512-qqz/SjemQhVMTnvcLGoLOdFpCYbz4v4fUo+TfsWG+1aOu70/80RV6bgNpR2JCrppV2moUQkww+6bWxXRL9YMGw=="],
|
||||
|
||||
"esbuild-linux-arm64": ["esbuild-linux-arm64@0.14.54", "", { "os": "linux", "cpu": "arm64" }, "sha512-WL71L+0Rwv+Gv/HTmxTEmpv0UgmxYa5ftZILVi2QmZBgX3q7+tDeOQNqGtdXSdsL8TQi1vIaVFHUPDe0O0kdig=="],
|
||||
|
||||
"esbuild-linux-mips64le": ["esbuild-linux-mips64le@0.14.54", "", { "os": "linux", "cpu": "none" }, "sha512-qTHGQB8D1etd0u1+sB6p0ikLKRVuCWhYQhAHRPkO+OF3I/iSlTKNNS0Lh2Oc0g0UFGguaFZZiPJdJey3AGpAlw=="],
|
||||
|
||||
"esbuild-linux-ppc64le": ["esbuild-linux-ppc64le@0.14.54", "", { "os": "linux", "cpu": "ppc64" }, "sha512-j3OMlzHiqwZBDPRCDFKcx595XVfOfOnv68Ax3U4UKZ3MTYQB5Yz3X1mn5GnodEVYzhtZgxEBidLWeIs8FDSfrQ=="],
|
||||
|
||||
"esbuild-linux-riscv64": ["esbuild-linux-riscv64@0.14.54", "", { "os": "linux", "cpu": "none" }, "sha512-y7Vt7Wl9dkOGZjxQZnDAqqn+XOqFD7IMWiewY5SPlNlzMX39ocPQlOaoxvT4FllA5viyV26/QzHtvTjVNOxHZg=="],
|
||||
|
||||
"esbuild-linux-s390x": ["esbuild-linux-s390x@0.14.54", "", { "os": "linux", "cpu": "s390x" }, "sha512-zaHpW9dziAsi7lRcyV4r8dhfG1qBidQWUXweUjnw+lliChJqQr+6XD71K41oEIC3Mx1KStovEmlzm+MkGZHnHA=="],
|
||||
|
||||
"esbuild-netbsd-64": ["esbuild-netbsd-64@0.14.54", "", { "os": "none", "cpu": "x64" }, "sha512-PR01lmIMnfJTgeU9VJTDY9ZerDWVFIUzAtJuDHwwceppW7cQWjBBqP48NdeRtoP04/AtO9a7w3viI+PIDr6d+w=="],
|
||||
|
||||
"esbuild-openbsd-64": ["esbuild-openbsd-64@0.14.54", "", { "os": "openbsd", "cpu": "x64" }, "sha512-Qyk7ikT2o7Wu76UsvvDS5q0amJvmRzDyVlL0qf5VLsLchjCa1+IAvd8kTBgUxD7VBUUVgItLkk609ZHUc1oCaw=="],
|
||||
|
||||
"esbuild-sunos-64": ["esbuild-sunos-64@0.14.54", "", { "os": "sunos", "cpu": "x64" }, "sha512-28GZ24KmMSeKi5ueWzMcco6EBHStL3B6ubM7M51RmPwXQGLe0teBGJocmWhgwccA1GeFXqxzILIxXpHbl9Q/Kw=="],
|
||||
|
||||
"esbuild-windows-32": ["esbuild-windows-32@0.14.54", "", { "os": "win32", "cpu": "ia32" }, "sha512-T+rdZW19ql9MjS7pixmZYVObd9G7kcaZo+sETqNH4RCkuuYSuv9AGHUVnPoP9hhuE1WM1ZimHz1CIBHBboLU7w=="],
|
||||
|
||||
"esbuild-windows-64": ["esbuild-windows-64@0.14.54", "", { "os": "win32", "cpu": "x64" }, "sha512-AoHTRBUuYwXtZhjXZbA1pGfTo8cJo3vZIcWGLiUcTNgHpJJMC1rVA44ZereBHMJtotyN71S8Qw0npiCIkW96cQ=="],
|
||||
|
||||
"esbuild-windows-arm64": ["esbuild-windows-arm64@0.14.54", "", { "os": "win32", "cpu": "arm64" }, "sha512-M0kuUvXhot1zOISQGXwWn6YtS+Y/1RT9WrVIOywZnJHo3jCDyewAc79aKNQWFCQm+xNHVTq9h8dZKvygoXQQRg=="],
|
||||
|
||||
"escalade": ["escalade@3.1.1", "", {}, "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw=="],
|
||||
|
||||
"escape-string-regexp": ["escape-string-regexp@1.0.5", "", {}, "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg=="],
|
||||
|
||||
"event-stream": ["event-stream@3.3.4", "", { "dependencies": { "duplexer": "~0.1.1", "from": "~0", "map-stream": "~0.1.0", "pause-stream": "0.0.11", "split": "0.3", "stream-combiner": "~0.0.4", "through": "~2.3.1" } }, "sha512-QHpkERcGsR0T7Qm3HNJSyXKEEj8AHNxkY3PK8TS2KJvQ7NiSHe3DDpwVKKtoYprL/AreyzFBeIkBIWChAqn60g=="],
|
||||
|
||||
"eventemitter3": ["eventemitter3@5.0.0", "", {}, "sha512-riuVbElZZNXLeLEoprfNYoDSwTBRR44X3mnhdI1YcnENpWTCsTTVZ2zFuqQcpoyqPQIUXdiPEU0ECAq0KQRaHg=="],
|
||||
|
||||
"execa": ["execa@8.0.1", "", { "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^8.0.1", "human-signals": "^5.0.0", "is-stream": "^3.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^5.1.0", "onetime": "^6.0.0", "signal-exit": "^4.1.0", "strip-final-newline": "^3.0.0" } }, "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg=="],
|
||||
|
||||
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
|
||||
|
||||
"fast-glob": ["fast-glob@3.3.1", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.4" } }, "sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg=="],
|
||||
|
||||
"fastq": ["fastq@1.15.0", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw=="],
|
||||
|
||||
"fdir": ["fdir@6.1.0", "", { "peerDependencies": { "picomatch": "2.x" } }, "sha512-274qhz5PxNnA/fybOu6apTCUnM0GnO3QazB6VH+oag/7DQskdYq8lm07ZSm90kEQuWYH5GvjAxGruuHrEr0bcg=="],
|
||||
|
||||
"fetch-blob": ["fetch-blob@3.2.0", "", { "dependencies": { "node-domexception": "^1.0.0", "web-streams-polyfill": "^3.0.3" } }, "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ=="],
|
||||
|
||||
"fill-range": ["fill-range@7.0.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ=="],
|
||||
|
||||
"formdata-polyfill": ["formdata-polyfill@4.0.10", "", { "dependencies": { "fetch-blob": "^3.1.2" } }, "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g=="],
|
||||
|
||||
"from": ["from@0.1.7", "", {}, "sha512-twe20eF1OxVxp/ML/kq2p1uc6KvFK/+vs8WjEbeKmV2He22MKm7YF2ANIt+EOqhJ5L3K/SuuPhk0hWQDjOM23g=="],
|
||||
|
||||
"fs-extra": ["fs-extra@11.2.0", "", { "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw=="],
|
||||
|
||||
"fx": ["fx@31.0.0", "", { "bin": { "fx": "index.js" } }, "sha512-OoeYSPKqNKmfnH4s+rGYI0c8OZmqqOOXsUtqy0YyHqQQoQSDiDs3m3M9uXKx5OQR+jDx7/FhYqpO3kl/As/xgg=="],
|
||||
|
||||
"gensync": ["gensync@1.0.0-beta.2", "", {}, "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg=="],
|
||||
|
||||
"get-east-asian-width": ["get-east-asian-width@1.2.0", "", {}, "sha512-2nk+7SIVb14QrgXFHcm84tD4bKQz0RxPuMT8Ag5KPOq7J5fEmAg0UbXdTOSHqNuHSU28k55qnceesxXRZGzKWA=="],
|
||||
|
||||
"get-stream": ["get-stream@8.0.1", "", {}, "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA=="],
|
||||
|
||||
"glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
|
||||
|
||||
"globals": ["globals@11.12.0", "", {}, "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA=="],
|
||||
|
||||
"globby": ["globby@13.2.2", "", { "dependencies": { "dir-glob": "^3.0.1", "fast-glob": "^3.3.0", "ignore": "^5.2.4", "merge2": "^1.4.1", "slash": "^4.0.0" } }, "sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w=="],
|
||||
|
||||
"graceful-fs": ["graceful-fs@4.2.11", "", {}, "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="],
|
||||
|
||||
"has-flag": ["has-flag@3.0.0", "", {}, "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw=="],
|
||||
|
||||
"human-signals": ["human-signals@5.0.0", "", {}, "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ=="],
|
||||
|
||||
"ignore": ["ignore@5.3.0", "", {}, "sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg=="],
|
||||
|
||||
"is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="],
|
||||
|
||||
"is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
|
||||
|
||||
"is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="],
|
||||
|
||||
"is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],
|
||||
|
||||
"is-stream": ["is-stream@3.0.0", "", {}, "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA=="],
|
||||
|
||||
"isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="],
|
||||
|
||||
"js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="],
|
||||
|
||||
"jsesc": ["jsesc@2.5.2", "", { "bin": { "jsesc": "bin/jsesc" } }, "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA=="],
|
||||
|
||||
"json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="],
|
||||
|
||||
"jsonfile": ["jsonfile@6.1.0", "", { "dependencies": { "universalify": "^2.0.0" }, "optionalDependencies": { "graceful-fs": "^4.1.6" } }, "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ=="],
|
||||
|
||||
"lodash": ["lodash@4.17.21", "", {}, "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="],
|
||||
|
||||
"loose-envify": ["loose-envify@1.4.0", "", { "dependencies": { "js-tokens": "^3.0.0 || ^4.0.0" }, "bin": { "loose-envify": "cli.js" } }, "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q=="],
|
||||
|
||||
"lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="],
|
||||
|
||||
"map-stream": ["map-stream@0.1.0", "", {}, "sha512-CkYQrPYZfWnu/DAmVCpTSX/xHpKZ80eKh2lAkyA6AJTef6bW+6JpbQZN5rofum7da+SyN1bi5ctTm+lTfcCW3g=="],
|
||||
|
||||
"merge-stream": ["merge-stream@2.0.0", "", {}, "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w=="],
|
||||
|
||||
"merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="],
|
||||
|
||||
"micromatch": ["micromatch@4.0.5", "", { "dependencies": { "braces": "^3.0.2", "picomatch": "^2.3.1" } }, "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA=="],
|
||||
|
||||
"mimic-fn": ["mimic-fn@4.0.0", "", {}, "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw=="],
|
||||
|
||||
"minimist": ["minimist@1.2.8", "", {}, "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA=="],
|
||||
|
||||
"mitata": ["mitata@1.0.25", "", {}, "sha512-0v5qZtVW5vwj9FDvYfraR31BMDcRLkhSFWPTLaxx/Z3/EvScfVtAAWtMI2ArIbBcwh7P86dXh0lQWKiXQPlwYA=="],
|
||||
|
||||
"ms": ["ms@2.1.2", "", {}, "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="],
|
||||
|
||||
"node-domexception": ["node-domexception@1.0.0", "", {}, "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ=="],
|
||||
|
||||
"node-fetch": ["node-fetch@3.3.1", "", { "dependencies": { "data-uri-to-buffer": "^4.0.0", "fetch-blob": "^3.1.4", "formdata-polyfill": "^4.0.10" } }, "sha512-cRVc/kyto/7E5shrWca1Wsea4y6tL9iYJE5FBCius3JQfb/4P4I295PfhgbJQBLTx6lATE4z+wK0rPM4VS2uow=="],
|
||||
|
||||
"node-releases": ["node-releases@2.0.10", "", {}, "sha512-5GFldHPXVG/YZmFzJvKK2zDSzPKhEp0+ZR5SVaoSag9fsL5YgHbUHDfnG5494ISANDcK4KwPXAx2xqVEydmd7w=="],
|
||||
|
||||
"npm-run-path": ["npm-run-path@5.2.0", "", { "dependencies": { "path-key": "^4.0.0" } }, "sha512-W4/tgAXFqFA0iL7fk0+uQ3g7wkL8xJmx3XdK0VGb4cHW//eZTtKGvFBBoRKVTpY7n6ze4NL9ly7rgXcHufqXKg=="],
|
||||
|
||||
"onetime": ["onetime@6.0.0", "", { "dependencies": { "mimic-fn": "^4.0.0" } }, "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ=="],
|
||||
|
||||
"path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],
|
||||
|
||||
"path-type": ["path-type@4.0.0", "", {}, "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw=="],
|
||||
|
||||
"pause-stream": ["pause-stream@0.0.11", "", { "dependencies": { "through": "~2.3" } }, "sha512-e3FBlXLmN/D1S+zHzanP4E/4Z60oFAa3O051qt1pxa7DEJWKAyil6upYVXCWadEnuoqa4Pkc9oUx9zsxYeRv8A=="],
|
||||
|
||||
"picocolors": ["picocolors@1.0.0", "", {}, "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ=="],
|
||||
|
||||
"picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
|
||||
|
||||
"platform": ["platform@1.3.6", "", {}, "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg=="],
|
||||
|
||||
"ps-tree": ["ps-tree@1.2.0", "", { "dependencies": { "event-stream": "=3.3.4" }, "bin": { "ps-tree": "./bin/ps-tree.js" } }, "sha512-0VnamPPYHl4uaU/nSFeZZpR21QAWRz+sRv4iW9+v/GS/J5U5iZB5BNN6J0RMoOvdx2gWM2+ZFMIm58q24e4UYA=="],
|
||||
|
||||
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
|
||||
|
||||
"react": ["react@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ=="],
|
||||
|
||||
"react-dom": ["react-dom@18.3.1", "", { "dependencies": { "loose-envify": "^1.1.0", "scheduler": "^0.23.2" }, "peerDependencies": { "react": "^18.3.1" } }, "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw=="],
|
||||
|
||||
"reusify": ["reusify@1.0.4", "", {}, "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw=="],
|
||||
|
||||
"run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="],
|
||||
|
||||
"scheduler": ["scheduler@0.23.2", "", { "dependencies": { "loose-envify": "^1.1.0" } }, "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ=="],
|
||||
|
||||
"semver": ["semver@6.3.0", "", { "bin": { "semver": "./bin/semver.js" } }, "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw=="],
|
||||
|
||||
"shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="],
"shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="],
"signal-exit": ["signal-exit@4.1.0", "", {}, "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="],
"simple-swizzle": ["simple-swizzle@0.2.2", "", { "dependencies": { "is-arrayish": "^0.3.1" } }, "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg=="],
"slash": ["slash@4.0.0", "", {}, "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew=="],
"split": ["split@0.3.3", "", { "dependencies": { "through": "2" } }, "sha512-wD2AeVmxXRBoX44wAycgjVpMhvbwdI2aZjCkvfNcH1YqHQvJVa1duWc73OyVGJUc05fhFaTZeQ/PYsrmyH0JVA=="],
"stream-combiner": ["stream-combiner@0.0.4", "", { "dependencies": { "duplexer": "~0.1.1" } }, "sha512-rT00SPnTVyRsaSz5zgSPma/aHSOic5U1prhYdRy5HS2kTZviFpmDgzilbtsJsxiroqACmayynDN/9VzIbX5DOw=="],
"string-width": ["string-width@7.1.0", "", { "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", "strip-ansi": "^7.1.0" } }, "sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw=="],
"strip-ansi": ["strip-ansi@7.1.0", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ=="],
"strip-final-newline": ["strip-final-newline@3.0.0", "", {}, "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw=="],
"supports-color": ["supports-color@5.5.0", "", { "dependencies": { "has-flag": "^3.0.0" } }, "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow=="],
"through": ["through@2.3.8", "", {}, "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg=="],
"tinycolor2": ["tinycolor2@1.6.0", "", {}, "sha512-XPaBkWQJdsf3pLKJV9p4qN/S+fm2Oj8AIPo1BTUhg5oxkvm9+SVEGFdhyOz7tTdUTfvxMiAs4sp6/eZO2Ew+pw=="],
"to-fast-properties": ["to-fast-properties@2.0.0", "", {}, "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog=="],
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
"undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],
"universalify": ["universalify@2.0.1", "", {}, "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw=="],
"update-browserslist-db": ["update-browserslist-db@1.0.10", "", { "dependencies": { "escalade": "^3.1.1", "picocolors": "^1.0.0" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "browserslist-lint": "cli.js" } }, "sha512-OztqDenkfFkbSG+tRxBeAnCVPckDBcvibKd35yDONx6OU8N7sqgwc7rCbkJ/WcYtVRZ4ba68d6byhC21GFh7sQ=="],
"web-streams-polyfill": ["web-streams-polyfill@3.3.2", "", {}, "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ=="],
"webpod": ["webpod@0.0.2", "", { "bin": { "webpod": "dist/index.js" } }, "sha512-cSwwQIeg8v4i3p4ajHhwgR7N6VyxAf+KYSSsY6Pd3aETE+xEU4vbitz7qQkB0I321xnhDdgtxuiSfk5r/FVtjg=="],
"which": ["which@3.0.1", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "bin/which.js" } }, "sha512-XA1b62dzQzLfaEOSQFTCOd5KFf/1VSzZo7/7TUjnya6u0vGGKzU96UQBZTAThCb2j4/xjBAyii1OhRLJEivHvg=="],
"yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="],
"yaml": ["yaml@2.3.4", "", {}, "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA=="],
"zx": ["zx@7.2.3", "", { "dependencies": { "@types/fs-extra": "^11.0.1", "@types/minimist": "^1.2.2", "@types/node": "^18.16.3", "@types/ps-tree": "^1.1.2", "@types/which": "^3.0.0", "chalk": "^5.2.0", "fs-extra": "^11.1.1", "fx": "*", "globby": "^13.1.4", "minimist": "^1.2.8", "node-fetch": "3.3.1", "ps-tree": "^1.2.0", "webpod": "^0", "which": "^3.0.0", "yaml": "^2.2.2" }, "bin": { "zx": "build/cli.js" } }, "sha512-QODu38nLlYXg/B/Gw7ZKiZrvPkEsjPN3LQ5JFXM7h0JvwhEdPNNl+4Ao1y4+o3CLNiDUNcwzQYZ4/Ko7kKzCMA=="],
"@babel/generator/@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.2", "", { "dependencies": { "@jridgewell/set-array": "^1.0.1", "@jridgewell/sourcemap-codec": "^1.4.10", "@jridgewell/trace-mapping": "^0.3.9" } }, "sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A=="],
"@babel/highlight/chalk": ["chalk@2.4.2", "", { "dependencies": { "ansi-styles": "^3.2.1", "escape-string-regexp": "^1.0.5", "supports-color": "^5.3.0" } }, "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ=="],
"ansi-styles/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
"cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
"npm-run-path/path-key": ["path-key@4.0.0", "", {}, "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ=="],
"ansi-styles/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
}
}
bench/bundle/.gitignore vendored (171 lines deleted)
@@ -1,171 +0,0 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore

# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# Snowpack dependency directory (https://snowpack.dev/)
web_modules/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional stylelint cache
.stylelintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local

# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache

# Next.js build output
.next
out

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/

# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# vuepress v2.x temp and cache directory
.temp
.cache

# Docusaurus cache and generated files
.docusaurus

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port

# Stores VSCode versions used for testing VSCode extensions
.vscode-test

# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*

esbuild
@@ -1,40 +0,0 @@
# Bundler benchmark

This is a performance benchmark of the following bundlers:

- Bun
- esbuild
- Parcel 2
- Rollup + Terser
- Webpack

It is an exact copy of [`esbuild`'s benchmark](https://github.com/evanw/esbuild/blob/main/Makefile), except that Bun [has been added](https://github.com/colinhacks/esbuild/commit/1b928b7981aa7edfadf77fcf8931bb8d6f38cd96). The benchmark bundles 10 copies of the large [three.js](https://threejs.org/) library, with minification and source maps enabled.
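
As a rough sketch of what the Bun step boils down to (not copied from `run-bench.sh`; the entry file and output directory names here are assumptions), a direct invocation would look something like:

```sh
# Hypothetical example: bundle an entry point that imports the ten three.js copies,
# minified and with external source maps, mirroring the benchmark's settings.
bun build ./entry.js --outdir=./out --minify --sourcemap=external
```
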
To run the benchmark:

```sh
$ chmod +x run-bench.sh
$ ./run-bench.sh
```

Each bundler writes various output to the console. Scan through the results for lines that look like this underneath each bundler's output:

```sh
real <number>
user <number>
sys <number>
```

These lines are generated by the `time` command, which is used to time each build.
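
If you only want the wall-clock numbers, a quick filter over the combined output is enough (illustrative only; it assumes the timing lines reach the stdout/stderr of `run-bench.sh` as shown above):

```sh
./run-bench.sh 2>&1 | grep -E '^real'
```
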
## Results

The `real` (wall-clock) results, as run on a 16-inch M1 MacBook Pro:

| Bundler | Time   |
| ------- | ------ |
| Bun     | 0.17s  |
| esbuild | 0.33s  |
| Rollup  | 18.82s |
| Webpack | 26.21s |
| Parcel  | 17.95s |
Binary file not shown.
@@ -1 +0,0 @@
console.log("Hello via Bun!");
@@ -1,8 +0,0 @@
{
  "name": "bundle",
  "module": "index.ts",
  "type": "module",
  "devDependencies": {
    "bun-types": "^0.7.0"
  }
}
@@ -1,3 +0,0 @@
git clone git@github.com:colinhacks/esbuild.git
cd esbuild
make bench-three
@@ -1,20 +0,0 @@
{
  "compilerOptions": {
    "lib": [
      "ESNext"
    ],
    "module": "esnext",
    "target": "esnext",
    "moduleResolution": "bundler",
    "strict": true,
    "downlevelIteration": true,
    "skipLibCheck": true,
    "jsx": "react-jsx",
    "allowSyntheticDefaultImports": true,
    "forceConsistentCasingInFileNames": true,
    "allowJs": true,
    "types": [
      "bun-types" // add Bun global
    ]
  }
}
@@ -1,5 +0,0 @@
import { resolve } from "path";
const { write, stdout, file } = Bun;
const input = resolve(process.argv[process.argv.length - 1]);

await write(stdout, file(input));
@@ -1,9 +0,0 @@
// works in both bun & node
import { readFileSync } from "node:fs";
const count = parseInt(process.env.ITERATIONS || "1", 10) || 1;

const arg = process.argv.slice(1);

// TODO: remove Buffer.from() when readFileSync() returns Buffer

for (let i = 0; i < count; i++) console.log(arg.map(file => Buffer.from(readFileSync(file, "utf8"))).join(""));
@@ -1,6 +0,0 @@
const fs = require("fs");
const path = require("path");

const input = path.resolve(process.argv[process.argv.length - 1]);

fs.createReadStream(input).pipe(process.stdout);
@@ -1,5 +0,0 @@
import path from "path";
const input = path.resolve(process.argv[process.argv.length - 2]);
const output = path.resolve(process.argv[process.argv.length - 1]);

await Bun.write(Bun.file(output), Bun.file(input));
@@ -1,4 +0,0 @@
import { createReadStream, createWriteStream } from "node:fs";

const arg = process.argv.slice(2);
createReadStream(arg[0]).pipe(createWriteStream(arg[1]));
@@ -1,34 +0,0 @@
import { copyFileSync, statSync, writeFileSync } from "node:fs";
import { bench, run } from "../runner.mjs";

function runner(ready) {
  for (let size of [1, 10, 100, 1000, 10000, 100000, 1000000, 10000000]) {
    const rand = new Int32Array(size);
    for (let i = 0; i < size; i++) {
      rand[i] = (Math.random() * 1024 * 1024) | 0;
    }
    const dest = `/tmp/fs-test-copy-file-${((Math.random() * 10000000 + 100) | 0).toString(32)}`;
    const src = `/tmp/fs-test-copy-file-${((Math.random() * 10000000 + 100) | 0).toString(32)}`;
    writeFileSync(src, Buffer.from(rand.buffer), { encoding: "buffer" });
    const { size: fileSize } = statSync(src);
    if (fileSize !== rand.byteLength) {
      throw new Error("size mismatch");
    }
    ready(src, dest, new Uint8Array(rand.buffer));
  }
}
runner((src, dest, rand) =>
  bench(`copyFileSync(${rand.buffer.byteLength} bytes)`, () => {
    copyFileSync(src, dest);
    // const output = readFileSync(dest).buffer;

    // for (let i = 0; i < output.length; i++) {
    //   if (output[i] !== rand[i]) {
    //     throw new Error(
    //       "Files are not equal" + " " + output[i] + " " + rand[i] + " " + i
    //     );
    //   }
    // }
  }),
);
await run();
@@ -1,5 +0,0 @@
import { copyFileSync } from "node:fs";

const arg = process.argv.slice(2);

copyFileSync(arg[0], arg[1]);
@@ -1,44 +0,0 @@
import { bench, run } from "../runner.mjs";
import crypto from "node:crypto";
import { Buffer } from "node:buffer";

const keylen = { "aes-128-gcm": 16, "aes-192-gcm": 24, "aes-256-gcm": 32 };
const sizes = [4 * 1024, 1024 * 1024];
const ciphers = ["aes-128-gcm", "aes-192-gcm", "aes-256-gcm"];

const messages = {};
sizes.forEach(size => {
  messages[size] = Buffer.alloc(size, "b");
});

const keys = {};
ciphers.forEach(cipher => {
  keys[cipher] = crypto.randomBytes(keylen[cipher]);
});

// Fixed IV and AAD
const iv = crypto.randomBytes(12);
const associate_data = Buffer.alloc(16, "z");

for (const cipher of ciphers) {
  for (const size of sizes) {
    const message = messages[size];
    const key = keys[cipher];

    bench(`${cipher} ${size / 1024}KB`, () => {
      const alice = crypto.createCipheriv(cipher, key, iv);
      alice.setAAD(associate_data);
      const enc = alice.update(message);
      alice.final();
      const tag = alice.getAuthTag();

      const bob = crypto.createDecipheriv(cipher, key, iv);
      bob.setAuthTag(tag);
      bob.setAAD(associate_data);
      bob.update(enc);
      bob.final();
    });
  }
}

await run();
@@ -1,24 +0,0 @@
import { bench, run } from "../runner.mjs";
const crypto = require("node:crypto");

const keyPair = crypto.generateKeyPairSync("rsa", {
  modulusLength: 2048,
  publicKeyEncoding: {
    type: "spki",
    format: "pem",
  },
  privateKeyEncoding: {
    type: "pkcs8",
    format: "pem",
  },
});

// Max message size for 2048-bit RSA keys
const plaintext = crypto.getRandomValues(Buffer.alloc(214));

bench("RSA_PKCS1_OAEP_PADDING round-trip", () => {
  const ciphertext = crypto.publicEncrypt(keyPair.publicKey, plaintext);
  crypto.privateDecrypt(keyPair.privateKey, ciphertext);
});

await run();
@@ -1,24 +0,0 @@
import { bench, run } from "../runner.mjs";
const crypto = require("node:crypto");

const keyPair = crypto.generateKeyPairSync("rsa", {
  modulusLength: 2048,
  publicKeyEncoding: {
    type: "spki",
    format: "pem",
  },
  privateKeyEncoding: {
    type: "pkcs8",
    format: "pem",
  },
});

// Max message size for 2048-bit RSA keys
const plaintext = crypto.getRandomValues(Buffer.alloc(245));

bench("RSA sign RSA_PKCS1_PADDING round-trip", () => {
  const sig = crypto.privateEncrypt(keyPair.privateKey, plaintext);
  crypto.publicDecrypt(keyPair.publicKey, sig);
});

await run();
@@ -1,53 +0,0 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";

// Pre-generate DH params to avoid including setup in benchmarks
const dhSize = 1024; // Reduced from 2048 for faster testing
const dh = crypto.createDiffieHellman(dhSize);
const dhPrime = dh.getPrime();
const dhGenerator = dh.getGenerator();

// Classical Diffie-Hellman
bench("DH - generateKeys", () => {
  const alice = crypto.createDiffieHellman(dhPrime, dhGenerator);
  return alice.generateKeys();
});

bench("DH - computeSecret", () => {
  // Setup
  const alice = crypto.createDiffieHellman(dhPrime, dhGenerator);
  const aliceKey = alice.generateKeys();
  const bob = crypto.createDiffieHellman(dhPrime, dhGenerator);
  const bobKey = bob.generateKeys();

  // Benchmark just the secret computation
  return alice.computeSecret(bobKey);
});

// ECDH with prime256v1 (P-256)
bench("ECDH-P256 - generateKeys", () => {
  const ecdh = crypto.createECDH("prime256v1");
  return ecdh.generateKeys();
});

bench("ECDH-P256 - computeSecret", () => {
  // Setup
  const alice = crypto.createECDH("prime256v1");
  const aliceKey = alice.generateKeys();
  const bob = crypto.createECDH("prime256v1");
  const bobKey = bob.generateKeys();

  // Benchmark just the secret computation
  return alice.computeSecret(bobKey);
});

// ECDH with secp384r1 (P-384)
bench("ECDH-P384 - computeSecret", () => {
  const alice = crypto.createECDH("secp384r1");
  const aliceKey = alice.generateKeys();
  const bob = crypto.createECDH("secp384r1");
  const bobKey = bob.generateKeys();
  return alice.computeSecret(bobKey);
});

await run();
@@ -1,44 +0,0 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";

function generateTestKeyPairs() {
  const curves = crypto.getCurves();
  const keys = {};

  for (const curve of curves) {
    const ecdh = crypto.createECDH(curve);
    ecdh.generateKeys();

    keys[curve] = {
      compressed: ecdh.getPublicKey("hex", "compressed"),
      uncompressed: ecdh.getPublicKey("hex", "uncompressed"),
      instance: ecdh,
    };
  }

  return keys;
}

const testKeys = generateTestKeyPairs();

bench("ECDH key format - P256 compressed to uncompressed", () => {
  const publicKey = testKeys["prime256v1"].compressed;
  return crypto.ECDH.convertKey(publicKey, "prime256v1", "hex", "hex", "uncompressed");
});

bench("ECDH key format - P256 uncompressed to compressed", () => {
  const publicKey = testKeys["prime256v1"].uncompressed;
  return crypto.ECDH.convertKey(publicKey, "prime256v1", "hex", "hex", "compressed");
});

bench("ECDH key format - P384 compressed to uncompressed", () => {
  const publicKey = testKeys["secp384r1"].compressed;
  return crypto.ECDH.convertKey(publicKey, "secp384r1", "hex", "hex", "uncompressed");
});

bench("ECDH key format - P384 uncompressed to compressed", () => {
  const publicKey = testKeys["secp384r1"].uncompressed;
  return crypto.ECDH.convertKey(publicKey, "secp384r1", "hex", "hex", "compressed");
});

await run();
@@ -1,50 +0,0 @@
import crypto from "node:crypto";
import { bench, run } from "../runner.mjs";

// Sample keys with different lengths
const keys = {
  short: "secret",
  long: "this-is-a-much-longer-secret-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
};

// Test parameters
const salts = ["", "salt"];
const infos = ["", "info"];
const hashes = ["sha256", "sha512"];
const sizes = [10, 1024];

// Benchmark sync HKDF
for (const hash of hashes) {
  for (const keyName of Object.keys(keys)) {
    const key = keys[keyName];
    for (const size of sizes) {
      bench(`hkdfSync ${hash} ${keyName}-key ${size} bytes`, () => {
        return crypto.hkdfSync(hash, key, "salt", "info", size);
      });
    }
  }
}

// Benchmark different combinations of salt and info
for (const salt of salts) {
  for (const info of infos) {
    bench(`hkdfSync sha256 with ${salt ? "salt" : "no-salt"} and ${info ? "info" : "no-info"}`, () => {
      return crypto.hkdfSync("sha256", "secret", salt, info, 64);
    });
  }
}

// Benchmark async HKDF (using promises for cleaner benchmark)
// Note: async benchmarks in Mitata require returning a Promise
for (const hash of hashes) {
  bench(`hkdf ${hash} async`, async () => {
    return new Promise((resolve, reject) => {
      crypto.hkdf(hash, "secret", "salt", "info", 64, (err, derivedKey) => {
        if (err) reject(err);
        else resolve(derivedKey);
      });
    });
  });
}

await run();
@@ -1,43 +0,0 @@
import { checkPrime, checkPrimeSync, generatePrime, generatePrimeSync } from "node:crypto";
import { bench, run } from "../runner.mjs";

const prime512 = generatePrimeSync(512);
const prime2048 = generatePrimeSync(2048);

bench("checkPrimeSync 512", () => {
  return checkPrimeSync(prime512);
});

bench("checkPrimeSync 2048", () => {
  return checkPrimeSync(prime2048);
});

bench("checkPrime 512", async () => {
  const promises = Array.from({ length: 10 }, () => new Promise(resolve => checkPrime(prime512, resolve)));
  await Promise.all(promises);
});

bench("checkPrime 2048", async () => {
  const promises = Array.from({ length: 10 }, () => new Promise(resolve => checkPrime(prime2048, resolve)));
  await Promise.all(promises);
});

bench("generatePrimeSync 512", () => {
  return generatePrimeSync(512);
});

bench("generatePrimeSync 2048", () => {
  return generatePrimeSync(2048);
});

bench("generatePrime 512", async () => {
  const promises = Array.from({ length: 10 }, () => new Promise(resolve => generatePrime(512, resolve)));
  await Promise.all(promises);
});

bench("generatePrime 2048", async () => {
  const promises = Array.from({ length: 10 }, () => new Promise(resolve => generatePrime(2048, resolve)));
  await Promise.all(promises);
});

await run();
@@ -1,50 +0,0 @@
import crypto from "crypto";
import { bench, run } from "../runner.mjs";

bench("randomInt - sync", () => {
  crypto.randomInt(1000);
});

bench("randomInt - async", async () => {
  const { promise, resolve } = Promise.withResolvers();
  crypto.randomInt(1000, () => {
    resolve();
  });
  await promise;
});

bench("randomBytes - 32", () => {
  crypto.randomBytes(32);
});

bench("randomBytes - 256", () => {
  crypto.randomBytes(256);
});

const buf = Buffer.alloc(256);

bench("randomFill - 32", async () => {
  const { promise, resolve } = Promise.withResolvers();
  crypto.randomFill(buf, 0, 32, () => {
    resolve();
  });
  await promise;
});

bench("randomFill - 256", async () => {
  const { promise, resolve } = Promise.withResolvers();
  crypto.randomFill(buf, 0, 256, () => {
    resolve();
  });
  await promise;
});

bench("randomFillSync - 32", () => {
  crypto.randomFillSync(buf, 0, 32);
});

bench("randomFillSync - 256", () => {
  crypto.randomFillSync(buf, 0, 256);
});

await run();
Some files were not shown because too many files have changed in this diff.